index.js
import { AudioProcessor } from './src/audio/AudioProcessor.js'
import { makeVisualizer } from './src/Visualizer.js'
import './index.css'
// user-interaction events that can kick off the visualizer
const events = ['touchstart', 'touchmove', 'touchend', 'click', 'keydown', 'mousemove', 'mousedown', 'mouseup', 'resize']
let ranMain = false
let startTime = 0
const params = new URLSearchParams(window.location.search)
// check if we have microphone access. If so, just run main immediately
navigator.mediaDevices
    .getUserMedia({
        audio: {
            echoCancellation: false,
            noiseSuppression: false,
            autoGainControl: false,
        },
    })
    .then(() => main())
    .catch(() => {
        const body = document.querySelector('body')
        body.classList.remove('ready')
    })
if ('serviceWorker' in navigator) {
    window.addEventListener('load', () => {
        navigator.serviceWorker.register(new URL('/service-worker.js', import.meta.url)).then(
            (registration) => {
                console.log('ServiceWorker registration successful with scope: ', registration.scope)
            },
            (err) => {
                console.log('ServiceWorker registration failed: ', err)
            },
        )
    })
}
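// Global hooks for freezing, saving, and loading audio feature overrides at runtime.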
window.cranes = window.cranes || {}
window.cranes.overwrittenAudioFeatures = window.cranes.overwrittenAudioFeatures || {}
window.cranes.manualFeatures = window.cranes.manualFeatures || {}
window.cranes.freezeAudioFeatures = () => {
    window.cranes.overwrittenAudioFeatures = { ...window.cranes.measuredAudioFeatures }
    return window.cranes.overwrittenAudioFeatures
}
window.cranes.saveAudioFeatures = () => {
    localStorage.setItem('overwrittenAudioFeatures', JSON.stringify(window.cranes.overwrittenAudioFeatures))
}
window.cranes.loadAudioFeatures = () => {
    window.cranes.overwrittenAudioFeatures = JSON.parse(localStorage.getItem('overwrittenAudioFeatures'))
}
window.cranes.loadManualFeatures = (name) => {
    window.cranes.manualFeatures = JSON.parse(localStorage.getItem(`manual-features-${name}`))
}
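// Boot the visualizer: set up audio, resolve the shader, and start the render loop.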
const main = async () => {
    if (ranMain) return
    window.c = cranes
    startTime = performance.now()
    const audio = await setupAudio()
    const params = new URLSearchParams(window.location.search)
    const shaderUrl = params.get('shader')
    // shader resolution order: ?shader= param, then locally saved code, then the default shader
    let shader
    if (shaderUrl) {
        shader = await getShader(shaderUrl)
    }
    if (!shader) {
        shader = localStorage.getItem('cranes-manual-code')
    }
    if (!shader) {
        shader = await getShader('default')
    }
    window.shader = shader
    const initialImageUrl = params.get('image') ?? 'images/placeholder-image.png'
    const fullscreen = (params.get('fullscreen') ?? false) === 'true'
    const canvas = document.getElementById('visualizer')
    const render = await makeVisualizer({ canvas, shader, initialImageUrl, fullscreen })
    requestAnimationFrame(() => animate({ render, audio, shader }))
    ranMain = true
}
// if the url contains the string 'edit', don't auto-start or request fullscreen on interaction
if (!window.location.href.includes('edit')) {
    events.forEach((event) => {
        // get the visualizer
        const visualizer = document.getElementById('visualizer')
        visualizer.addEventListener(event, main, { once: true })
        visualizer.addEventListener(
            event,
            () => {
                document.documentElement.requestFullscreen()
            },
            { once: true },
        )
    })
}
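// Build the audio pipeline: microphone stream -> MediaStreamSource -> AudioProcessor.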
const setupAudio = async () => {
    const audioContext = new AudioContext()
    await audioContext.resume()
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
    const sourceNode = audioContext.createMediaStreamSource(stream)
    const historySize = parseInt(params.get('history_size') ?? '500')
    const audioProcessor = new AudioProcessor(audioContext, sourceNode, historySize)
    await audioProcessor.start()
    return audioProcessor
}
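// Fetch a fragment shader, resolving bare names against the local /shaders directory.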
const getShader = async (url) => {
    // if the url is not a full url, then it's a relative url
    if (!url.includes('http')) {
        url = `/shaders/${url}.frag`
    }
    const res = await fetch(url)
    const fragmentShader = await res.text()
    return fragmentShader
}
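// Render loop: merge measured, query-param, overridden, and manual features (later sources win), then draw.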
const animate = ({ render, audio, shader }) => {
    shader = window.cranes?.shader ?? shader
    const measuredAudioFeatures = audio.getFeatures()
    const queryParamFeatures = {}
    const params = new URLSearchParams(window.location.search)
    // collect the rest of the params
    for (const [key, value] of params) {
        queryParamFeatures[key] = value
    }
    const { overwrittenAudioFeatures, manualFeatures } = window.cranes
    window.cranes.measuredAudioFeatures = measuredAudioFeatures
    const features = { ...measuredAudioFeatures, ...queryParamFeatures, ...overwrittenAudioFeatures, ...manualFeatures }
    try {
        render({ time: (performance.now() - startTime) / 1000, features, shader })
    } catch (e) {
        console.error(e)
    }
    requestAnimationFrame(() => animate({ render, audio, shader }))
}