Last active
June 25, 2019 22:59
-
-
Save ghardin137/6b5ead84540c246b2059822688e0a2b5 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Single shared AudioContext for the whole player. Created once at module
// scope because browsers cap the number of live contexts; the webkit
// fallback covers older Safari, which only ships the prefixed constructor.
const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
const Player = ({ tempo, phrases, assets, playing, length, togglePlaying, stopPlaying }) => { | |
const [audioLoading, setAudioLoading] = useState(true); | |
const [audioPhrases, setAudioPhrases] = useState({}); | |
const [beat, setBeat] = useState(0); | |
const timeout = useRef(); | |
const playingNodes = useRef([]); | |
useEffect(() => { | |
const buffers = assets.map(asset => { | |
if (asset.url) { | |
return fetch(asset.url) | |
.then(res => res.arrayBuffer()) | |
.then(buffer => audioCtx.decodeAudioData(buffer)); | |
} | |
return Promise.resolve(); | |
}); | |
Promise.all(buffers).then(audio => { | |
const audioBuffers = assets.reduce((result, current, index) => { | |
if (!result[current.id]) result[current.id] = audio[index]; | |
return result; | |
}, {}); | |
setAudioPhrases(audioBuffers); | |
setAudioLoading(false); | |
}); | |
}, [assets]); | |
useEffect(() => { | |
if (playing) { | |
timeout.current = setInterval(() => { | |
setBeat(prev => prev + 1); | |
}, 60000 / tempo); | |
} else { | |
if (timeout.current) { | |
clearInterval(timeout.current); | |
setBeat(0); | |
} | |
} | |
return () => { | |
if (timeout.current) clearInterval(timeout.current); | |
}; | |
}, [playing]); | |
useEffect(() => { | |
if (playing) { | |
stopPhrases(beat); | |
if (beat > length) { | |
stopPlaying(); | |
} else { | |
if (phrases[beat] && phrases[beat].length > 0) { | |
phrases[beat].forEach(phrase => { | |
const audioCtx = new AudioContext({ latencyHint: 'interactive' }); | |
const audio = audioCtx.createBufferSource(); | |
audio.buffer = audioPhrases[phrase.asset.id]; | |
const gain = audioCtx.createGain(); | |
gain.gain.setValueAtTime(phrase.volume / 100, audioCtx.currentTime); | |
gain.connect(audioCtx.destination); | |
audio.connect(gain); | |
audio.start(); | |
playingNodes.current.push({ ...phrase, node: audio }); | |
}); | |
} | |
} | |
} | |
}, [beat, playing, audioPhrases]); | |
const stopPhrases = beat => { | |
playingNodes.current = playingNodes.current.reduce((nodes, node) => { | |
if (node.end <= beat) { | |
node.node.stop(); | |
} else { | |
nodes.push(node); | |
} | |
return nodes; | |
}, []); | |
}; | |
const stop = useCallback(() => { | |
stopPlaying(); | |
stopPhrases(Number.MAX_SAFE_INTEGER); | |
}, []); | |
return ( | |
<PlayerContainer> | |
<button onClick={togglePlaying} disabled={audioLoading}> | |
{playing ? 'Pause' : 'Play'} | |
</button> | |
<button onClick={stop} disabled={audioLoading}> | |
Stop | |
</button> | |
<span style={{ fontWeight: 'bold', fontSize: 20 }}> | |
{beat * (60 / tempo)}/{length * (60 / tempo)}s | |
</span> | |
</PlayerContainer> | |
); | |
}; |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// @flow | |
import React, { Component } from 'react'; | |
import styled from '@emotion/styled'; | |
// Transport bar wrapper. `flex-shrink: 0` keeps the bar at its natural
// height inside the flex column; button colors come from the emotion theme.
const PlayerContainer = styled.section`
  flex-shrink: 0;
  border-top: 1px solid black;
  padding: 20px;
  button {
    padding: 7px 12px;
    border-radius: 3px;
    color: ${props => props.theme.colors.white};
    background-color: ${props => props.theme.colors.tertiary};
    margin-right: 10px;
  }
`;
// Single shared AudioContext for the whole player. Created once at module
// scope because browsers cap the number of live contexts; the webkit
// fallback covers older Safari, which only ships the prefixed constructor.
const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
export default class Player extends Component { | |
state = { | |
audioLoading: true, | |
audioPhrases: {}, | |
beat: 0, | |
}; | |
timeout = null; | |
playingNodes = []; | |
componentDidMount() { | |
this.loadAudio(); | |
} | |
componentDidUpdate(prevProps, prevState) { | |
if (prevProps.assets !== this.props.assets) { | |
this.loadAudio(); | |
} | |
if (prevProps.playing !== this.props.playing) { | |
this.play(); | |
} | |
if (prevState.beat !== this.state.beat) { | |
this.beat(); | |
} | |
} | |
componentWillUnmount() { | |
if (this.timeout) clearInterval(this.timeout); | |
} | |
loadAudio = () => { | |
const buffers = this.props.assets.map(asset => { | |
if (asset.url) { | |
return fetch(asset.url) | |
.then(res => res.arrayBuffer()) | |
.then(buffer => audioCtx.decodeAudioData(buffer)); | |
} | |
return Promise.resolve(); | |
}); | |
Promise.all(buffers).then(audio => { | |
const audioBuffers = this.props.assets.reduce((result, current, index) => { | |
if (!result[current.id]) result[current.id] = audio[index]; | |
return result; | |
}, {}); | |
this.setState({ audioPhrases: audioBuffers, audioLoading: false }); | |
}); | |
}; | |
beat = () => { | |
const { beat, audioPhrases } = this.state; | |
const { playing, phrases, length } = this.props; | |
if (playing) { | |
this.stopPhrases(beat); | |
if (beat > length) { | |
this.props.stopPlaying(); | |
} else { | |
if (phrases[beat] && phrases[beat].length > 0) { | |
phrases[beat].forEach(phrase => { | |
const audioCtx = new AudioContext({ latencyHint: 'interactive' }); | |
const audio = audioCtx.createBufferSource(); | |
audio.buffer = audioPhrases[phrase.asset.id]; | |
const gain = audioCtx.createGain(); | |
gain.gain.setValueAtTime(phrase.volume / 100, audioCtx.currentTime); | |
gain.connect(audioCtx.destination); | |
audio.connect(gain); | |
audio.start(); | |
this.playingNodes.push({ ...phrase, node: audio }); | |
}); | |
} | |
} | |
} | |
}; | |
play = () => { | |
if (this.props.playing) { | |
this.timeout = setInterval(() => { | |
this.setState(prev => ({ beat: prev.beat + 1 })); | |
}, 60000 / this.props.tempo); | |
} else { | |
if (this.timeout) { | |
clearInterval(this.timeout); | |
this.setState({ beat: 0 }); | |
} | |
} | |
}; | |
stopPhrases = beat => { | |
this.playingNodes = this.playingNodes.reduce((nodes, node) => { | |
if (node.end <= beat) { | |
node.node.stop(); | |
} else { | |
nodes.push(node); | |
} | |
return nodes; | |
}, []); | |
}; | |
stop = () => { | |
this.props.stopPlaying(); | |
this.stopPhrases(Number.MAX_SAFE_INTEGER); | |
}; | |
render() { | |
const { tempo, playing, length, togglePlaying } = this.props; | |
const { audioLoading, beat } = this.state; | |
return ( | |
<PlayerContainer> | |
<button onClick={togglePlaying} disabled={audioLoading}> | |
{playing ? 'Pause' : 'Play'} | |
</button> | |
<button onClick={this.stop} disabled={audioLoading}> | |
Stop | |
</button> | |
<span style={{ fontWeight: 'bold', fontSize: 20 }}> | |
{beat * (60 / tempo)}/{length * (60 / tempo)}s | |
</span> | |
</PlayerContainer> | |
); | |
} | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment