Reactjs 将类组件重构为功能组件
我正在分析一个类组件中的一些音频,然后在另一个组件中呈现音频的波形可视化。我正在努力用钩子将它们重构成功能组件 问题:Reactjs 将类组件重构为功能组件,reactjs,refactoring,mediastream,react-functional-component,Reactjs,Refactoring,Mediastream,React Functional Component,我正在分析一个类组件中的一些音频,然后在另一个组件中呈现音频的波形可视化。我正在努力用钩子将它们重构成功能组件 问题: 我不知道如何重写this。在audioanalyzer中勾选bind方法 使用功能组件中的ref标记写入audiovisualizer中的canvas元素 类音频分析器扩展组件{ 建造师(道具){ 超级(道具); this.state={audioData:new-Uint8Array(0)}; this.tick=this.tick.bind(this); } compone
1. 我不知道如何改写 `this.tick = this.tick.bind(this)` 这种方法绑定；
2. 不知道在函数组件中如何用 `ref` 引用 AudioVisualiser 里的 canvas 元素。

class AudioAnalyser extends Component {
  constructor(props) {
    super(props);
    this.state = { audioData: new Uint8Array(0) };
    this.tick = this.tick.bind(this);
  }
  componentDidMount() {
    this.audioContext = new window.AudioContext();
    this.analyser = this.audioContext.createAnalyser();
    this.dataArray = new Uint8Array(this.analyser.frequencyBinCount);
    this.source = this.audioContext.createMediaStreamSource(this.props.audio);
    this.source.connect(this.analyser);
    this.rafId = requestAnimationFrame(this.tick);
  }
  tick() {
    this.analyser.getByteTimeDomainData(this.dataArray);
    this.setState({ audioData: this.dataArray });
    this.rafId = requestAnimationFrame(this.tick);
  }
  componentWillUnmount() {
    cancelAnimationFrame(this.rafId);
    this.analyser.disconnect();
    this.source.disconnect();
  }
  render() {
    return <AudioVisualiser audioData={this.state.audioData} />;
  }
}
export default AudioAnalyser;
class AudioVisualiser extends Component {
  constructor(props) {
    super(props);
    this.canvas = React.createRef();
  }
  componentDidUpdate() {
    this.draw();
  }
  draw() {
    const { audioData } = this.props;
    const canvas = this.canvas.current;
    const height = canvas.height;
    const width = canvas.width;
    const context = canvas.getContext('2d');
    let x = 0;
    const sliceWidth = (width * 1.0) / audioData.length;
    context.lineWidth = 2;
    context.strokeStyle = '#000000';
    context.clearRect(0, 0, width, height);
    context.beginPath();
    context.moveTo(0, height / 2);
    for (const item of audioData) {
      const y = (item / 255.0) * height;
      context.lineTo(x, y);
      x += sliceWidth;
    }
    context.lineTo(x, height / 2);
    context.stroke();
  }
  render() {
    return <canvas width="300" height="300" ref={this.canvas} />;
  }
}
export default AudioVisualiser;
未测试,但应朝此方向
import React, { useEffect, useState } from 'react';

function useAudio(audio) {
  const [analyser, setAnalyser] = useState();
  const [audioContext, setAudioContext] = useState();
  const [audioData, setAudioData] = useState();
  const [dataArray, setDataArray] = useState();
  const [rafId, setRafId] = useState();
  const [source, setSource] = useState();

  useEffect(() => {
    const tick = () => {
      analyser.getByteTimeDomainData(dataArray);
      setAudioData(dataArray);
      setRafId(requestAnimationFrame(tick));
    };
    setAudioContext(new window.AudioContext());
    setAnalyser(audioContext.createAnalyser());
    setDataArray(new Uint8Array(analyser.frequencyBinCount));
    setSource(audioContext.createMediaStreamSource(audio), () => {
      source.connect(analyser);
    });
    setRafId(requestAnimationFrame(tick));
    return () => {
      cancelAnimationFrame(rafId);
      analyser.disconnect();
      source.disconnect();
    };
  }, []);

  return [audioData, setAudioData];
}

function AudioAnalyser({ audio }) {
  const [audioData] = useAudio(audio);
  return <AudioVisualiser audioData={audioData} />;
}

export default AudioAnalyser;
至于第二个组件（AudioVisualiser）：
import React, { useEffect, useRef } from 'react';

function Canvas({ audioData }) {
  const canvasRef = useRef();
  const didMountRef = useRef(false);

  useEffect(() => {
    const draw = () => {
      const canvas = canvasRef.current;
      const height = canvas.height;
      const width = canvas.width;
      const context = canvas.getContext('2d');
      const sliceWidth = (width * 1.0) / audioData.length;
      context.lineWidth = 2;
      context.strokeStyle = '#000000';
      context.clearRect(0, 0, width, height);
      context.beginPath();
      context.moveTo(0, height / 2);
      let x = 0;
      for (const item of audioData) {
        const y = (item / 255.0) * height;
        context.lineTo(x, y);
        x += sliceWidth;
      }
      context.lineTo(x, height / 2);
      context.stroke();
    };
    if (didMountRef.current) {
      draw();
    } else didMountRef.current = true;
  }, []);

  return <canvas width="300" height="300" ref={canvasRef} />;
}

export { Canvas as default };
让我知道这是否对您有帮助,或者您是否需要对某些代码片段进行解释
class AudioAnalyser extends Component {
constructor(props) {
super(props);
this.state = { audioData: new Uint8Array(0) };
this.tick = this.tick.bind(this);
}
componentDidMount() {
this.audioContext = new window.AudioContext();
this.analyser = this.audioContext.createAnalyser();
this.dataArray = new Uint8Array(this.analyser.frequencyBinCount);
this.source = this.audioContext.createMediaStreamSource(this.props.audio);
this.source.connect(this.analyser);
this.rafId = requestAnimationFrame(this.tick);
}
tick() {
this.analyser.getByteTimeDomainData(this.dataArray);
this.setState({ audioData: this.dataArray });
this.rafId = requestAnimationFrame(this.tick);
}
componentWillUnmount() {
cancelAnimationFrame(this.rafId);
this.analyser.disconnect();
this.source.disconnect();
}
render() {
return <AudioVisualiser audioData={this.state.audioData} />;
}
}
export default AudioAnalyser;
class AudioVisualiser extends Component {
constructor(props) {
super(props);
this.canvas = React.createRef();
}
componentDidUpdate() {
this.draw();
}
draw() {
const { audioData } = this.props;
const canvas = this.canvas.current;
const height = canvas.height;
const width = canvas.width;
const context = canvas.getContext('2d');
let x = 0;
const sliceWidth = (width * 1.0) / audioData.length;
context.lineWidth = 2;
context.strokeStyle = '#000000';
context.clearRect(0, 0, width, height);
context.beginPath();
context.moveTo(0, height / 2);
for (const item of audioData) {
const y = (item / 255.0) * height;
context.lineTo(x, y);
x += sliceWidth;
}
context.lineTo(x, height / 2);
context.stroke();
}
render() {
return <canvas width="300" height="300" ref={this.canvas} />;
}
}
export default AudioVisualiser;