Android 位图/画布:java.lang.IllegalArgumentException:宽度和高度必须大于 0
我正在尝试为我已经运行的应用程序添加一个波形视图,它会录制声音,然后播放。录制/播放代码工作正常,但当我尝试加入可视化工具、录下自己的声音再播放时,按下播放按钮就会崩溃。在他的 visualizer 示例应用中,他自带了一个 mp3 声音文件并通过 MediaPlayer 播放;由于我同样是通过 MediaPlayer 播放录音,所以我以为把他的可视化部分整合进来应该很容易,但有些地方不对劲。奇怪的是,他的代码在我的 Android 手机(三星 Galaxy S5)上运行得非常完美,我可以看到随音乐变化的可视化效果。我也查过类似的问题,但没有找到答案。错误似乎来自他的代码 VisualizerView.java 中创建位图的部分,抛出 java.lang.IllegalArgumentException:宽度和高度必须大于 0。(相关标签:android、canvas、bitmap、rect、visualizer)
onDraw()
方法中的getWidth()
和getHeight()
是从Canvas
调用的。我还尝试记录这些值,但它们没有显示在我的LogCat
中。我正在使用Android Studio。谢谢你的帮助
编辑:
啊,我看到我的Log
语句不起作用,因为它放在崩溃点之后。我把它放在崩溃点的正前方:
if (mCanvasBitmap == null) {
mCanvasBitmap = Bitmap.createBitmap(canvas.getWidth(),
canvas.getHeight(), Config.ARGB_8888);
}
我从LogCat得到:getWidth的值是924,getHeight的值是0
。所以问题是,为什么高度是零
VisualizerView.java
package org.azurespot.waveform;
/**
* Created by mizu on 2/2/15.
*/
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.PorterDuff.Mode;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.media.MediaPlayer;
import android.media.audiofx.Visualizer;
import android.util.AttributeSet;
import android.view.View;
import java.util.HashSet;
import java.util.Set;
import static android.util.Log.d;
/**
 * A {@link View} that draws visualizations of audio data received from a
 * {@link Visualizer.OnDataCaptureListener#onWaveFormDataCapture } and
 * {@link Visualizer.OnDataCaptureListener#onFftDataCapture } callback.
 *
 * <p>Rendering is double-buffered: renderers draw into an off-screen
 * {@link Bitmap} which is then blitted onto the view's canvas, so old frames
 * can be faded out instead of cleared.</p>
 */
public class VisualizerView extends View {
    private static final String TAG = "VisualizerView";

    private byte[] mBytes;     // latest waveform capture; null until first capture
    private byte[] mFFTBytes;  // latest FFT capture; null until first capture
    private Rect mRect = new Rect();
    private Visualizer mVisualizer;
    private Set<Renderer> mRenderers;
    private Paint mFlashPaint = new Paint();
    private Paint mFadePaint = new Paint();

    // Off-screen drawing state, (re)built lazily in onDraw() once the view
    // has a non-zero size. Package-private, as in the original source.
    Bitmap mCanvasBitmap;
    Canvas mCanvas;
    boolean mFlash = false;

    public VisualizerView(Context context, AttributeSet attrs, int defStyle) {
        // BUG FIX: the original called super(context, attrs) here, silently
        // discarding defStyle. Forward all three arguments.
        super(context, attrs, defStyle);
        init();
    }

    public VisualizerView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public VisualizerView(Context context) {
        this(context, null, 0);
    }

    /** Shared initialization for all constructors: paints and renderer set. */
    private void init() {
        mBytes = null;
        mFFTBytes = null;
        mFlashPaint.setColor(Color.argb(122, 255, 255, 255));
        // Adjust alpha to change how quickly the previous frame fades out.
        mFadePaint.setColor(Color.argb(238, 255, 255, 255));
        mFadePaint.setXfermode(new PorterDuffXfermode(Mode.MULTIPLY));
        mRenderers = new HashSet<Renderer>();
    }

    /**
     * Links the visualizer to a player so captured audio data is forwarded
     * to this view.
     *
     * @param player MediaPlayer instance to link to; must not be null
     * @throws NullPointerException if {@code player} is null
     */
    public void link(MediaPlayer player) {
        if (player == null) {
            throw new NullPointerException("Cannot link to null MediaPlayer");
        }

        // Create the Visualizer object and attach it to our media player.
        mVisualizer = new Visualizer(player.getAudioSessionId());
        mVisualizer.setEnabled(false);
        mVisualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);

        // Pass through Visualizer data to VisualizerView.
        Visualizer.OnDataCaptureListener captureListener =
                new Visualizer.OnDataCaptureListener() {
            @Override
            public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes,
                                              int samplingRate) {
                updateVisualizer(bytes);
            }

            @Override
            public void onFftDataCapture(Visualizer visualizer, byte[] bytes,
                                         int samplingRate) {
                updateVisualizerFFT(bytes);
            }
        };
        mVisualizer.setDataCaptureListener(captureListener,
                Visualizer.getMaxCaptureRate() / 2, true, true);

        // Enable the Visualizer, and disable it when the stream completes.
        mVisualizer.setEnabled(true);
        player.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
            @Override
            public void onCompletion(MediaPlayer mediaPlayer) {
                mVisualizer.setEnabled(false);
            }
        });
    }

    /** Adds a renderer; null renderers are ignored. */
    public void addRenderer(Renderer renderer) {
        if (renderer != null) {
            mRenderers.add(renderer);
        }
    }

    /** Removes all renderers. */
    public void clearRenderers() {
        mRenderers.clear();
    }

    /**
     * Call to release the resources used by VisualizerView. Like with the
     * MediaPlayer it is good practice to call this method when done.
     */
    public void release() {
        // BUG FIX: guard against release() before link(), or a double release,
        // which would have thrown a NullPointerException.
        if (mVisualizer != null) {
            mVisualizer.release();
            mVisualizer = null;
        }
    }

    /**
     * Pass waveform data to the visualizer. Typically this will be obtained
     * from the Android Visualizer.OnDataCaptureListener call back. See
     * {@link Visualizer.OnDataCaptureListener#onWaveFormDataCapture }
     *
     * @param bytes captured waveform data
     */
    public void updateVisualizer(byte[] bytes) {
        mBytes = bytes;
        invalidate();
    }

    /**
     * Pass FFT data to the visualizer. Typically this will be obtained from the
     * Android Visualizer.OnDataCaptureListener call back. See
     * {@link Visualizer.OnDataCaptureListener#onFftDataCapture }
     *
     * @param bytes captured FFT data
     */
    public void updateVisualizerFFT(byte[] bytes) {
        mFFTBytes = bytes;
        invalidate();
    }

    /**
     * Call this to make the visualizer flash. Useful for flashing at the start
     * of a song/loop etc...
     */
    public void flash() {
        mFlash = true;
        invalidate();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);

        int width = getWidth();
        int height = getHeight();

        // BUG FIX: when the view is laid out with a zero dimension (e.g. a
        // 0dp-high parent FrameLayout), Bitmap.createBitmap throws
        // IllegalArgumentException ("width and height must be > 0").
        // Skip drawing until the view has a real size.
        if (width <= 0 || height <= 0) {
            return;
        }

        mRect.set(0, 0, width, height);

        // (Re)create the off-screen bitmap when first needed or after a
        // resize; the original created it only once and never tracked
        // size changes.
        if (mCanvasBitmap == null
                || mCanvasBitmap.getWidth() != width
                || mCanvasBitmap.getHeight() != height) {
            mCanvasBitmap = Bitmap.createBitmap(width, height, Config.ARGB_8888);
            mCanvas = new Canvas(mCanvasBitmap);
        }

        if (mBytes != null) {
            // Render all audio renderers.
            AudioData audioData = new AudioData(mBytes);
            for (Renderer r : mRenderers) {
                r.render(mCanvas, audioData, mRect);
            }
        }

        if (mFFTBytes != null) {
            // Render all FFT renderers.
            FFTData fftData = new FFTData(mFFTBytes);
            for (Renderer r : mRenderers) {
                r.render(mCanvas, fftData, mRect);
            }
        }

        // Fade out the previous frame's contents.
        mCanvas.drawPaint(mFadePaint);

        if (mFlash) {
            mFlash = false;
            mCanvas.drawPaint(mFlashPaint);
        }

        canvas.drawBitmap(mCanvasBitmap, new Matrix(), null);
    }
}
原来在我的xml中包含我的
VisualizerView
小部件的FrameLayout
就是罪魁祸首!代码是从这里来的,所以我没有想到它,因为我以前看到过FrameLayout
在0dp
显示这种大小(在片段中)。但后来,由于一个偶然的机会,我决定把它调大一点,可视化效果就出现了。这些小细节被发现的瞬间真是奇妙。万分感谢!下面是我 xml 中的小部件,我将高度更改为 200dp
(从0dp
),并修复了它
activity_make_sounds.xml(布局文件名被机器误译为"活动制作声音.xml",此处按原意还原)
<!-- The fixed 200dp height (instead of 0dp) is the crash fix: a 0dp-high
     parent gave VisualizerView a canvas height of 0, and Bitmap.createBitmap
     then threw IllegalArgumentException ("width and height must be > 0").
     Note: fill_parent has been deprecated since API 8; match_parent is the
     identical, current constant. -->
<FrameLayout
    android:layout_width="match_parent"
    android:layout_height="200dp"
    android:layout_margin="10dp"
    android:background="#000" >

    <org.azurespot.waveform.VisualizerView
        android:id="@+id/visualizerView"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

</FrameLayout>
也许:谢谢RC,但它实际上是在xml
中。疯狂…刚做了高度调整,效果很好。谢谢你@Noni A。
<!-- The explicit 200dp height on the parent FrameLayout is the fix: with a
     0dp height, VisualizerView's canvas height was 0 and Bitmap.createBitmap
     in onDraw threw IllegalArgumentException ("width and height must be > 0"). -->
<FrameLayout
android:layout_width="fill_parent"
android:layout_height="200dp"
android:layout_margin="10dp"
android:background="#000" >
<org.azurespot.waveform.VisualizerView
android:id="@+id/visualizerView"
android:layout_width="fill_parent"
android:layout_height="fill_parent" >
</org.azurespot.waveform.VisualizerView>
</FrameLayout>