Android TextureView视频的抗锯齿
我正在将TextureView与MediaPlayer一起用于我的自定义视频组件 如果视频的大小大于或等于TextureView的大小,则一切正常 但是如果视频的大小较小(例如720x576),而TextureView的大小为1280x1024,则质量很差,就像没有应用抗锯齿一样 有趣的是,如果我在完全相同的情况下使用SurfaceView,似乎SurfaceView应用了某种抗锯齿,因此我得到了更好的图片 我尝试将绘制对象应用于TextureView: 但这并不能改善情况 我发现使用setScaleX1.00001f;帮助不大,但质量仍然很差 是否有方法将抗锯齿应用于TextureView 这是组件代码Android TextureView视频的抗锯齿,android,opengl-es,glsl,interpolation,android-textureview,Android,Opengl Es,Glsl,Interpolation,Android Textureview,我正在将TextureView与MediaPlayer一起用于我的自定义视频组件 如果视频的大小大于或等于TextureView的大小,则一切正常 但是如果视频的大小较小(例如720x576),而TextureView的大小为1280x1024,则质量很差,就像没有应用抗锯齿一样 有趣的是,如果我在完全相同的情况下使用SurfaceView,似乎SurfaceView应用了某种抗锯齿,因此我得到了更好的图片 我尝试将绘制对象应用于TextureView: 但这并不能改善情况 我发现使用setSc
package com.example.app;
import android.app.Activity;
import android.content.Context;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.net.Uri;
import android.view.Display;
import android.view.Surface;
import android.view.TextureView;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import java.util.HashMap;
import com.example.app.entities.Channel;
/**
 * Custom video view that renders a {@link MediaPlayer} stream into a
 * {@link TextureView} and resizes itself to match a {@link Channel.AspectRatio}.
 *
 * NOTE(review): a TextureView's upscaling quality is limited to the texture's
 * GL_TEXTURE_MAG_FILTER; for bicubic-quality upscaling the frames must be
 * rendered manually through an intermediate SurfaceTexture.
 */
public class TextureVideoView extends TextureView
        implements MediaPlayer.OnPreparedListener, TextureView.SurfaceTextureListener {

    // Playback state machine, loosely modelled on android.widget.VideoView.
    private static final int STATE_IDLE = 0;
    private static final int STATE_PLAYING = 1;
    private static final int STATE_PAUSED = 2;
    private static final int STATE_PREPARING = 3;
    private static final int STATE_PREPARED = 4;

    private final Context context;
    private MediaPlayer mediaPlayer;
    private SurfaceTexture surfaceTexture;
    private Uri uri;
    private Surface surface;
    private Channel.AspectRatio currentAspectRatio;
    private Channel.AspectRatio targetAspectRatio;
    private int videoWidth = 0;
    private int videoHeight = 0;
    private final int screenWidth;
    private final int screenHeight;
    private int targetState = STATE_IDLE;   // state requested by the caller
    private int currentState = STATE_IDLE;  // actual MediaPlayer state

    public TextureVideoView(Context context) {
        super(context);
        this.context = context;
        Display display = ((Activity) context).getWindowManager().getDefaultDisplay();
        Point size = new Point();
        display.getSize(size);
        screenWidth = size.x;
        screenHeight = size.y;
        // Workaround: a scale factor slightly != 1.0 forces GPU filtering of
        // the layer, which marginally improves upscaling quality.
        setScaleX(1.00001f);
        Paint paint = new Paint();
        paint.setDither(true);
        paint.setFilterBitmap(true);
        paint.setFlags(Paint.ANTI_ALIAS_FLAG);
        paint.setAntiAlias(true);
        setLayerPaint(paint);
        LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(
                ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
        setLayoutParams(params);
        currentAspectRatio = Channel.getInstance().getFullScreenAspectRatio();
        setSurfaceTextureListener(this);
    }

    /**
     * Releases any previous player and starts asynchronous preparation of the
     * given video. If the SurfaceTexture is not available yet, playback is
     * deferred until {@link #onSurfaceTextureAvailable} fires.
     */
    public void setVideoURI(Uri uri) {
        release();
        this.uri = uri;
        if (surfaceTexture == null) {
            return; // retried from onSurfaceTextureAvailable()
        }
        try {
            mediaPlayer = new MediaPlayer();
            mediaPlayer.setOnPreparedListener(this);
            mediaPlayer.setDataSource(context, uri, new HashMap<String, String>());
            mediaPlayer.setScreenOnWhilePlaying(true);
            // Attach the output surface BEFORE preparing so the first decoded
            // frame has somewhere to go (the original prepared first).
            surface = new Surface(surfaceTexture);
            mediaPlayer.setSurface(surface);
            mediaPlayer.prepareAsync();
            currentState = STATE_PREPARING;
        } catch (Exception e) {
            // Was silently swallowed; at least record the failure.
            e.printStackTrace();
            currentState = STATE_IDLE;
        }
    }

    /** Starts playback now if prepared, otherwise remembers the request. */
    public void start() {
        if (isInPlaybackState()) {
            mediaPlayer.start();
            currentState = STATE_PLAYING;
        }
        targetState = STATE_PLAYING;
    }

    /** Pauses playback now if possible, otherwise remembers the request. */
    public void pause() {
        if (isInPlaybackState()) {
            mediaPlayer.pause();
            currentState = STATE_PAUSED;
        }
        targetState = STATE_PAUSED;
    }

    /** Stops playback and releases the MediaPlayer. */
    public void stopPlayback() {
        if (mediaPlayer != null) {
            mediaPlayer.stop();
            mediaPlayer.release();
            mediaPlayer = null;
            currentState = STATE_IDLE;
            targetState = STATE_IDLE;
        }
    }

    /** @return current position in ms, or 0 when no player exists (was NPE). */
    public int getCurrentPosition() {
        return mediaPlayer != null ? mediaPlayer.getCurrentPosition() : 0;
    }

    /** @return true when actively playing; false when no player exists (was NPE). */
    public boolean isPlaying() {
        return mediaPlayer != null && mediaPlayer.isPlaying();
    }

    private boolean isInPlaybackState() {
        return mediaPlayer != null
                && (currentState == STATE_PLAYING || currentState == STATE_PREPARED
                    || currentState == STATE_PAUSED);
    }

    /** Fully tears down the player and surface; safe to call repeatedly. */
    private void release() {
        if (mediaPlayer != null) {
            mediaPlayer.reset();
            mediaPlayer.release();
            mediaPlayer = null; // was left dangling after release()
        }
        if (surface != null) {
            surface.release();
            surface = null; // was left dangling after release()
        }
        currentState = STATE_IDLE;
    }

    @Override
    public void onPrepared(MediaPlayer mp) {
        currentState = STATE_PREPARED;
        videoWidth = mp.getVideoWidth();
        videoHeight = mp.getVideoHeight();
        applyAspectRatio();
        // Honour any start()/pause() request made while preparing.
        if (targetState == STATE_PLAYING) {
            start();
        } else if (targetState == STATE_PAUSED) {
            pause();
        }
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        surfaceTexture = surface;
        // A URI may have been set before the texture existed; retry now.
        if (currentState == STATE_IDLE && uri != null) {
            setVideoURI(uri);
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
        // Sizing is driven by applyAspectRatio(); nothing to do here.
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        surfaceTexture = null;
        // Returning true lets TextureView release the SurfaceTexture; the
        // original returned false without ever releasing it (leak).
        return true;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
        // No per-frame work needed.
    }

    /** Requests a new aspect ratio; applied once video dimensions are known. */
    public void setAspectRatio(Channel.AspectRatio aspectRatio) {
        targetAspectRatio = aspectRatio;
        applyAspectRatio();
    }

    public void clearCurrentAspectRatio() {
        currentAspectRatio = null;
        videoWidth = 0;
        videoHeight = 0;
    }

    /** Resizes this view so the video fills the screen at the chosen ratio. */
    private void applyAspectRatio() {
        if (videoWidth == 0 || videoHeight == 0) {
            return; // dimensions unknown until onPrepared()
        }
        if (targetAspectRatio != null) {
            // Guard: the original unconditionally overwrote currentAspectRatio,
            // NPE-ing below when setAspectRatio() had never been called.
            currentAspectRatio = targetAspectRatio;
        }
        if (currentAspectRatio == null) {
            return;
        }
        System.out.println(currentAspectRatio.label);
        System.out.println("screen width: " + screenWidth);
        System.out.println("screen height: " + screenHeight);
        System.out.println("original video width: " + videoWidth);
        System.out.println("original video height: " + videoHeight);
        ViewGroup.LayoutParams params = getLayoutParams();
        if (currentAspectRatio.ratio == Channel.RATIO_FULL_WIDTH) {
            // Fill the width; derive height from the source aspect.
            params.width = screenWidth;
            params.height = videoHeight * screenWidth / videoWidth;
        } else {
            // Fill the height; derive width from the requested ratio.
            params.height = screenHeight;
            switch (currentAspectRatio.ratio) {
                case (Channel.RATIO_16_9):
                    params.width = screenHeight * 16 / 9;
                    break;
                case (Channel.RATIO_4_3):
                    params.width = screenHeight * 4 / 3;
                    break;
                case (Channel.RATIO_ORIGINAL):
                    params.width = videoWidth * screenHeight / videoHeight;
                    break;
            }
        }
        System.out.println("video width: " + params.width);
        System.out.println("video height: " + params.height);
        if (params.width == getWidth() && params.height == getHeight()) {
            return; // already laid out at the target size
        }
        setLayoutParams(params);
    }
}
但我得到了这个错误:
E/BufferQueue: [unnamed-28441-1] connect: already connected (cur=1, req=3)
怎么了
更新
最后,我按照@fadden的建议让它工作了。但在我的例子中,GL_LINEAR 还不够好。SurfaceView 似乎使用了更高级的插值方式,例如双三次(bicubic)插值。
于是我尝试基于下面的源码,在 GLSL 片段着色器中实现双三次插值:
但在我的情况下它不能正常工作:图像变暗,性能也很差(约 5 fps),而且还能看到水平和垂直的条纹。哪里出了问题?
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 vTextureCoord;
uniform samplerExternalOES sTexture;
uniform vec2 invScreenSize;
// Bell (quadratic B-spline) weight for bicubic filtering.
// x is the signed distance from the sample point, in [-2, 2].
// BUGFIX: the parameter was named `f` while the body redeclared
// `float f = (x / 2.0) ...` using an undefined `x` — it did not compile.
float BellFunc( float x )
{
	float f = ( x / 2.0 ) * 1.5; // remap [-2, 2] to [-1.5, 1.5]
	if( f > -1.5 && f < -0.5 )
	{
		return( 0.5 * pow(f + 1.5, 2.0));
	}
	else if( f > -0.5 && f < 0.5 )
	{
		return 3.0 / 4.0 - ( f * f );
	}
	else if( ( f > 0.5 && f < 1.5 ) )
	{
		return( 0.5 * pow(f - 1.5, 2.0));
	}
	return 0.0;
}
// 4x4 bicubic (Bell-weighted) interpolation around TexCoord.
// Assumes invScreenSize holds (1/textureWidth, 1/textureHeight) — confirm
// against the uniform upload on the Java side.
// BUGFIX: the original computed texelSize as 1.0/invScreenSize (i.e. the FULL
// texture size, sampling whole-texture offsets → stripes) and took the
// fractional part of TexCoord * invScreenSize (always ~0). With an inverse
// size, one texel IS invScreenSize and the texel-local fraction is
// fract(TexCoord / invScreenSize).
vec4 BiCubic( samplerExternalOES textureSampler, vec2 TexCoord )
{
	float texelSizeX = invScreenSize.x; // width of one texel in UV space
	float texelSizeY = invScreenSize.y; // height of one texel in UV space
	vec4 nSum = vec4( 0.0, 0.0, 0.0, 0.0 );
	vec4 nDenom = vec4( 0.0, 0.0, 0.0, 0.0 );
	// Fractional position of TexCoord inside its source texel.
	float a = fract( TexCoord.x / invScreenSize.x );
	float b = fract( TexCoord.y / invScreenSize.y );
	for( int m = -1; m <= 2; m++ )
	{
		for( int n = -1; n <= 2; n++ )
		{
			vec4 vecData = texture2D( textureSampler,
				TexCoord + vec2( texelSizeX * float( m ),
				                 texelSizeY * float( n ) ) );
			float fx = BellFunc( float( m ) - a );
			float fy = BellFunc( -( float( n ) - b ) );
			nSum += vecData * ( fx * fy );
			nDenom += vec4( fx * fy );
		}
	}
	return nSum / nDenom; // normalise by the total weight
}
void main() {
	// Sample the external (video) texture with bicubic filtering.
	vec4 filteredColor = BiCubic( sTexture, vTextureCoord );
	gl_FragColor = filteredColor;
}
重复问题?@fadden,事实上,我看过那个问题。我尝试把那个解决方案 textureView.setScaleX(1.00001f); 应用到我的问题上,它确实有效,但效果仍然比 SurfaceView 差。SurfaceView 的缩放由显示硬件或合成器中的 GPU 执行,而 TextureView 的缩放由应用进程中的 GPU 执行。TextureView 的输出会使用为该纹理配置的 GL_TEXTURE_MAG_FILTER 所指定的纹理缩放模式。如果没有设置,它将使用 GL_NEAREST,其效果不如 GL_LINEAR。您可以尝试通过挖掘框架源码来了解它在做什么,但我怀疑您需要把输出发送到一个 SurfaceTexture,然后自己绘制到 TextureView 的 Surface 上。不。我的意思是把它渲染到一个独立的 SurfaceTexture,它会以 OpenGL ES 外部纹理的形式提供给您。然后,选择 GL_LINEAR 缩放模式,用 GLES 把它渲染到 TextureView。如果您尝试使用 Bitmap 和 Canvas,所有像素都会经由软件而不是 GPU 搬运,从而无法保持帧率。您可以在 Grafika 中找到各种视频处理的例子。SurfaceTexture 只是把它在 Surface 上接收到的内容转换为外部纹理,它本身不做渲染。您缺少的部分是:您的应用需要通过 onFrameAvailable 回调获知每个到来的帧,然后再把纹理渲染到 TextureView。这是一个手动步骤——不能只是把各个部件简单地接在一起。Grafika 中的 FullFrameRect 类对此很有用。
E/BufferQueue: [unnamed-28441-1] connect: already connected (cur=1, req=3)
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 vTextureCoord;
uniform samplerExternalOES sTexture;
uniform vec2 invScreenSize;
// Bell (quadratic B-spline) weight for bicubic filtering.
// x is the signed distance from the sample point, in [-2, 2].
// BUGFIX: the parameter was named `f` while the body redeclared
// `float f = (x / 2.0) ...` using an undefined `x` — it did not compile.
float BellFunc( float x )
{
	float f = ( x / 2.0 ) * 1.5; // remap [-2, 2] to [-1.5, 1.5]
	if( f > -1.5 && f < -0.5 )
	{
		return( 0.5 * pow(f + 1.5, 2.0));
	}
	else if( f > -0.5 && f < 0.5 )
	{
		return 3.0 / 4.0 - ( f * f );
	}
	else if( ( f > 0.5 && f < 1.5 ) )
	{
		return( 0.5 * pow(f - 1.5, 2.0));
	}
	return 0.0;
}
// 4x4 bicubic (Bell-weighted) interpolation around TexCoord.
// Assumes invScreenSize holds (1/textureWidth, 1/textureHeight) — confirm
// against the uniform upload on the Java side.
// BUGFIX: the original computed texelSize as 1.0/invScreenSize (i.e. the FULL
// texture size, sampling whole-texture offsets → stripes) and took the
// fractional part of TexCoord * invScreenSize (always ~0). With an inverse
// size, one texel IS invScreenSize and the texel-local fraction is
// fract(TexCoord / invScreenSize).
vec4 BiCubic( samplerExternalOES textureSampler, vec2 TexCoord )
{
	float texelSizeX = invScreenSize.x; // width of one texel in UV space
	float texelSizeY = invScreenSize.y; // height of one texel in UV space
	vec4 nSum = vec4( 0.0, 0.0, 0.0, 0.0 );
	vec4 nDenom = vec4( 0.0, 0.0, 0.0, 0.0 );
	// Fractional position of TexCoord inside its source texel.
	float a = fract( TexCoord.x / invScreenSize.x );
	float b = fract( TexCoord.y / invScreenSize.y );
	for( int m = -1; m <= 2; m++ )
	{
		for( int n = -1; n <= 2; n++ )
		{
			vec4 vecData = texture2D( textureSampler,
				TexCoord + vec2( texelSizeX * float( m ),
				                 texelSizeY * float( n ) ) );
			float fx = BellFunc( float( m ) - a );
			float fy = BellFunc( -( float( n ) - b ) );
			nSum += vecData * ( fx * fy );
			nDenom += vec4( fx * fy );
		}
	}
	return nSum / nDenom; // normalise by the total weight
}
void main() {
	// Sample the external (video) texture with bicubic filtering.
	vec4 filteredColor = BiCubic( sTexture, vTextureCoord );
	gl_FragColor = filteredColor;
}