Android: modifying the camera preview with effects using GLSurfaceView


I have created a custom camera app and I am using GLSurfaceView with a GLSurfaceView.Renderer to draw the camera preview. That part works; now I am trying to apply filters (sepia, black and white, etc.) to the camera preview. How can I apply such effects to the preview?

Normal view:

Filter applied:

Here is my GLSurfaceView.Renderer:

public class MainRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {

private final String vss =
        "attribute vec2 vPosition;\n" +
                "attribute vec2 vTexCoord;\n" +
                "varying vec2 texCoord;\n" +
                "void main() {\n" +
                "  texCoord = vTexCoord;\n" +
                "  gl_Position = vec4 ( vPosition.x, vPosition.y, 0.0, 1.0 );\n" +
                "}";
private final String fss =
        "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;\n" +
                "uniform samplerExternalOES sTexture;\n" +
                "varying vec2 texCoord;\n" +
                "void main() {\n" +
                "  gl_FragColor = texture2D(sTexture,texCoord);\n" +
                "}";

private int[] hTex;
private FloatBuffer pVertex;
private FloatBuffer pTexCoord;
private int hProgram;

private Camera mCamera;
private SurfaceTexture mSTexture;

private boolean mUpdateST = false;

private CameraPreview mView;
Context mContext;
private String fileName;
private File sdRoot;
private String dir;
private ExifInterface exif;
private int orientation;
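// Writes the captured JPEG to sdRoot/dir, stores the EXIF orientation tag, and asks the media scanner to index the file.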
private android.hardware.Camera.PictureCallback pictureCallBack = new Camera.PictureCallback() {

    public void onPictureTaken(byte[] data, Camera camera) {

        fileName = "IMG_" + new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date()).toString() + ".jpg";
        File mkDir = new File(sdRoot, dir);
        mkDir.mkdirs();
        File pictureFile = new File(sdRoot, dir + fileName);
        try {
            FileOutputStream purge = new FileOutputStream(pictureFile);
            purge.write(data);
            purge.close();
        } catch (FileNotFoundException e) {
            Log.d("DG_DEBUG", "File not found: " + e.getMessage());
        } catch (IOException e) {
            Log.d("DG_DEBUG", "Error accessing file: " + e.getMessage());
        }
        // Adding Exif data for the orientation. For some strange reason the
        // ExifInterface class takes a string instead of a file.
        try {
            exif = new ExifInterface("/sdcard/" + dir + fileName);
            exif.setAttribute(ExifInterface.TAG_ORIENTATION, "" + orientation);
            exif.saveAttributes();
        } catch (IOException e) {
            e.printStackTrace();
        }
        mContext.sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, Uri.fromFile(pictureFile)));

    }
};

public MainRenderer(CameraPreview cameraPreview, Context context) {
    mView = cameraPreview;
    mContext = context;
    mCamera = getCameraInstance();
    float[] vtmp = { 1.0f, -1.0f, -1.0f, -1.0f, 1.0f, 1.0f, -1.0f, 1.0f };
    float[] ttmp = { 0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f, 1.0f };
    pVertex = ByteBuffer.allocateDirect(8 * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
    pVertex.put ( vtmp );
    pVertex.position(0);
    pTexCoord = ByteBuffer.allocateDirect(8*4).order(ByteOrder.nativeOrder()).asFloatBuffer();
    pTexCoord.put ( ttmp );
    pTexCoord.position(0);
}
public void close()
{
    mUpdateST = false;
    mSTexture.release();
    mCamera.stopPreview();
    mCamera.release();
    mCamera = null;
    deleteTex();
}
public void takePicture(File file,String dir,int orientation){
    this.orientation = orientation;
    sdRoot = file;
    this.dir=dir;
    mCamera.takePicture(null,null,pictureCallBack);
}
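// Creates the external OES texture that receives camera frames through the SurfaceTexture.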
private void initTex() {
    hTex = new int[1];
    GLES20.glGenTextures(1, hTex, 0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, hTex[0]);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
}

private void deleteTex() {
    GLES20.glDeleteTextures ( 1, hTex, 0 );
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    initTex();
    mSTexture = new SurfaceTexture ( hTex[0] );
    mSTexture.setOnFrameAvailableListener(this);
    try {
        mCamera.setPreviewTexture(mSTexture);
    } catch (IOException ioe) {
        Log.e("MainRenderer", "Failed to set camera preview texture", ioe);
    } catch (Exception e) {
        Log.e("MainRenderer", "Unexpected error while setting preview texture", e);
    }
    GLES20.glClearColor ( 1.0f, 1.0f, 0.0f, 1.0f );
    hProgram = loadShader ( vss, fss );
}

public Camera getCameraInstance() {
    Camera c = null;
    Camera.CameraInfo ci = new Camera.CameraInfo();
    try {
        for(int i=0;i<Camera.getNumberOfCameras();i++){
            Camera.getCameraInfo(i,ci);
            if(ci.facing== Camera.CameraInfo.CAMERA_FACING_FRONT)
                c=Camera.open(i);
        }
    } catch (Exception e) {
        Log.e("MainRenderer", "Failed to open the front-facing camera", e);
    }
    return c;

}
private static int loadShader ( String vss, String fss ) {
    int vshader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
    GLES20.glShaderSource(vshader, vss);
    GLES20.glCompileShader(vshader);
    int[] compiled = new int[1];
    GLES20.glGetShaderiv(vshader, GLES20.GL_COMPILE_STATUS, compiled, 0);
    if (compiled[0] == 0) {
        Log.e("Shader", "Could not compile vshader");
        Log.v("Shader", "Could not compile vshader:"+GLES20.glGetShaderInfoLog(vshader));
        GLES20.glDeleteShader(vshader);
        vshader = 0;
    }

    int fshader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
    GLES20.glShaderSource(fshader, fss);
    GLES20.glCompileShader(fshader);
    GLES20.glGetShaderiv(fshader, GLES20.GL_COMPILE_STATUS, compiled, 0);
    if (compiled[0] == 0) {
        Log.e("Shader", "Could not compile fshader");
        Log.v("Shader", "Could not compile fshader:"+GLES20.glGetShaderInfoLog(fshader));
        GLES20.glDeleteShader(fshader);
        fshader = 0;
    }

    int program = GLES20.glCreateProgram();
    GLES20.glAttachShader(program, vshader);
    GLES20.glAttachShader(program, fshader);
    GLES20.glLinkProgram(program);

    return program;
}

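// Sets the viewport, picks a supported camera preview size for the new surface, and starts the preview.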
@Override
public void onSurfaceChanged ( GL10 unused, int width, int height ) {
    GLES20.glViewport( 0, 0, width, height );
    Camera.Parameters param = mCamera.getParameters();
    List<Camera.Size> psize = param.getSupportedPreviewSizes();
    if ( psize.size() > 0 ) {
        int i;
        for ( i = 0; i < psize.size(); i++ ) {
            if ( psize.get(i).width < width || psize.get(i).height < height )
                break;
        }
        if ( i > 0 )
            i--;
        param.setPreviewSize(psize.get(i).width, psize.get(i).height);
        //Log.i("mr","ssize: "+psize.get(i).width+", "+psize.get(i).height);
    }
    param.set("orientation", "portrait");
    mCamera.setParameters ( param );
    mCamera.startPreview();
}

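// Latches the newest camera frame (if any) and draws it as a full-screen quad with the current shader program.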
@Override
public void onDrawFrame ( GL10 unused ) {
    GLES20.glClear( GLES20.GL_COLOR_BUFFER_BIT );

    synchronized(this) {
        if ( mUpdateST ) {
            mSTexture.updateTexImage();
            mUpdateST = false;
        }
    }

    GLES20.glUseProgram(hProgram);


    int ph = GLES20.glGetAttribLocation(hProgram, "vPosition");
    int tch = GLES20.glGetAttribLocation ( hProgram, "vTexCoord" );
    int th = GLES20.glGetUniformLocation ( hProgram, "sTexture" );

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, hTex[0]);
    GLES20.glUniform1i(th, 0);

    GLES20.glVertexAttribPointer(ph, 2, GLES20.GL_FLOAT, false, 4*2, pVertex);
    GLES20.glVertexAttribPointer(tch, 2, GLES20.GL_FLOAT, false, 4*2, pTexCoord );
    GLES20.glEnableVertexAttribArray(ph);
    GLES20.glEnableVertexAttribArray(tch);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    GLES20.glFlush();
}

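// Called whenever the camera produces a new frame; flags it for upload and requests a redraw.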
@Override
public synchronized void onFrameAvailable ( SurfaceTexture st ) {
    mUpdateST = true;
    mView.requestRender();
}
}
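For reference, the renderer above is driven by a GLSurfaceView subclass (the CameraPreview passed into its constructor), which is not shown in the question. A minimal sketch of that wiring, assuming an OpenGL ES 2.0 context and render-on-demand (which is what the mView.requestRender() call in onFrameAvailable implies), could look like this:

import android.content.Context;
import android.opengl.GLSurfaceView;

public class CameraPreview extends GLSurfaceView {

    private final MainRenderer mRenderer;

    public CameraPreview(Context context) {
        super(context);
        // The renderer uses GLES20 and samplerExternalOES, so request an ES 2.0 context.
        setEGLContextClientVersion(2);
        mRenderer = new MainRenderer(this, context);
        setRenderer(mRenderer);
        // Only redraw when the renderer calls requestRender(), i.e. when a new camera frame arrives.
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    }
}

The renderer's close() should be called when the preview is torn down (for example from the hosting activity's onPause) so the camera is released.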
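To get the black-and-white preview, replace the fragment shader fss with a version that converts each sample to its luminance: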
private final String fss =
    "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "varying vec2 texCoord;\n" +
            "void main() {\n" +
            "  vec4 tc = texture2D(sTexture, texCoord);\n" +
            "  float luminance = 0.3 * tc.r + 0.59 * tc.g + 0.11 * tc.b;\n" +
            "  gl_FragColor = vec4(luminance, luminance, luminance, 1.0);\n" +
            "}";