Android data timestamps

I am developing an application in which I do some real-time image processing. In my camera view I process each preview frame, looping over every pixel and computing the sum of the Y values for that frame. I then save the result to a CSV file, and everything works perfectly. In addition to this, I would also like to store in the CSV the time elapsed between each frame along with each Y sum.

ImageProcessing class

public abstract class ImageProcessing {

    public static int YUV420SPtoYSum(byte[] yuv420sp, int width, int height) {

        if (yuv420sp == null)
            return 0;

        int sum = 0;
        final int ii = 0;
        final int ij = 0;
        final int di = +1;
        final int dj = +1;
        int y = 0;
        for (int i = 0, ci = ii; i < height; ++i, ci += di) {
            for (int j = 0, cj = ij; j < width; ++j, cj += dj) {
                y = (0xff & ((int) yuv420sp[ci * width + cj]));
                //y = y < 16 ? 16 : y;

                sum += y;
            }
        }
        return sum;
    }
}
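
The CameraView class below also calls YUV420SPtoUSum and YUV420SPtoVSum, which are not included in the question. A minimal sketch of what they could look like as additional methods in ImageProcessing, assuming the NV21 (YUV420SP) layout in which interleaved V/U bytes follow the Y plane; the method names are taken from the calls below, the bodies are an assumption:

    // Hypothetical companions to YUV420SPtoYSum. In NV21 the Y plane
    // (width * height bytes) is followed by interleaved V,U pairs,
    // one pair per 2x2 block of pixels.
    public static int YUV420SPtoUSum(byte[] yuv420sp, int width, int height) {
        if (yuv420sp == null)
            return 0;
        int frameSize = width * height;
        int sum = 0;
        for (int i = frameSize; i + 1 < yuv420sp.length; i += 2) {
            sum += 0xff & yuv420sp[i + 1]; // U is the second byte of each pair
        }
        return sum;
    }

    public static int YUV420SPtoVSum(byte[] yuv420sp, int width, int height) {
        if (yuv420sp == null)
            return 0;
        int frameSize = width * height;
        int sum = 0;
        for (int i = frameSize; i + 1 < yuv420sp.length; i += 2) {
            sum += 0xff & yuv420sp[i]; // V is the first byte of each pair
        }
        return sum;
    }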

CameraView class

public class CameraView extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {

private static final String TAG = "CameraView";
Camera.Size mPreviewSize;
List<Camera.Size> mSupportedPreviewSizes;
private SurfaceHolder mHolder;
private Camera mCamera;
int img_Y_Avg, img_U_Avg, img_V_Avg;


public interface PreviewReadyCallback {
    void onPreviewFrame(int yAverage, int uAverage, int vAverage); // Any value you want to get
}

PreviewReadyCallback mPreviewReadyCallback = null;

public void setOnPreviewReady(PreviewReadyCallback cb) {
    mPreviewReadyCallback = cb;
}



public CameraView(Context context, Camera camera){
    super(context);
    mCamera = camera;

    //mCamera.setDisplayOrientation(90);
    mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();
    for(Camera.Size str: mSupportedPreviewSizes)
        Log.e(TAG, str.width + "/" + str.height);

    mHolder = getHolder();
    mHolder.addCallback(this);
    mHolder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
}

@Override
public void surfaceCreated(SurfaceHolder surfaceHolder){
    try{
        mCamera.setPreviewDisplay(surfaceHolder);
        mCamera.startPreview();
    }catch(Exception e){
        Log.d("ERROR","Camera error on SurfaceCreated" + e.getMessage());
    }
}

@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i2, int i3) {

    if(mHolder.getSurface() == null)
        return;
    try{
        mCamera.stopPreview();
    }catch(Exception e) {
        Log.d("ERROR","Camera error on SurfaceChanged" + e.getMessage());
    }
    try {
        Camera.Parameters parameters = mCamera.getParameters();

        parameters.setPreviewSize(176, 144);
        mCamera.cancelAutoFocus();
        //parameters.setAutoExposureLock(false);
        mCamera.setDisplayOrientation(90);
        //set fps
        parameters.setPreviewFpsRange(16000, 16000);
        //on flash
        parameters.setFlashMode(parameters.FLASH_MODE_AUTO);
        //parameters.setAutoWhiteBalanceLock(true);
        parameters.setPreviewFormat(ImageFormat.NV21);

        /*if (parameters.getMaxNumMeteringAreas() > 0){ // check that metering areas are supported
            List<Camera.Area> meteringAreas = new ArrayList<Camera.Area>();

            Rect areaRect1 = new Rect(-50, -50, 50, 50);    // specify an area in center of image
            meteringAreas.add(new Camera.Area(areaRect1, 1000)); // set weight to 60%
            parameters.setMeteringAreas(meteringAreas);
        }*/



        //mCamera.setDisplayOrientation(90);
        mCamera.setParameters(parameters);
        mCamera.setPreviewDisplay(mHolder);
        mCamera.setPreviewCallback(this);
        mCamera.startPreview();

    } catch (IOException e) {
        Log.d("ERROR","Camera error on SurfaceChanged" + e.getMessage());
    }
}



@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
    if (mCamera != null){
        //mCamera.stopPreview();
        //mCamera.release();
    }

}

@Override
public void onPreviewFrame(byte[] data, Camera camera){

    //check if data is null
    if (data == null)
        throw new NullPointerException();

    Camera.Size size = camera.getParameters().getPreviewSize();

    //check if size is null
    if(size == null)
        throw new NullPointerException();


    //set resolution of camera view to optimal setting
    int width = size.width;
    int height = size.height;
    Log.d("Resolution ", " "+String.valueOf(width)+" "+String.valueOf(height));

    //call ImageProcess on the data to decode YUV420SP to RGB
    img_Y_Avg = ImageProcessing.YUV420SPtoYSum(data, width, height);
    img_U_Avg = ImageProcessing.YUV420SPtoUSum(data, width, height);
    img_V_Avg = ImageProcessing.YUV420SPtoVSum(data, width, height);

    mPreviewReadyCallback.onPreviewFrame(img_Y_Avg, img_U_Avg, img_V_Avg);

}




@Override
protected  void onMeasure(int widthMeasureSpec, int heightMeasureSpec){
    final int width = resolveSize(getSuggestedMinimumWidth(),widthMeasureSpec);
    final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
    setMeasuredDimension(width, height);

    if(mSupportedPreviewSizes != null){
        mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
        Log.d("Resolution ", " "+mPreviewSize);
    }
}


private Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h){
    final double ASPECT_TOLERANCE = 0.1;
    double targetRatio = (double) h / w;

    if (sizes == null) return null;

    Camera.Size optimalSize = null;
    double minDiff = Double.MAX_VALUE;

    int targetHeight = h;

    for (Camera.Size size : sizes){
        double ratio = (double) size.width  / size.height;
        if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
        if (Math.abs(size.height - targetHeight) < minDiff){
            optimalSize = size;
            minDiff = Math.abs(size.height - targetHeight);
        }
    }

    if (optimalSize == null){
        minDiff = Double.MAX_VALUE;
        for (Camera.Size size : sizes){
            if (Math.abs(size.height - targetHeight) < minDiff){
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        }
    }
    return optimalSize;
}
}

MainActivity class

public class MainActivity extends AppCompatActivity implements CameraView.PreviewReadyCallback {
private static Camera camera = null;
private CameraView image = null;

private LineChart bp_graph;
private static int img_Y_Avg = 0, img_U_Avg = 0, img_V_Avg = 0;
double valueY, valueU, valueV;
Handler handler;
private int readingRemaining = 600;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    bp_graph = (LineChart)findViewById(R.id.graph);


    graph_features();

    //open camera
    try {
        camera = Camera.open();

        handler = new Handler();
        final Runnable runnable = new Runnable() {
            @Override
            public void run() {
                camera.stopPreview();
                camera.release();
            }
        };
        handler.postDelayed(runnable, 30000);

    } catch (Exception e) {
        Log.d("ERROR", "Failed to get camera: " + e.getMessage());
    }

    if (camera != null) {
        image = new CameraView(this, camera);
        FrameLayout camera_view = (FrameLayout) findViewById(R.id.camera_view);
        camera_view.addView(image);
        image.setOnPreviewReady(this);
    }

    //close camera button
    ImageButton imgClose = (ImageButton) findViewById(R.id.imgClose);
    imgClose.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            System.exit(0);
        }
    });

}


@Override
protected void onResume(){
    super.onResume();
}

@Override
protected void onPause() {
    super.onPause();
}

private void graph_features(){
    bp_graph.getDescription().setEnabled(false);


    //enable touch gesture
    bp_graph.setTouchEnabled(true);

    //enable scaling
    bp_graph.setDragEnabled(true);

    //scale and drag
    bp_graph.setScaleEnabled(true);
    bp_graph.setDrawGridBackground(false);

    //enable pinch zoom in
    bp_graph.setPinchZoom(true);

    //alternative background color
    bp_graph.setBackgroundColor(Color.LTGRAY);

    //work on data
    LineData lineData = new LineData();
    lineData.setValueTextColor(Color.WHITE);

    //add data to line chart
    bp_graph.setData(lineData);

    //animate
    bp_graph.animateX(600);

    Legend legend = bp_graph.getLegend();

    //custom legend
    legend.setForm(Legend.LegendForm.LINE);
    legend.setTextColor(Color.WHITE);

    XAxis x1 = bp_graph.getXAxis();
    x1.setTextColor(Color.WHITE);
    x1.setDrawGridLines(false);
    x1.setAvoidFirstLastClipping(true);
    x1.setPosition(XAxis.XAxisPosition.BOTTOM);

    YAxis y1 = bp_graph.getAxisLeft();
    y1.setTextColor(Color.WHITE);


    y1.setAxisMaximum(5000000);
    y1.setAxisMinimum(100000);
    y1.setDrawGridLines(true);
    //y1.setInverted(true);

    YAxis y2 = bp_graph.getAxisRight();
    y2.setEnabled(false);
}


//method to create set
  private ILineDataSet createSet() {
      LineDataSet set = new LineDataSet(null, "PPG");

      set.setLineWidth(1.0f);
      set.setCircleRadius(1.0f);
      set.setColor(Color.rgb(240, 99, 99));
      set.setCircleColor(Color.rgb(240, 99, 99));
      set.setHighLightColor(Color.rgb(190, 190, 190));
      set.setAxisDependency(YAxis.AxisDependency.LEFT);
      set.setValueTextSize(1.0f);
      return set;
}

@Override
public void onPreviewFrame(int ySum, int uSum, int vSum) {
    img_Y_Avg = ySum;
    img_U_Avg = uSum;
    img_V_Avg = vSum;

    //set value of Y on the text view
    TextView valueOfY = (TextView)findViewById(R.id.valueY);
    valueY = img_Y_Avg;
    valueOfY.setText(Double.toString(img_Y_Avg));

    //set value of U on the text view
    TextView valueOfU = (TextView)findViewById(R.id.valueU);
    valueU = img_U_Avg;
    valueOfU.setText(Double.toString(img_U_Avg));

    //set value of V on the text view
    TextView valueOfV = (TextView)findViewById(R.id.valueV);
    valueV = img_V_Avg;
    valueOfV.setText(Double.toString(img_V_Avg));

    //store value to array list
    ArrayList<Integer> yAverage = new ArrayList<Integer>();
    yAverage.add(img_Y_Avg);
    //Log.d("MyEntryData", String.valueOf(yAverage));

    //store u values to array
    ArrayList<Integer> uAverage = new ArrayList<Integer>();
    uAverage.add(img_U_Avg);
    //Log.d("MyEntryData", String.valueOf(uAverage));

    //store v values to array
    ArrayList<Integer> vAverage = new ArrayList<Integer>();
    vAverage.add(img_V_Avg);
    //Log.d("MyEntryData", String.valueOf(vAverage));

    float start = System.nanoTime();
    int diff = (int) ((System.currentTimeMillis()/1000) - start);

    ArrayList<Integer> difference = new ArrayList<Integer>();
    difference.add(diff);

    Log.d("time", String.valueOf(start));


    ArrayList<Integer> getValues = new ArrayList<Integer>();
    for(int i = 0; i < uAverage.size(); i++) {
        //getValues.add(difference.get(i));
        getValues.add(yAverage.get(i));
        getValues.add(uAverage.get(i));
        getValues.add(vAverage.get(i));
    }


    String filename = new SimpleDateFormat("yyyyMMddHHmm'.csv'").format(new Date());

    File directoryDownload = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS);
    File logDir = new File (directoryDownload, "bpReader"); //Creates a new folder in DOWNLOAD directory
    logDir.mkdirs();
    File file = new File(logDir, filename);

    FileOutputStream outputStream = null;
    try {
           outputStream = new FileOutputStream(file, true);
           //outputStream = openFileOutput(filename, Context.MODE_PRIVATE);
           for (int i = 0; i < uAverage.size(); i += 3) {
               //outputStream.write((getValues.get(i) + ",").getBytes());
               outputStream.write((getValues.get(i) + ",").getBytes());
               outputStream.write((getValues.get(i + 1) + ",").getBytes());
               outputStream.write((getValues.get(i + 2) + "\n").getBytes());

            }
            outputStream.close();
        } catch (Exception e) {
            e.printStackTrace();
        }

    Log.d("MyEntryData", String.valueOf(getValues));


    handler = new Handler();
    final Runnable runnable = new Runnable() {
        @Override
        public void run() {
            readingRemaining = readingRemaining -1;

            if (readingRemaining > 0){
                plotGraph(img_Y_Avg);
                //plotGraph(img_U_Avg);
                //plotGraph(img_V_Avg);
            }
        }
    };

    handler.postDelayed(runnable, 100);

    //Log.d("MyEntryData", String.valueOf(img_Y_Avg +" "+ img_U_Avg+" "+img_V_Avg));

}

private void plotGraph(double graph_data){
    LineData data = bp_graph.getData();
    if (data != null){
        ILineDataSet set = data.getDataSetByIndex(0);

        if (set == null){
            set = createSet();
            data.addDataSet(set);
        }

        //add a new value
        int randomDataSetIndex = (int) (Math.random() * data.getDataSetCount());
        float yValue = (float)  graph_data;


        data.addEntry(new Entry(data.getDataSetByIndex(randomDataSetIndex).getEntryCount(), yValue), randomDataSetIndex);

        //notify chart data have changed
        bp_graph.notifyDataSetChanged();

        bp_graph.setVisibleXRangeMaximum(100);

        //scroll to last entry
        bp_graph.moveViewTo(data.getEntryCount() - 7, 50f, YAxis.AxisDependency.RIGHT);
    }
}}

To timestamp each frame, measure the time before and after the sum computations in CameraView.onPreviewFrame and pass the difference through the callback:

long startTime = System.currentTimeMillis();

img_Y_Avg = ImageProcessing.YUV420SPtoYSum(data, width, height);
img_U_Avg = ImageProcessing.YUV420SPtoUSum(data, width, height);
img_V_Avg = ImageProcessing.YUV420SPtoVSum(data, width, height);

long finishTime = System.currentTimeMillis();


mPreviewReadyCallback.onPreviewFrame(img_Y_Avg, img_U_Avg, img_V_Avg,finishTime-startTime);

The PreviewReadyCallback interface has to be extended to match:

public interface PreviewReadyCallback {
    void onPreviewFrame(int yAverage, int uAverage, int vAverage, long time);
}
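
On the MainActivity side, the onPreviewFrame override then has to take the extra parameter, and the elapsed time can be written out as an extra CSV column. A minimal sketch under that assumption, reusing the file setup from the question's onPreviewFrame:

@Override
public void onPreviewFrame(int ySum, int uSum, int vSum, long time) {
    img_Y_Avg = ySum;
    img_U_Avg = uSum;
    img_V_Avg = vSum;

    // same Download/bpReader location as in the question
    String filename = new SimpleDateFormat("yyyyMMddHHmm'.csv'").format(new Date());
    File logDir = new File(Environment.getExternalStoragePublicDirectory(
            Environment.DIRECTORY_DOWNLOADS), "bpReader");
    logDir.mkdirs();
    File file = new File(logDir, filename);

    try {
        // one row per frame: elapsed time first, then the three sums
        FileOutputStream outputStream = new FileOutputStream(file, true);
        outputStream.write((time + "," + ySum + "," + uSum + "," + vSum + "\n").getBytes());
        outputStream.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}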

To record the time elapsed between consecutive frames instead of the per-frame processing time, keep the previous timestamp and update it after each callback:

long oldTime = System.currentTimeMillis();
img_Y_Avg = ImageProcessing.YUV420SPtoYSum(data, width, height);
img_U_Avg = ImageProcessing.YUV420SPtoUSum(data, width, height);
img_V_Avg = ImageProcessing.YUV420SPtoVSum(data, width, height);

long newTime = System.currentTimeMillis();

mPreviewReadyCallback.onPreviewFrame(img_Y_Avg, img_U_Avg, img_V_Avg,newTime-oldTime);
oldTime=newTime;
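
For the frame-to-frame delta to survive between callbacks, oldTime cannot be a local variable inside onPreviewFrame; one way to arrange it, as a sketch, is to keep it as a field of CameraView:

private long oldTime = 0;

@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    long newTime = System.currentTimeMillis();
    // elapsed time since the previous preview frame (0 for the first one)
    long delta = (oldTime == 0) ? 0 : newTime - oldTime;
    oldTime = newTime;

    // ... compute img_Y_Avg, img_U_Avg and img_V_Avg exactly as before ...

    mPreviewReadyCallback.onPreviewFrame(img_Y_Avg, img_U_Avg, img_V_Avg, delta);
}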