在android中使用Camera API以不同的焦距模式拍照
我需要创建一个应用程序:当我点击拍摄一张照片时,它会以三种不同的对焦模式(自动对焦、微距对焦和无限远对焦)分别保存 3 张 JPEG(如果设备支持的话)。我已经完成了一个基本的相机应用,代码如下。请问如何实现这一功能?
// Camera demo activity: shows a live preview, captures preview frames to JPEG
// for ~5 seconds after "capture" is pressed, and plays the saved frames back
// in an ImageView driven by horizontal touch drags.
// NOTE(review): the author's stated goal is to save the same shot in three
// focus modes (auto / macro / infinity); the current code captures in a single
// hard-coded mode only -- see startCamera().
public class MainActivity extends Activity implements Constants,SurfaceHolder.Callback,Camera.PreviewCallback,
Camera.AutoFocusCallback, OnTouchListener{
private ViewFlipper myviewflipper; // unused in the code shown
private float initialXpoint; // unused in the code shown
private Camera mCamera; // open camera instance; null while stopped
private SurfaceView mPreviewSV; // live preview surface
private SurfaceHolder mSurfaceHolder; // holder of mPreviewSV, callback target
private Button mCapture; // starts the 5-second capture burst
private Button mChange; // front/back camera toggle
private Button mReturn; // restarts the activity
private Button mLoad; // loads saved frames for playback
private ImageView mImageView; // playback target, driven by touch drags
private int mPicturesCaptured = 0; // number of JPEGs written so far
boolean isCaptureClicked = false; // set on capture click; never read in the code shown
boolean encodingCompleted = false; // one-shot flag set when the 5 s window ends
long startTime; // capture-click timestamp (ms); 0 until first click
int currentFile = 0; // index of the frame currently shown in playback
int initialPointer = -1; // drag start / last-consumed x (written on UI thread)
int secondPointer = -1; // current drag x (read by DisplayTask thread)
int direction = -1; // unused in the code shown
CameraSize picSize = BACK_CAM_PREVIEW_SIZE_3 ; // unused in the code shown
File mFile; // output directory (Constants.Cameraeffect)
ArrayList<String> imagePath; // paths of the saved frames, set by mLoad
ProgressDialog mEncodingProgress; // "Encoding" spinner shown during capture
DisplayTask mTask; // background playback task
Bitmap mDisplayBitmap; // last decoded playback frame
File path; // scratch: directory handle used by list/delete
File[] files; // scratch: listing of the output directory
/**
 * Activity entry point: switches to fullscreen, inflates the layout,
 * ensures the JPEG output directory exists and wires up the UI.
 *
 * Fix: mkdir() silently fails when any parent directory is missing;
 * mkdirs() creates the whole path.
 */
@Override
protected void onCreate(Bundle savedInstanceState)
{
    super.onCreate(savedInstanceState);
    requestWindowFeature(Window.FEATURE_NO_TITLE);
    getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
            WindowManager.LayoutParams.FLAG_FULLSCREEN);
    setContentView(R.layout.activity_main);
    mFile = new File(Constants.Cameraeffect);
    if (!mFile.exists())
        mFile.mkdirs(); // mkdirs(): also create missing parent directories
    setupUI();
}
// Binds the views from activity_main, registers this activity as the surface
// and touch callback, and installs the four button click handlers.
private void setupUI()
{
mPreviewSV = (SurfaceView) findViewById(R.id.sv_cam_preview);
mCapture = (Button) findViewById(R.id.button_capture);
mChange = (Button) findViewById(R.id.toggle);
mReturn = (Button) findViewById(R.id.button_return);
mImageView = (ImageView) findViewById(R.id.display);
mImageView.setOnTouchListener(this);
mLoad = (Button) findViewById(R.id.button_load);
mSurfaceHolder = mPreviewSV.getHolder();
mSurfaceHolder.addCallback(this);
// Capture: wipe earlier output, then let onPreviewFrame() save every
// preview frame for the next 5 seconds while progressUpdate() shows a dialog.
mCapture.setOnClickListener(new View.OnClickListener()
{
public void onClick(View v)
{
deleteFiles();
mPicturesCaptured = 0;
isCaptureClicked = true;
startTime=System.currentTimeMillis();
progressUpdate();
}
});
// Toggle between front and back camera.
// NOTE(review): getCurrentCameraId() reports a value based on the FIRST
// enumerated camera, not the one currently open, so this toggle cannot
// actually alternate -- see that method. The case labels below presumably
// assume Constants.BACK_CAMERA == 0 and FRONT_CAMERA == 1; confirm.
mChange.setOnClickListener(new View.OnClickListener()
{
public void onClick(View v)
{
int res = getCurrentCameraId();
switch(res){
case 0:
stopCamera();
startCamera(Constants.FRONT_CAMERA);
break;
case 1:
stopCamera();
startCamera(Constants.BACK_CAMERA);
break;
}
}
});
// Load: list the saved frames, show the first one and start the
// swipe-driven playback task (see DisplayTask and onTouch()).
mLoad.setOnClickListener(new View.OnClickListener()
{
public void onClick(View v)
{
imagePath=ListAllFiles();
mImageView.bringToFront();
mImageView.setVisibility(View.VISIBLE);
mImageView.setImageBitmap(decodeFile(imagePath.get(0)));
mTask = new DisplayTask();
mTask.execute("");
}
});
// Return: relaunch this activity with its own intent to reset all state.
mReturn.setOnClickListener(new View.OnClickListener() {
public void onClick(View arg0) {
// TODO Auto-generated method stub
Intent intent = getIntent();
finish();
startActivity(intent);
}
});
}
/**
 * Lists the absolute paths of all files in the Constants.Cameraeffect
 * output directory.
 *
 * Fix: File.listFiles() returns null when the path is not a directory or
 * cannot be read (e.g. missing storage permission); that case previously
 * crashed with a NullPointerException and is now handled.
 *
 * @return paths of the saved frames; empty list if none are readable
 */
private ArrayList<String> ListAllFiles() {
    ArrayList<String> tFileList = new ArrayList<String>();
    path = new File(Constants.Cameraeffect);
    if (path.exists()) {
        files = path.listFiles();
        if (files != null) { // null if not a directory or on I/O error
            for (int i = 0; i < files.length; i++) {
                tFileList.add(files[i].toString());
            }
        }
    }
    return tFileList;
}
/**
 * Deletes every file in the Constants.Cameraeffect output directory so a new
 * capture burst starts with a clean numbering.
 *
 * Fix: File.listFiles() returns null when the path is not a directory or
 * cannot be read; that case previously crashed with a NullPointerException
 * and is now handled.
 */
public void deleteFiles() {
    path = new File(Constants.Cameraeffect);
    if (path.exists()) {
        files = path.listFiles();
        if (files != null) { // null if not a directory or on I/O error
            for (int i = 0; i < files.length; i++) {
                files[i].delete();
            }
        }
    }
}
/**
 * Maps the FIRST camera enumerated by the system to a Constants camera id,
 * or returns -1 when no camera is present.
 *
 * Fix: removed the unused local mCameraId and the now-unneeded
 * {@code @SuppressWarnings("unused")}.
 *
 * NOTE(review): despite its name this does NOT report which camera is
 * currently open -- the loop returns on index 0 (on virtually every device
 * the back camera), so the front/back toggle in setupUI() can never
 * alternate. The proper fix is to remember the id passed to startCamera()
 * in a field; that requires a change outside this method.
 */
int getCurrentCameraId() {
    CameraInfo ci = new CameraInfo();
    for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
        Camera.getCameraInfo(i, ci);
        if (ci.facing == CameraInfo.CAMERA_FACING_BACK) {
            return Constants.BACK_CAMERA;
        } else if (ci.facing == CameraInfo.CAMERA_FACING_FRONT) {
            return Constants.FRONT_CAMERA;
        }
    }
    return -1;
}
/**
 * Shows an "Encoding" dialog while frames are being captured and polls
 * progress on a worker thread for the 5-second capture window.
 *
 * Fixes: ProgressDialog.setProgress() is a UI operation and was called from
 * the worker thread -- it is now posted via runOnUiThread(). The swallowed
 * InterruptedException now re-interrupts the thread and exits the loop.
 *
 * NOTE(review): with STYLE_SPINNER the setIndeterminate(false)/setProgress()
 * calls have no visible effect; STYLE_HORIZONTAL would show a real bar.
 */
private void progressUpdate()
{
    mEncodingProgress = new ProgressDialog(this);
    mEncodingProgress.setProgressStyle(ProgressDialog.STYLE_SPINNER);
    mEncodingProgress.setMessage("Encoding");
    mEncodingProgress.setIndeterminate(false);
    mEncodingProgress.show();
    new Thread(new Runnable()
    {
        public void run()
        {
            while ((System.currentTimeMillis() - startTime) <= 5000)
            {
                try
                {
                    Thread.sleep(10);
                }
                catch (InterruptedException e)
                {
                    Thread.currentThread().interrupt(); // preserve interrupt status
                    return;
                }
                // UI mutation must happen on the main thread.
                runOnUiThread(new Runnable()
                {
                    public void run()
                    {
                        if (mEncodingProgress != null)
                            mEncodingProgress.setProgress(mPicturesCaptured + 1);
                    }
                });
            }
        }
    }).start();
}
/**
 * Opens the camera with the given id, configures it and starts the preview
 * on this activity's SurfaceView. Any previously open camera is released
 * first.
 *
 * Fixes:
 *  - "infinite" is not a valid focus-mode string; the framework constant
 *    Camera.Parameters.FOCUS_MODE_INFINITY ("infinity") is used instead,
 *    and only after checking the device actually supports it (setParameters
 *    throws on unsupported values).
 *  - Camera.open() and setParameters() throw RuntimeException (camera busy,
 *    bad parameter); that is now caught so failure releases the camera
 *    instead of crashing the app.
 *  - Removed the unneeded {@code @SuppressWarnings("unused")}.
 *
 * To capture in three modes (auto / macro / infinity) the focus mode set
 * here is the value to vary between shots.
 *
 * @param id camera id as accepted by Camera.open()
 */
private void startCamera(int id)
{
    try
    {
        if (mCamera != null)
            stopCamera();
        mCamera = Camera.open(id);
        Camera.Parameters parameters = mCamera.getParameters();
        parameters.setPreviewSize(1280, 720);
        // Guard: setFocusMode() with an unsupported value makes
        // setParameters() throw on many devices.
        if (parameters.getSupportedFocusModes()
                .contains(Camera.Parameters.FOCUS_MODE_INFINITY))
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_INFINITY);
        parameters.setJpegQuality(100);
        parameters.setAutoExposureLock(true);
        parameters.setAutoWhiteBalanceLock(true);
        parameters.setRotation(0);
        mCamera.setDisplayOrientation(0);
        mCamera.setParameters(parameters);
        mCamera.setPreviewCallback(this);
        mCamera.setPreviewDisplay(mPreviewSV.getHolder());
        mCamera.startPreview();
    }
    catch (IOException e)
    {
        // Preview surface unusable.
        stopCamera();
    }
    catch (RuntimeException e)
    {
        // Camera unavailable or parameters rejected.
        stopCamera();
    }
}
/**
 * Stops the preview, detaches the preview and surface callbacks and
 * releases the camera. Safe to call repeatedly or when no camera is open.
 */
private void stopCamera()
{
    if (mCamera == null)
    {
        return; // nothing to release
    }
    mCamera.stopPreview();
    mCamera.setPreviewCallback(null);
    mPreviewSV.getHolder().removeCallback(this);
    mCamera.release();
    mCamera = null;
}
// --- Activity lifecycle -------------------------------------------------
// The camera is (re)started indirectly: when the activity returns to the
// foreground the preview surface is recreated and surfaceChanged() calls
// startCamera(0).
@Override
protected void onStart()
{
super.onStart();
}
@Override
protected void onResume()
{
super.onResume();
}
@Override
protected void onPause()
{
// Release the camera as soon as we leave the foreground so other apps
// (and our own restart path) can open it.
stopCamera();
super.onPause();
}
@Override
protected void onDestroy()
{
stopCamera(); // idempotent; mCamera is already null if onPause() ran
super.onDestroy();
}
// --- SurfaceHolder.Callback ---------------------------------------------
public void surfaceCreated(SurfaceHolder holder)
{
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
{
// NOTE(review): hard-codes camera id 0 (typically the back camera), so any
// front/back toggle is undone whenever the surface geometry changes.
startCamera(0);
}
public void surfaceDestroyed(SurfaceHolder holder)
{
}
/**
 * Preview-frame callback. For 5 seconds after the capture button is pressed
 * (startTime) every incoming preview frame is compressed to JPEG and saved;
 * once the window closes the progress dialog is dismissed exactly once.
 *
 * NOTE(review): this saves many frames in ONE focus mode rather than one
 * frame per focus mode; the mode would have to be switched between shots
 * (see startCamera()) to meet the author's stated goal.
 */
public void onPreviewFrame(byte[] data, Camera camera)
{
if (System.currentTimeMillis()-startTime<=5000)
{
// Inside the 5-second capture window: persist this frame.
savingJpegToSDCard(data);
}
else
{
// Window over (or capture never started, startTime == 0): tear the
// dialog down exactly once.
if(mEncodingProgress!=null){
mEncodingProgress.dismiss();
mEncodingProgress=null;
encodingCompleted=true;
}
}
// Placeholder for post-capture processing; currently only resets the flag.
if(encodingCompleted){
/*
* Log.i("","sent to JNI");
*/
encodingCompleted=false;
}
}
// Camera.AutoFocusCallback -- registered via the implements clause, but the
// activity never calls Camera.autoFocus(), so this is currently dead code.
// It is the natural hook for the auto/macro/infinity sequence: a picture can
// be taken here once focusing finishes (arg0 == true means focus succeeded).
public void onAutoFocus(boolean arg0, Camera arg1) {
// TODO Auto-generated method stub
}
/**
 * Touch handler for the playback ImageView: records the horizontal finger
 * position; DisplayTask polls initialPointer/secondPointer on its own
 * thread and advances one frame when the drag distance exceeds 50 px.
 *
 * NOTE(review): ACTION_UP overwrites initialPointer with the lift position
 * rather than resetting both pointers -- confirm this is intended.
 * NOTE(review): these fields are read from DisplayTask's background thread
 * without synchronization.
 */
public boolean onTouch(View arg0, MotionEvent event) {
Log.d("","srikrishna:: ontouch");
switch(event.getAction()){
case(MotionEvent.ACTION_DOWN):
// Remember where the drag started.
initialPointer=(int) event.getX();
break;
case(MotionEvent.ACTION_MOVE):
// Track the current drag position for DisplayTask to compare against.
secondPointer=(int) event.getX();
break;
case(MotionEvent.ACTION_UP):
// Fall-through-free: last case, so the missing break is harmless.
initialPointer=(int) event.getX();
}
return true;
}
/**
 * Decodes a JPEG from disk, down-sampled so neither dimension greatly
 * exceeds 500 px, and returns an RGB_565 copy (half the memory of
 * ARGB_8888).
 *
 * Fix: BitmapFactory.decodeFile() returns null for an unreadable or corrupt
 * file; the original then crashed with a NullPointerException on copy().
 * Null is now propagated to the caller (ImageView.setImageBitmap(null)
 * simply clears the view).
 *
 * @param fPath absolute path of the image file
 * @return down-sampled RGB_565 bitmap, or null if decoding failed
 */
public Bitmap decodeFile(String fPath)
{
    BitmapFactory.Options opts = new BitmapFactory.Options();
    // First pass: read only the image bounds to choose a sample size.
    opts.inJustDecodeBounds = true;
    opts.inDither = false;
    opts.inPurgeable = true;
    opts.inInputShareable = true;
    BitmapFactory.decodeFile(fPath, opts);
    final int REQUIRED_SIZE = 500;
    int scale = 1;
    if (opts.outHeight > REQUIRED_SIZE || opts.outWidth > REQUIRED_SIZE)
    {
        final int heightRatio = Math
                .round((float) opts.outHeight / (float) REQUIRED_SIZE);
        final int widthRatio = Math
                .round((float) opts.outWidth / (float) REQUIRED_SIZE);
        // Smaller ratio keeps the larger dimension at or above REQUIRED_SIZE.
        scale = heightRatio < widthRatio ? heightRatio : widthRatio;
    }
    // Second pass: decode the pixels at the chosen sample size.
    opts.inJustDecodeBounds = false;
    opts.inSampleSize = scale;
    Bitmap decoded = BitmapFactory.decodeFile(fPath, opts);
    if (decoded == null)
        return null; // unreadable or corrupt file
    return decoded.copy(Bitmap.Config.RGB_565, false);
}
/**
 * Converts one YUV preview frame to JPEG and writes it into
 * Constants.Cameraeffect as "File&lt;N&gt;.jpeg".
 *
 * Fixes: the FileOutputStream was never closed (file-descriptor leak and
 * possibly unflushed data); mPicturesCaptured was incremented even when
 * opening the file failed, leaving gaps in the numbering. The stream is now
 * closed in a finally block and the counter advances only after a
 * successful write.
 *
 * @param b1 raw preview frame in the camera's current preview format
 */
private void savingJpegToSDCard(final byte[] b1)
{
    Camera.Parameters parameters = mCamera.getParameters();
    Size size = parameters.getPreviewSize();
    Log.i("", "abh size width and height " + size.width + " " + size.height);
    YuvImage image = new YuvImage(b1, parameters.getPreviewFormat(), size.width, size.height, null);
    File file = new File(Constants.Cameraeffect + "/File" + mPicturesCaptured + ".jpeg");
    FileOutputStream filecon = null;
    try
    {
        filecon = new FileOutputStream(file);
        image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 90, filecon);
        mPicturesCaptured++; // advance only after a successful write
    }
    catch (FileNotFoundException e)
    {
        e.printStackTrace();
    }
    finally
    {
        if (filecon != null)
        {
            try
            {
                filecon.close();
            }
            catch (IOException e)
            {
                e.printStackTrace();
            }
        }
    }
}
/**
 * Background playback: busy-polls the drag state written by onTouch() and,
 * whenever the finger has moved more than 50 px to the right since the last
 * shown frame, decodes and publishes the next saved frame to the ImageView.
 *
 * NOTE(review): doInBackground() loops forever and the task is never
 * cancelled, so the thread (and its CPU spin while no drag is in progress)
 * leaks until the process dies.
 * NOTE(review): currentFile is bounded by the constant 90, not by
 * imagePath.size(); imagePath.get(currentFile) throws once the index passes
 * the number of saved files.
 * NOTE(review): initialPointer/secondPointer are shared with the UI thread
 * without synchronization.
 */
class DisplayTask extends AsyncTask<String, Bitmap, Void>
{
@Override
protected void onPreExecute()
{
super.onPreExecute();
// Restart playback from the first saved frame.
currentFile = 0;
}
@Override
protected Void doInBackground(String... params)
{
while(true){
Log.i("","abh imagePath.size() "+imagePath.size());
// Rightward drag of more than 50 px advances one frame.
if(secondPointer-initialPointer >50)
{
Log.i("","abh currentFile "+currentFile);
mDisplayBitmap = decodeFile(imagePath.get(currentFile));
publishProgress(mDisplayBitmap);
try
{
Thread.sleep(10);
}
catch (InterruptedException e)
{
e.printStackTrace();
}
if(currentFile<=90)
currentFile++;
// Consume the drag so the next frame needs another 50 px.
initialPointer=secondPointer;
}
}
}
@Override
protected void onPostExecute(Void result)
{
super.onPostExecute(result);
}
@Override
protected void onProgressUpdate(Bitmap... values)
{
super.onProgressUpdate(values);
// Runs on the UI thread: show the newly decoded frame.
mImageView.setImageBitmap(values[0]);
}
}
public类MainActivity扩展活动实现常量、SurfaceHolder.Callback、Camera.PreviewCallback、,
摄像头。自动聚焦回显,OnTouchListener{
私有ViewFlipper myviewflipper;
私有float initialXpoint;
私人摄像机麦卡梅拉;
私人SurfaceView mPreviewSV;
私人地表持有人;
专用按钮;
专用按钮mChange;
专用按钮;
专用按钮加载;
私有图像视图mImageView;
private int mPicturesCaptured=0;
布尔值isCaptureClicked=false;
布尔编码完成=假;
长启动时间;
int currentFile=0;
int initialPointer=-1;
int secondPointer=-1;
int方向=-1;
CameraSize picSize=背面\摄像头\预览\大小\ 3;
文件文件;
阵列列表图像路径;
进展对话和编码进展;
显示任务mTask;
位图显示位图;
文件路径;
文件[]文件;
@凌驾
创建时受保护的void(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
requestWindowFeature(窗口。功能\u无\u标题);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_全屏,WindowManager.LayoutParams.FLAG_全屏);
setContentView(R.layout.activity_main);
mFile=新文件(Constants.camerafeffect);
如果(!mFile.exists())
mFile.mkdir();
setupUI();
}
私有void setupUI()
{
mPreviewSV=(SurfaceView)findViewById(R.id.sv\u cam\u预览);
mCapture=(按钮)findviewbyd(R.id.Button\u捕获);
mChange=(按钮)findViewById(R.id.toggle);
mReturn=(按钮)findviewbyd(R.id.Button\u返回);
mImageView=(ImageView)findviewbyd(R.id.display);
setOnTouchListener(这个);
mLoad=(按钮)findViewById(R.id.Button\u load);
mSurfaceHolder=mPreviewSV.getHolder();
mSurfaceHolder.addCallback(此);
mCapture.setOnClickListener(新视图.OnClickListener()
{
公共void onClick(视图v)
{
删除文件();
mPicturesCaptured=0;
isCaptureClicked=true;
startTime=System.currentTimeMillis();
progressUpdate();
}
});
mChange.setOnClickListener(新视图.OnClickListener()
{
公共void onClick(视图v)
{
int res=getCurrentCameraId();
开关(res){
案例0:
停止摄影机();
startCamera(常数前摄像头);
打破
案例1:
停止摄影机();
startCamera(常数后摄像头);
打破
}
}
});
mLoad.setOnClickListener(新视图.OnClickListener()
{
公共void onClick(视图v)
{
imagePath=ListAllFiles();
mImageView.bringToFront();
mImageView.setVisibility(View.VISIBLE);
setImageBitmap(解码文件(imagePath.get(0));
mTask=新的显示任务();
mTask.execute(“”);
}
});
mReturn.setOnClickListener(新视图.OnClickListener(){
公共void onClick(视图arg0){
//TODO自动生成的方法存根
Intent=getIntent();
完成();
星触觉(意向);
}
});
}
私有ArrayList ListAllFiles(){
ArrayList tFileList=新的ArrayList();
路径=新文件(Constants.camerafeffect);
if(path.exists()){
files=path.listFiles();
for (int i = 0; i < files.length; i++) { tFileList.add(files[i].toString()); }

【回答】在 startCamera 方法中,把对焦模式设置为所需的模式后再启动相机;如果想在点击按钮时动态切换对焦模式,就带上相应参数重新调用 startCamera。务必记得释放相机(这一点非常重要)。另外要注意,Android 上自定义相机的实现有很多坑(很多问题至今仍然存在,稍加调试就能发现),建议仔细阅读官方文档后再实现。
【提问者回复】我并不是想在点击按钮时切换焦距——我想要的是:点击"拍摄"按钮时,以三种不同的对焦模式同时捕获 3 张照片!有没有某种对焦回调方法,在对焦完成、可以拍摄图像时返回 true?如果有的话,我就可以利用它,并用 switch-case 在不同的对焦模式之间切换。