Android 无法在颤振相机上拍照,因为;“未配置表面”;
我想捕获图像并显示一个预览屏幕,显示捕获的图像。它以前工作得很好,但一旦我添加了对象检测,每次我尝试拍照时都会收到一组错误。我认为目标检测干扰了捕获Android 无法在颤振相机上拍照,因为;“未配置表面”;,android,flutter,dart,android-camera,android-camera2,Android,Flutter,Dart,Android Camera,Android Camera2,我想捕获图像并显示一个预览屏幕,显示捕获的图像。它以前工作得很好,但一旦我添加了对象检测,每次我尝试拍照时都会收到一组错误。我认为目标检测干扰了捕获 I/flutter (12681): /data/user/0/com.example.iSPY_Cancer/cache/2020-07-26 14:11:55.456052.png E/CameraDevice-0-LE(12681): submitRequestList - cannot use a surface that wasn't c
I/flutter (12681): /data/user/0/com.example.iSPY_Cancer/cache/2020-07-26 14:11:55.456052.png
E/CameraDevice-0-LE(12681): submitRequestList - cannot use a surface that wasn't configured
E/MethodChannel#plugins.flutter.io/camera(12681): Failed to handle method call
E/MethodChannel#plugins.flutter.io/camera(12681): java.lang.IllegalArgumentException: Bad argument passed to camera service
E/MethodChannel#plugins.flutter.io/camera(12681): at android.hardware.camera2.utils.CameraBinderDecorator.throwOnError(CameraBinderDecorator.java:114)
E/MethodChannel#plugins.flutter.io/camera(12681): at android.hardware.camera2.utils.CameraBinderDecorator$CameraBinderDecoratorListener.onAfterInvocation(CameraBinderDecorator.java:73)
E/MethodChannel#plugins.flutter.io/camera(12681): at android.hardware.camera2.utils.Decorator.invoke(Decorator.java:81)
E/MethodChannel#plugins.flutter.io/camera(12681): at java.lang.reflect.Proxy.invoke(Proxy.java:393)
E/MethodChannel#plugins.flutter.io/camera(12681): at $Proxy1.submitRequestList(Unknown Source)
E/MethodChannel#plugins.flutter.io/camera(12681): at android.hardware.camera2.impl.CameraDeviceImpl.submitCaptureRequest(CameraDeviceImpl.java:853)
E/MethodChannel#plugins.flutter.io/camera(12681): at android.hardware.camera2.impl.CameraDeviceImpl.capture(CameraDeviceImpl.java:741)
E/MethodChannel#plugins.flutter.io/camera(12681): at android.hardware.camera2.impl.CameraCaptureSessionImpl.capture(CameraCaptureSessionImpl.java:179)
E/MethodChannel#plugins.flutter.io/camera(12681): at io.flutter.plugins.camera.Camera.takePicture(Camera.java:253)
E/MethodChannel#plugins.flutter.io/camera(12681): at io.flutter.plugins.camera.MethodCallHandlerImpl.onMethodCall(MethodCallHandlerImpl.java:77)
E/MethodChannel#plugins.flutter.io/camera(12681): at io.flutter.plugin.common.MethodChannel$IncomingMethodCallHandler.onMessage(MethodChannel.java:226)
E/MethodChannel#plugins.flutter.io/camera(12681): at io.flutter.embedding.engine.dart.DartMessenger.handleMessageFromDart(DartMessenger.java:85)
E/MethodChannel#plugins.flutter.io/camera(12681): at io.flutter.embedding.engine.FlutterJNI.handlePlatformMessage(FlutterJNI.java:631)
E/MethodChannel#plugins.flutter.io/camera(12681): at android.os.MessageQueue.nativePollOnce(Native Method)
E/MethodChannel#plugins.flutter.io/camera(12681): at android.os.MessageQueue.next(MessageQueue.java:323)
E/MethodChannel#plugins.flutter.io/camera(12681): at android.os.Looper.loop(Looper.java:141)
E/MethodChannel#plugins.flutter.io/camera(12681): at android.app.ActivityThread.main(ActivityThread.java:5603)
E/MethodChannel#plugins.flutter.io/camera(12681): at java.lang.reflect.Method.invoke(Native Method)
E/MethodChannel#plugins.flutter.io/camera(12681): at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:774)
E/MethodChannel#plugins.flutter.io/camera(12681): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:652)
I/flutter (12681): CameraException(error, Bad argument passed to camera service)
下面是图像捕获的代码。如错误的第一行所示,图像路径
已成功打印,这意味着通向打印(路径)
的代码已成功打印,但其余代码不起作用。如果有人以前遇到过这些错误,请帮助我。我真的被卡住了
void _onCapturePressed(context) async {
// Take the Picture in a try / catch block. If anything goes wrong,
// catch the error.
try {
// Attempt to take a picture and log where it's been saved
final path = join(
// In this example, store the picture in the temp directory. Find
// the temp directory using the `path_provider` plugin.
(await getTemporaryDirectory()).path,
'${DateTime.now()}.png',
);
print(path);
await controller.takePicture(path);
// If the picture was taken, display it on a new screen
Navigator.push(
context,
MaterialPageRoute(
builder: (context) => PreviewImageScreen(imagePath: path),
),
);
} catch (e) {
// If an error occurs, log the error to the console.
print(e);
}
}
这是带有目标检测的整个屏幕的代码:
import 'package:camera/camera.dart';
import 'package:path/path.dart';
import 'package:path_provider/path_provider.dart';
import 'package:tflite/tflite.dart';
import 'dart:math' as math;
import '../preview_screen.dart';
import 'models.dart';
typedef void Callback(List<dynamic> list, int h, int w);
class Camera extends StatefulWidget {
final List<CameraDescription> cameras;
final Callback setRecognitions;
final String model;
Camera(this.cameras, this.model, this.setRecognitions);
@override
_CameraState createState() => new _CameraState();
}
class _CameraState extends State<Camera> {
double xPosition = 120;
double yPosition = 150;
List cameras;
int selectedCameraIdx;
String imagePath;
CameraController controller;
bool isDetecting = false;
@override
void initState() {
super.initState();
if (widget.cameras == null || widget.cameras.length < 1) {
print(widget.cameras);
print('No camera is found');
} else {
controller = new CameraController(
widget.cameras[0],
ResolutionPreset.high,
);
controller.initialize().then((_) {
if (!mounted) {
return;
}
setState(() {});
controller.startImageStream((CameraImage img) {
if (!isDetecting) {
isDetecting = true;
int startTime = new DateTime.now().millisecondsSinceEpoch;
if (widget.model == mobilenet) {
Tflite.runModelOnFrame(
bytesList: img.planes.map((plane) {
return plane.bytes;
}).toList(),
imageHeight: img.height,
imageWidth: img.width,
numResults: 2,
).then((recognitions) {
int endTime = new DateTime.now().millisecondsSinceEpoch;
print("Detection took ${endTime - startTime}");
widget.setRecognitions(recognitions, img.height, img.width);
isDetecting = false;
});
} else if (widget.model == posenet) {
Tflite.runPoseNetOnFrame(
bytesList: img.planes.map((plane) {
return plane.bytes;
}).toList(),
imageHeight: img.height,
imageWidth: img.width,
numResults: 2,
).then((recognitions) {
int endTime = new DateTime.now().millisecondsSinceEpoch;
print("Detection took ${endTime - startTime}");
widget.setRecognitions(recognitions, img.height, img.width);
isDetecting = false;
});
} else {
Tflite.detectObjectOnFrame(
bytesList: img.planes.map((plane) {
return plane.bytes;
}).toList(),
model: widget.model == yolo ? "YOLO" : "SSDMobileNet",
imageHeight: img.height,
imageWidth: img.width,
imageMean: widget.model == yolo ? 0 : 127.5,
imageStd: widget.model == yolo ? 255.0 : 127.5,
numResultsPerClass: 1,
threshold: widget.model == yolo ? 0.2 : 0.4,
).then((recognitions) {
int endTime = new DateTime.now().millisecondsSinceEpoch;
print("Detection took ${endTime - startTime}");
widget.setRecognitions(recognitions, img.height, img.width);
isDetecting = false;
});
}
}
});
});
}
}
@override
void dispose() {
controller?.dispose();
super.dispose();
}
@override
Widget build(BuildContext context) {
if (controller == null || !controller.value.isInitialized) {
return Container();
}
var tmp = MediaQuery.of(context).size;
var screenH = math.max(tmp.height, tmp.width);
var screenW = math.min(tmp.height, tmp.width);
tmp = controller.value.previewSize;
var previewH = math.max(tmp.height, tmp.width);
var previewW = math.min(tmp.height, tmp.width);
var screenRatio = screenH / screenW;
var previewRatio = previewH / previewW;
return Scaffold(
appBar: AppBar(
backgroundColor: Colors.lightBlue,
centerTitle: true,
elevation: 5,
title: Text("Pathomatic")),
body: Container(
decoration: new BoxDecoration(color: Colors.black),
child: SafeArea(
child: Column(
crossAxisAlignment: CrossAxisAlignment.stretch,
children: <Widget>[
Expanded(
flex: 2,
child: new Stack(
children: <Widget>[
new Container(
alignment: Alignment.center,
child: _cameraPreviewWidget(context),
),
new Align(
alignment: Alignment.center,
child: GestureDetector(
child: Stack(children: <Widget>[
Positioned(
top: yPosition,
left: xPosition,
child: Container(
// padding: EdgeInsets.only(top: 200.0, left: 20.0),
height: MediaQuery.of(context).size.height / 3,
width: MediaQuery.of(context).size.width / 3,
child:
Image.asset('assets/images/crosshair.png'),
),
),
]),
onPanUpdate: (tapInfo) {
setState(() {
xPosition += tapInfo.delta.dx;
yPosition += tapInfo.delta.dy;
});
}),
),
],
),
),
SizedBox(
height: 10.0,
),
Row(
mainAxisAlignment: MainAxisAlignment.center,
children: [
_cameraTogglesRowWidget(),
_captureControlRowWidget(context),
Spacer()
],
),
SizedBox(height: 10.0)
],
),
),
),
);
}
Widget _cameraPreviewWidget(context) {
final size = MediaQuery.of(context).size;
if (!controller.value.isInitialized) {
return Container();
}
return ClipRect(
child: Container(
child: Transform.scale(
scale: controller.value.aspectRatio / size.aspectRatio,
child: Center(
child: AspectRatio(
aspectRatio: controller.value.aspectRatio,
child: CameraPreview(controller),
),
),
),
),
);
}
/// Display the control bar with buttons to take pictures
Widget _captureControlRowWidget(context) {
return Expanded(
child: Align(
alignment: Alignment.center,
child: Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
mainAxisSize: MainAxisSize.max,
children: [
FloatingActionButton(
child: Icon(Icons.camera_alt),
backgroundColor: Colors.lightBlue,
onPressed: () {
_onCapturePressed(context);
})
],
),
),
);
}
/// Display a row of toggle to select the camera (or a message if no camera is available).
Widget _cameraTogglesRowWidget() {
if (cameras == null || cameras.isEmpty) {
return Spacer();
}
CameraDescription selectedCamera = cameras[selectedCameraIdx];
CameraLensDirection lensDirection = selectedCamera.lensDirection;
/// CHNAGE THIS TO INCREASE MAGNIFICATION
///
return Expanded(
child: Align(
alignment: Alignment.centerLeft,
child: FlatButton.icon(
onPressed: _onSwitchCamera,
icon: Icon(_getCameraLensIcon(lensDirection), color: Colors.white),
label: Text(
"10x",
// "${lensDirection.toString().toUpperCase().substring(lensDirection.toString().indexOf('.') + 1)}",
style: TextStyle(color: Colors.white),
)),
),
);
}
IconData _getCameraLensIcon(CameraLensDirection direction) {
switch (direction) {
case CameraLensDirection.back:
return Icons.camera_rear;
case CameraLensDirection.front:
return Icons.camera_front;
case CameraLensDirection.external:
return Icons.camera;
default:
return Icons.device_unknown;
}
}
void _onSwitchCamera() {
selectedCameraIdx =
selectedCameraIdx < cameras.length - 1 ? selectedCameraIdx + 1 : 0;
CameraDescription selectedCamera = cameras[selectedCameraIdx];
// _initCameraController(selectedCamera);
}
void _onCapturePressed(context) async {
// Take the Picture in a try / catch block. If anything goes wrong,
// catch the error.
try {
// Attempt to take a picture and log where it's been saved
final path = join(
// In this example, store the picture in the temp directory. Find
// the temp directory using the `path_provider` plugin.
(await getTemporaryDirectory()).path,
'${DateTime.now()}.png',
);
print(path);
await controller.takePicture(path);
// If the picture was taken, display it on a new screen
Navigator.push(
context,
MaterialPageRoute(
builder: (context) => PreviewImageScreen(imagePath: path),
),
);
} catch (e) {
// If an error occurs, log the error to the console.
print(e);
}
}
// CAMERA EXCEPTION METHOD
// void _showCameraException(CameraException e) {
// String errorText = 'Error: ${e.code}\nError Message: ${e.description}';
// print(errorText);
// print('Error: ${e.code}\n${e.description}');
// }
}
import'包:camera/camera.dart';
导入“package:path/path.dart”;
导入“package:path_provider/path_provider.dart”;
导入“package:tflite/tflite.dart”;
导入'dart:math'作为数学;
导入“../preview_screen.dart”;
导入“models.dart”;
typedef void回调(列表列表,int h,int w);
类Camera扩展StatefulWidget{
最后名单摄像机;
最终回调设置识别;
最终串模型;
摄像机(this.camers,this.model,this.setRecognitions);
@凌驾
_CameraState createState()=>新建;
}
类_CameraState扩展状态{
双xPosition=120;
双Y位置=150;
列出摄像机;
int-selectedCameraIdx;
字符串图像路径;
摄像机控制器;
bool isDetecting=假;
@凌驾
void initState(){
super.initState();
if(widget.cameras==null | | widget.cameras.length<1){
打印(widget.cameras);
打印(“未找到摄像头”);
}否则{
控制器=新的CameraController(
widget.cameras[0],
分辨率预置,高,
);
controller.initialize()。然后((){
如果(!已安装){
返回;
}
setState((){});
controller.startImageStream((CameraImage img){
如果(!isDetecting){
isDetecting=true;
int startTime=new DateTime.now().millissecondssinceepoch;
if(widget.model==mobilenet){
Tflite.runModelOnFrame(
字节列表:img.planes.map((平面){
返回平面。字节;
}).toList(),
图像高度:图像高度,
图像宽度:img.width,
结果:2,
).然后((承认){
int endTime=new DateTime.now().millissecondssinceepoch;
打印(“检测花费${endTime-startTime}”);
控件设置识别(识别、图像高度、图像宽度);
isDetecting=假;
});
}else if(widget.model==posenet){
Tflite.runPoseNetOnFrame(
字节列表:img.planes.map((平面){
返回平面。字节;
}).toList(),
图像高度:图像高度,
图像宽度:img.width,
结果:2,
).然后((承认){
int endTime=new DateTime.now().millissecondssinceepoch;
打印(“检测花费${endTime-startTime}”);
控件设置识别(识别、图像高度、图像宽度);
isDetecting=假;
});
}否则{
Tflite.detectObjectOnFrame(
字节列表:img.planes.map((平面){
返回平面。字节;
}).toList(),
model:widget.model==yolo?“yolo”:“SSDMobileNet”,
图像高度:图像高度,
图像宽度:img.width,
imageMean:widget.model==yolo?0:127.5,
imageStd:widget.model==yolo?255.0:127.5,
numResultsPerClass:1,
阈值:widget.model==yolo?0.2:0.4,
).然后((承认){
int endTime=new DateTime.now().millissecondssinceepoch;
打印(“检测花费${endTime-startTime}”);
控件设置识别(识别、图像高度、图像宽度);
isDetecting=假;
});
}
}
});
});
}
}
@凌驾
无效处置(){
控制器?.dispose();
super.dispose();
}
@凌驾
小部件构建(构建上下文){
if(controller==null | |!controller.value.isInitialized){
返回容器();
}
var tmp=MediaQuery.of(context).size;
var screenH=数学最大值(tmp高度、tmp宽度);
var screenW=数学最小值(tmp高度、tmp宽度);
tmp=controller.value.previewSize;
var previewH=数学最大值(tmp高度、tmp宽度);
var previewW=数学最小值(tmp.height,tmp.width);
var screenRatio=屏幕H/screenW;
var previewRatio=previewH/previewW;
返回脚手架(
appBar:appBar(
背景颜色:颜色。浅蓝色,
标题:对,
标高:5,
标题:文本(“病理”),
主体:容器(
装饰:新盒子装饰(颜色:颜色。黑色),
儿童:安全区(
子:列(
crossAxisAlignment:crossAxisAlignment.stretch,
儿童:[
扩大(
弹性:2,
子:新堆栈(
儿童:[
新容器(
对齐:对齐.center,
子项:_cameraPreviewWidget(上下文),
),
新对齐(
对齐:对齐.center,
儿童:手势检测器(
子:堆栈(子:[
定位(
顶部:yPosition,
左:xPosition,
子:容器(
//填充:仅限边缘设置(顶部:200.0,左侧