Android Vision OCR: java.lang.RuntimeException: takePicture failed when calling mCameraSource.takePicture inside a Handler
I am developing an example with the Android Vision OCR API. The flow is: after text is recognized in OcrDetectorProcessor.receiveDetections(), a handler message is sent to the UI activity. Inside the handler I want to take a picture, but in that case I get this error:
11-06 00:12:43.932 15916-15916/com.google.android.gms.samples.vision.barcodereader E/AndroidRuntime: FATAL EXCEPTION: main
    Process: com.google.android.gms.samples.vision.barcodereader, PID: 15916
    java.lang.RuntimeException: takePicture failed
        at android.hardware.Camera.native_takePicture(Native Method)
        at android.hardware.Camera.takePicture(Camera.java:1523)
        at com.google.android.gms.samples.vision.ocrreader.ui.camera.CameraSource.takePicture(CameraSource.java:517)
        at com.google.android.gms.samples.vision.ocrreader.OcrCaptureActivity$6.handleMessage(OcrCaptureActivity.java:464)
        at android.os.Handler.dispatchMessage(Handler.java:102)
        at android.os.Looper.loop(Looper.java:158)
        at android.app.ActivityThread.main(ActivityThread.java:7224)
        at java.lang.reflect.Method.invoke(Native Method)
        at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1230)
        at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1120)
Below is the code I am using in OcrCaptureActivity:
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // Read parameters from the intent used to launch the activity.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    mHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
                case 1:
                    mCameraSource.takePicture(null, new CameraSource.PictureCallback() {
                        @Override
                        public void onPictureTaken(byte[] bytes) {
                            Bitmap bmp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
                            Log.d("bitmap", bmp.getWidth() + "x" + bmp.getHeight());
                        }
                    });
                    break;
            }
        }
    };

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    int rcExternalStorage = ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE);
    if (rcExternalStorage != PackageManager.PERMISSION_GRANTED) {
        requestWriteStoragePermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom",
            Snackbar.LENGTH_LONG)
            .show();
    ....
}

@SuppressLint("InlinedApi")
private void createCameraSource(boolean autoFocus, boolean useFlash) {
    Context context = getApplicationContext();

    // A text recognizer is created to find text. An associated processor instance
    // is set to receive the text recognition results and display graphics for each
    // text block on screen.
    TextRecognizer textRecognizer = new TextRecognizer.Builder(context).build();
    textRecognizer.setProcessor(new OcrDetectorProcessor(mGraphicOverlay, this, mHandler));

    if (!textRecognizer.isOperational()) {
        // Note: The first time that an app using a Vision API is installed on a
        // device, GMS will download native libraries to the device in order to do detection.
        // Usually this completes before the app is run for the first time. But if that
        // download has not yet completed, then the above call will not detect any text,
        // barcodes, or faces.
        //
        // isOperational() can be used to check if the required native libraries are currently
        // available. The detectors will automatically become operational once the library
        // downloads complete on device.
        Log.w(TAG, "Detector dependencies are not yet available.");

        // Check for low storage. If there is low storage, the native library will not be
        // downloaded, so detection will not become operational.
        IntentFilter lowstorageFilter = new IntentFilter(Intent.ACTION_DEVICE_STORAGE_LOW);
        boolean hasLowStorage = registerReceiver(null, lowstorageFilter) != null;

        if (hasLowStorage) {
            Toast.makeText(this, R.string.low_storage_error, Toast.LENGTH_LONG).show();
            Log.w(TAG, getString(R.string.low_storage_error));
        }
    }

    // Creates and starts the camera. Note that this uses a higher resolution in comparison
    // to other detection examples to enable the text recognizer to detect small pieces of text.
    mCameraSource = new CameraSource.Builder(getApplicationContext(), textRecognizer)
            .setFacing(CameraSource.CAMERA_FACING_BACK)
            .setRequestedPreviewSize(1280, 1024)
            .setRequestedFps(2.0f)
            .setFlashMode(useFlash ? Camera.Parameters.FLASH_MODE_TORCH : null)
            .setFocusMode(autoFocus ? Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE : null)
            .build();
}
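(Editor's note, not part of the original question: the handler above calls mCameraSource.takePicture for every message it receives, and android.hardware.Camera throws "takePicture failed" if takePicture is called again while a previous capture is still in progress or the preview has been stopped by it. Below is a minimal sketch of a one-shot guard around the capture, assuming the repeated messages are what triggers the failure; the field mIsTakingPicture is hypothetical and not part of the original sample.)

// Hypothetical class-level field used as a guard.
private volatile boolean mIsTakingPicture = false;

// Replacement for the handler created in onCreate() above (sketch only).
mHandler = new Handler() {
    @Override
    public void handleMessage(Message msg) {
        // Only start a capture if the camera source exists and no capture is in flight.
        if (msg.what == 1 && mCameraSource != null && !mIsTakingPicture) {
            mIsTakingPicture = true;
            mCameraSource.takePicture(null, new CameraSource.PictureCallback() {
                @Override
                public void onPictureTaken(byte[] bytes) {
                    Bitmap bmp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
                    Log.d("bitmap", bmp.getWidth() + "x" + bmp.getHeight());
                    mIsTakingPicture = false; // allow the next capture
                }
            });
        }
    }
};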
And in OcrDetectorProcessor:
@Override
public void receiveDetections(Detector.Detections<TextBlock> detections) {
    mGraphicOverlay.clear();
    SparseArray<TextBlock> items = detections.getDetectedItems();
    for (int i = 0; i < items.size(); ++i) {
        TextBlock item = items.valueAt(i);
        OcrGraphic graphic = new OcrGraphic(mGraphicOverlay, item);
        mGraphicOverlay.add(graphic);
        mHandler.sendEmptyMessage(1);
    }
}
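(Editor's note, not part of the original question: receiveDetections runs for every processed frame, and sendEmptyMessage(1) sits inside the for-loop, so one message is queued per detected TextBlock per frame; that can flood the handler with capture requests. A minimal sketch that sends at most one message per frame, assuming this flood contributes to the problem:)

@Override
public void receiveDetections(Detector.Detections<TextBlock> detections) {
    mGraphicOverlay.clear();
    SparseArray<TextBlock> items = detections.getDetectedItems();
    for (int i = 0; i < items.size(); ++i) {
        TextBlock item = items.valueAt(i);
        mGraphicOverlay.add(new OcrGraphic(mGraphicOverlay, item));
    }
    // Notify the activity at most once per frame, and only if no capture
    // request is already queued, instead of once per detected TextBlock.
    if (items.size() > 0 && !mHandler.hasMessages(1)) {
        mHandler.sendEmptyMessage(1);
    }
}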
Note: there is no problem when I take the picture on a button click. Can anyone help me?