Inflate error with Google Mobile Vision


Hey guys, for my school project I'm building a Dutch license plate scanner. I came a long way with just the text part, but now I'm trying to add the actual scanning, so I'm using the Google Mobile Vision code. After running the code samples as a test (which succeeded), I tried to add the code into my own project. After some setbacks I finally managed to get it semi-working. I have one problem left: when pressing the actual camera button, the whole application crashes and restarts.
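For context, this is roughly what the camera button does. It's a simplified sketch of my click handler; the button id (read_text), MainActivity, and RC_OCR_CAPTURE are just placeholders for my own names, while the extras follow the Google sample:

// In MainActivity.onCreate() (sketch) -- the camera button launches the capture activity.
findViewById(R.id.read_text).setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View v) {
        Intent intent = new Intent(MainActivity.this, OcrCaptureActivity.class);
        intent.putExtra(OcrCaptureActivity.AutoFocus, true);   // request autofocus
        intent.putExtra(OcrCaptureActivity.UseFlash, false);   // leave the flash off
        startActivityForResult(intent, RC_OCR_CAPTURE);        // RC_OCR_CAPTURE is my own request code constant
    }
});

This is the crash log: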

Process: com.y_gap.menno.kentekenscanner, PID: 2885
   java.lang.RuntimeException: Unable to start activity ComponentInfo{com.y_gap.menno.kentekenscanner/com.y_gap.menno.kentekenscanner.OcrCaptureActivity}: android.view.InflateException: Binary XML file line #10: Error inflating class com.google.android.gms.samples.vision.ocrreader.ui.camera.CameraSourcePreview
       at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2298)
       at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2360)
       at android.app.ActivityThread.access$800(ActivityThread.java:144)
       at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1278)
       at android.os.Handler.dispatchMessage(Handler.java:102)
       at android.os.Looper.loop(Looper.java:135)
       at android.app.ActivityThread.main(ActivityThread.java:5221)
       at java.lang.reflect.Method.invoke(Native Method)
       at java.lang.reflect.Method.invoke(Method.java:372)
       at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:899)
       at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:694)
    Caused by: android.view.InflateException: Binary XML file line #10: Error inflating class com.google.android.gms.samples.vision.ocrreader.ui.camera.CameraSourcePreview
       at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:757)
       at android.view.LayoutInflater.rInflate(LayoutInflater.java:806)
       at android.view.LayoutInflater.inflate(LayoutInflater.java:504)
       at android.view.LayoutInflater.inflate(LayoutInflater.java:414)
       at android.view.LayoutInflater.inflate(LayoutInflater.java:365)
       at android.support.v7.app.AppCompatDelegateImplV9.setContentView(AppCompatDelegateImplV9.java:287)
       at android.support.v7.app.AppCompatActivity.setContentView(AppCompatActivity.java:139)
       at com.y_gap.menno.kentekenscanner.OcrCaptureActivity.onCreate(OcrCaptureActivity.java:87)
       at android.app.Activity.performCreate(Activity.java:5937)
       at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1105)
       at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2251)
       at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2360) 
       at android.app.ActivityThread.access$800(ActivityThread.java:144) 
       at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1278) 
       at android.os.Handler.dispatchMessage(Handler.java:102) 
       at android.os.Looper.loop(Looper.java:135) 
       at android.app.ActivityThread.main(ActivityThread.java:5221) 
       at java.lang.reflect.Method.invoke(Native Method) 
       at java.lang.reflect.Method.invoke(Method.java:372) 
       at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:899) 
       at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:694) 
    Caused by: java.lang.ClassNotFoundException: Didn't find class "com.google.android.gms.samples.vision.ocrreader.ui.camera.CameraSourcePreview" on path: DexPathList[[zip file "/data/app/com.y_gap.menno.kentekenscanner-1/base.apk", zip file "/data/app/com.y_gap.menno.kentekenscanner-1/split_lib_dependencies_apk.apk", zip file "/data/app/com.y_gap.menno.kentekenscanner-1/split_lib_slice_0_apk.apk", zip file "/data/app/com.y_gap.menno.kentekenscanner-1/split_lib_slice_1_apk.apk", zip file "/data/app/com.y_gap.menno.kentekenscanner-1/split_lib_slice_2_apk.apk", zip file "/data/app/com.y_gap.menno.kentekenscanner-1/split_lib_slice_3_apk.apk", zip file "/data/app/com.y_gap.menno.kentekenscanner-1/split_lib_slice_4_apk.apk", zip file "/data/app/com.y_gap.menno.kentekenscanner-1/split_lib_slice_5_apk.apk", zip file "/data/app/com.y_gap.menno.kentekenscanner-1/split_lib_slice_6_apk.apk", zip file "/data/app/com.y_gap.menno.kentekenscanner-1/split_lib_slice_7_apk.apk", zip file "/data/app/com.y_gap.menno.kentekenscanner-1/split_lib_slice_8_apk.apk", zip file "/data/app/com.y_gap.menno.kentekenscanner-1/split_lib_slice_9_apk.apk"],nativeLibraryDirectories=[/vendor/lib, /system/lib]]
       at dalvik.system.BaseDexClassLoader.findClass(BaseDexClassLoader.java:56)
       at java.lang.ClassLoader.loadClass(ClassLoader.java:511)
       at java.lang.ClassLoader.loadClass(ClassLoader.java:469)
       at android.view.LayoutInflater.createView(LayoutInflater.java:571)
       at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:743)
       at android.view.LayoutInflater.rInflate(LayoutInflater.java:806) 
       at android.view.LayoutInflater.inflate(LayoutInflater.java:504) 
       at android.view.LayoutInflater.inflate(LayoutInflater.java:414) 
       at android.view.LayoutInflater.inflate(LayoutInflater.java:365) 
       at android.support.v7.app.AppCompatDelegateImplV9.setContentView(AppCompatDelegateImplV9.java:287) 
       at android.support.v7.app.AppCompatActivity.setContentView(AppCompatActivity.java:139) 
       at com.y_gap.menno.kentekenscanner.OcrCaptureActivity.onCreate(OcrCaptureActivity.java:87) 
       at android.app.Activity.performCreate(Activity.java:5937) 
       at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1105) 
       at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2251) 
       at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2360) 
       at android.app.ActivityThread.access$800(ActivityThread.java:144) 
       at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1278) 
       at android.os.Handler.dispatchMessage(Handler.java:102) 
       at android.os.Looper.loop(Looper.java:135) 
       at android.app.ActivityThread.main(ActivityThread.java:5221) 
       at java.lang.reflect.Method.invoke(Native Method) 
       at java.lang.reflect.Method.invoke(Method.java:372) 
       at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:899) 
       at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:694) 
    Suppressed: java.lang.ClassNotFoundException: com.google.android.gms.samples.vision.ocrreader.ui.camera.CameraSourcePreview
       at java.lang.Class.classForName(Native Method)
       at java.lang.BootClassLoader.findClass(ClassLoader.java:781)
       at java.lang.BootClassLoader.loadClass(ClassLoader.java:841)
       at java.lang.ClassLoader.loadClass(ClassLoader.java:504)
            ... 23 more
    Caused by: java.lang.NoClassDefFoundE

The app should just start the camera activity to detect text.
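For completeness, the calling activity reads the recognized text back roughly like this (again a simplified sketch using the same RC_OCR_CAPTURE placeholder; the SUCCESS check matches the setResult() call in my capture activity below):

// In MainActivity (sketch) -- receive the text returned by OcrCaptureActivity.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode == RC_OCR_CAPTURE) {
        if (resultCode == CommonStatusCodes.SUCCESS && data != null) {
            String text = data.getStringExtra(OcrCaptureActivity.TextBlockObject);
            // use the recognized text, e.g. fill in the license plate field
        }
    } else {
        super.onActivityResult(requestCode, resultCode, data);
    }
}

This is my OcrCaptureActivity: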

package com.y_gap.menno.kentekenscanner;

import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.hardware.Camera;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.View;
import android.widget.Toast;

import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GoogleApiAvailability;
import com.google.android.gms.common.api.CommonStatusCodes;
import com.google.android.gms.vision.text.TextBlock;
import com.google.android.gms.vision.text.TextRecognizer;
import com.y_gap.menno.kentekenscanner.ui.camera.*;

import java.io.IOException;

public final class OcrCaptureActivity extends AppCompatActivity {
    private static final String TAG = "OcrCaptureActivity";

    // Intent request code to handle updating play services if needed.
    private static final int RC_HANDLE_GMS = 9001;

    // Permission request codes need to be < 256
    private static final int RC_HANDLE_CAMERA_PERM = 2;

    // Constants used to pass extra data in the intent
    public static final String AutoFocus = "AutoFocus";
    public static final String UseFlash = "UseFlash";
    public static final String TextBlockObject = "String";

    private CameraSource mCameraSource;
    private CameraSourcePreview mPreview;
    private GraphicOverlay<OcrGraphic> mGraphicOverlay;

    // Helper objects for detecting taps and pinches.
    private ScaleGestureDetector scaleGestureDetector;
    private GestureDetector gestureDetector;

    /**
     * Initializes the UI and creates the detector pipeline.
     */
    @Override
    public void onCreate(Bundle icicle) {
        Log.w(TAG, "running onCreate() -> ocrcaptureactivity");

        super.onCreate(icicle);
        setContentView(R.layout.ocr_capture);

        Log.w(TAG, "1");
        mPreview = (CameraSourcePreview) findViewById(R.id.preview);
        mGraphicOverlay = findViewById(R.id.graphicOverlay);

        Log.w(TAG, "2");
        // read parameters from the intent used to launch the activity.
        boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
        boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

        Log.w(TAG, "3");
        // Check for the camera permission before accessing the camera.  If the
        // permission is not granted yet, request permission.
        int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
        if (rc == PackageManager.PERMISSION_GRANTED) {
            createCameraSource(autoFocus, useFlash);
        } else {
            requestCameraPermission();
        }

        Log.w(TAG, "4");
        gestureDetector = new GestureDetector(this, new CaptureGestureListener());
        scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

        Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom",
                Snackbar.LENGTH_LONG)
                .show();
    }

    /**
     * Handles the requesting of the camera permission.  This includes
     * showing a "Snackbar" message of why the permission is needed then
     * sending the request.
     */
    private void requestCameraPermission() {
        Log.w(TAG, "Camera permission is not granted. Requesting permission");

        final String[] permissions = new String[]{Manifest.permission.CAMERA};

        if (!ActivityCompat.shouldShowRequestPermissionRationale(this,
                Manifest.permission.CAMERA)) {
            ActivityCompat.requestPermissions(this, permissions, RC_HANDLE_CAMERA_PERM);
            return;
        }

        final Activity thisActivity = this;

        View.OnClickListener listener = new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                ActivityCompat.requestPermissions(thisActivity, permissions,
                        RC_HANDLE_CAMERA_PERM);
            }
        };

        Snackbar.make(mGraphicOverlay, R.string.permission_camera_rationale,
                Snackbar.LENGTH_INDEFINITE)
                .setAction(R.string.ok, listener)
                .show();
    }

    @Override
    public boolean onTouchEvent(MotionEvent e) {
        boolean b = scaleGestureDetector.onTouchEvent(e);

        boolean c = gestureDetector.onTouchEvent(e);

        return b || c || super.onTouchEvent(e);
    }

    /**
     * Creates and starts the camera.  Note that this uses a higher resolution in comparison
     * to other detection examples to enable the ocr detector to detect small text samples
     * at long distances.
     * <p>
     * Suppressing InlinedApi since there is a check that the minimum version is met before using
     * the constant.
     */
    @SuppressLint("InlinedApi")
    private void createCameraSource(boolean autoFocus, boolean useFlash) {
        Log.w(TAG, "running createCameraSource() -> ocrcaptureactivity");

        Context context = getApplicationContext();

        // Create the TextRecognizer
        TextRecognizer textRecognizer = new TextRecognizer.Builder(context).build();
        // TODO: Set the TextRecognizer's Processor.
        textRecognizer.setProcessor(new OcrDetectorProcessor(mGraphicOverlay));

        // Check if the TextRecognizer is operational.
        if (!textRecognizer.isOperational()) {
            Log.w(TAG, "Detector dependencies are not yet available.");

            // Check for low storage.  If there is low storage, the native library will not be
            // downloaded, so detection will not become operational.
            IntentFilter lowstorageFilter = new IntentFilter(Intent.ACTION_DEVICE_STORAGE_LOW);
            boolean hasLowStorage = registerReceiver(null, lowstorageFilter) != null;

            if (hasLowStorage) {
                Toast.makeText(this, R.string.low_storage_error, Toast.LENGTH_LONG).show();
                Log.w(TAG, getString(R.string.low_storage_error));
            }
        }

        // Create the mCameraSource using the TextRecognizer.
        mCameraSource =
                new CameraSource.Builder(getApplicationContext(), textRecognizer)
                        .setFacing(CameraSource.CAMERA_FACING_BACK)
                        .setRequestedPreviewSize(1280, 1024)
                        .setRequestedFps(60.0f)
                        .setFlashMode(useFlash ? Camera.Parameters.FLASH_MODE_TORCH : null)
                        .setFocusMode(autoFocus ? Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE : null)
                        .build();
    }

    /**
     * Restarts the camera.
     */
    @Override
    protected void onResume() {
        super.onResume();
        startCameraSource();
    }

    /**
     * Stops the camera.
     */
    @Override
    protected void onPause() {
        super.onPause();
        if (mPreview != null) {
            mPreview.stop();
        }
    }

    /**
     * Releases the resources associated with the camera source, the associated detectors, and the
     * rest of the processing pipeline.
     */
    @Override
    protected void onDestroy() {
        super.onDestroy();
        if (mPreview != null) {
            mPreview.release();
        }
    }

    /**
     * Callback for the result from requesting permissions. This method
     * is invoked for every call on {@link #requestPermissions(String[], int)}.
     * <p>
     * <strong>Note:</strong> It is possible that the permissions request interaction
     * with the user is interrupted. In this case you will receive empty permissions
     * and results arrays which should be treated as a cancellation.
     * </p>
     *
     * @param requestCode  The request code passed in {@link #requestPermissions(String[], int)}.
     * @param permissions  The requested permissions. Never null.
     * @param grantResults The grant results for the corresponding permissions
     *                     which is either {@link PackageManager#PERMISSION_GRANTED}
     *                     or {@link PackageManager#PERMISSION_DENIED}. Never null.
     * @see #requestPermissions(String[], int)
     */
    @Override
    public void onRequestPermissionsResult(int requestCode,
                                           @NonNull String[] permissions,
                                           @NonNull int[] grantResults) {
        if (requestCode != RC_HANDLE_CAMERA_PERM) {
            Log.d(TAG, "Got unexpected permission result: " + requestCode);
            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
            return;
        }

        if (grantResults.length != 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            Log.d(TAG, "Camera permission granted - initialize the camera source");
            // We have permission, so create the camerasource
            boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
            boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);
            createCameraSource(autoFocus, useFlash);
            return;
        }

        Log.e(TAG, "Permission not granted: results len = " + grantResults.length +
                " Result code = " + (grantResults.length > 0 ? grantResults[0] : "(empty)"));

        DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int id) {
                finish();
            }
        };

        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setTitle("Multitracker sample")
                .setMessage(R.string.no_camera_permission)
                .setPositiveButton(R.string.ok, listener)
                .show();
    }

    /**
     * Starts or restarts the camera source, if it exists.  If the camera source doesn't exist yet
     * (e.g., because onResume was called before the camera source was created), this will be called
     * again when the camera source is created.
     */
    private void startCameraSource() throws SecurityException {
        // Check that the device has play services available.
        int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
                getApplicationContext());
        if (code != ConnectionResult.SUCCESS) {
            Dialog dlg =
                    GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
            dlg.show();
        }

        if (mCameraSource != null) {
            try {
                mPreview.start(mCameraSource, mGraphicOverlay);
            } catch (IOException e) {
                Log.e(TAG, "Unable to start camera source.", e);
                mCameraSource.release();
                mCameraSource = null;
            }
        }
    }

    /**
     * onTap is called to capture the first TextBlock under the tap location and return it to
     * the Initializing Activity.
     *
     * @param rawX - the raw position of the tap
     * @param rawY - the raw position of the tap.
     * @return true if the activity is ending.
     */
    private boolean onTap(float rawX, float rawY) {
        OcrGraphic graphic = mGraphicOverlay.getGraphicAtLocation(rawX, rawY);
        TextBlock text = null;
        if (graphic != null) {
            text = graphic.getTextBlock();
            if (text != null && text.getValue() != null) {
                Intent data = new Intent();
                data.putExtra(TextBlockObject, text.getValue());
                setResult(CommonStatusCodes.SUCCESS, data);
                finish();
            } else {
                Log.d(TAG, "text data is null");
            }
        } else {
            Log.d(TAG, "no text detected");
        }
        return text != null;
    }

    private class CaptureGestureListener extends GestureDetector.SimpleOnGestureListener {

        @Override
        public boolean onSingleTapConfirmed(MotionEvent e) {
            return onTap(e.getRawX(), e.getRawY()) || super.onSingleTapConfirmed(e);
        }
    }

    private class ScaleListener implements ScaleGestureDetector.OnScaleGestureListener {

        /**
         * Responds to scaling events for a gesture in progress.
         * Reported by pointer motion.
         *
         * @param detector The detector reporting the event - use this to
         *                 retrieve extended info about event state.
         * @return Whether or not the detector should consider this event
         * as handled. If an event was not handled, the detector
         * will continue to accumulate movement until an event is
         * handled. This can be useful if an application, for example,
         * only wants to update scaling factors if the change is
         * greater than 0.01.
         */
        @Override
        public boolean onScale(ScaleGestureDetector detector) {
            return false;
        }

        /**
         * Responds to the beginning of a scaling gesture. Reported by
         * new pointers going down.
         *
         * @param detector The detector reporting the event - use this to
         *                 retrieve extended info about event state.
         * @return Whether or not the detector should continue recognizing
         * this gesture. For example, if a gesture is beginning
         * with a focal point outside of a region where it makes
         * sense, onScaleBegin() may return false to ignore the
         * rest of the gesture.
         */
        @Override
        public boolean onScaleBegin(ScaleGestureDetector detector) {
            return true;
        }

        /**
         * Responds to the end of a scale gesture. Reported by existing
         * pointers going up.
         * <p/>
         * Once a scale has ended, {@link ScaleGestureDetector#getFocusX()}
         * and {@link ScaleGestureDetector#getFocusY()} will return focal point
         * of the pointers remaining on the screen.
         *
         * @param detector The detector reporting the event - use this to
         *                 retrieve extended info about event state.
         */
        @Override
        public void onScaleEnd(ScaleGestureDetector detector) {
            mCameraSource.doZoom(detector.getScaleFactor());
        }
    }
}

Please help me solve this.

This is the XML layout file that's causing the problem:

<LinearLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    android:id="@+id/topLayout"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:keepScreenOn="true">

    <com.google.android.gms.samples.vision.ocrreader.ui.camera.CameraSourcePreview
        xmlns:android="http://schemas.android.com/apk/res/android"
        android:id="@+id/preview"
        android:layout_width="match_parent"
        android:layout_height="match_parent">

        <com.google.android.gms.samples.vision.ocrreader.ui.camera.GraphicOverlay
            xmlns:android="http://schemas.android.com/apk/res/android"
            android:id="@+id/graphicOverlay"
            android:layout_width="match_parent"
            android:layout_height="match_parent" />

    </com.google.android.gms.samples.vision.ocrreader.ui.camera.CameraSourcePreview>

</LinearLayout>