Error with IBM Visual Recognition Classification with Flutter


I am trying to take an image and send it to IBM Watson Visual Recognition to classify it with one of my 3 custom classifiers. Below is all of my code.

import 'dart:io';

import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:flutter_ibm_watson/flutter_ibm_watson.dart';
import 'package:ibm_visual_recog_img_file/connection.dart';
import 'package:ourearth2020/screens/Community.dart';
import 'package:path/path.dart';
import 'dart:async';
import 'package:image_picker/image_picker.dart';
import 'package:flutter_speed_dial/flutter_speed_dial.dart';
import 'package:path_provider/path_provider.dart';

class VisualPage extends StatefulWidget {
  @override
  _VisualPageState createState() => _VisualPageState();
}

class _VisualPageState extends State<VisualPage> {
  CameraController _controller;
  List cameras;
  String path;
  var galleryImage;

  CameraDescription cameraDescription;

  Future initCamera() async {
    cameras = await availableCameras();
    var frontCamera = cameras.first;

    _controller = CameraController(frontCamera, ResolutionPreset.high);
    try {
      await _controller.initialize();
    } catch (e) {}
    print('Controller Is Init:' + _controller.value.isInitialized.toString());
    displayPreview();
  }
  
  bool displayPreview() {
    if (_controller == null || !_controller.value.isInitialized) {
      return false;
    } else {
      return true;
    }
  }

  Future getImageFromGallery() async {
    var image = await ImagePicker.pickImage(source: ImageSource.gallery);
    setState(() {
      galleryImage = image;
    });
    print('GALLERY IMAGE' + galleryImage.toString());
    return galleryImage;
  }

  @override
  void dispose() {
    // Dispose of the controller when the widget is disposed.
    _controller.dispose();
    super.dispose();
  }

  @override
  void initState() {
    super.initState();
    print('Running');
    initCamera();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
        backgroundColor: Colors.white,
        body: Stack(children: [
          displayPreview()
              ? AspectRatio(
                  aspectRatio: MediaQuery.of(context).size.width /
                      MediaQuery.of(context).size.height,
                  child: CameraPreview(_controller),
                )
              : Container(
                  child: CircularProgressIndicator(
                    valueColor: AlwaysStoppedAnimation<Color>(Colors.yellow),
                  ),
                ),
          Positioned(
            top: MediaQuery.of(context).size.height - 120,
            child: GestureDetector(
                onTap: () async {
                  await getImageFromGallery();
                  Navigator.push(context, MaterialPageRoute(builder: (context) =>
                    DisplayPicture(image: galleryImage)
                  ));
                },
                child: Icon(
                  Icons.image,
                  color: Colors.white,
                  size: 60,
                )),
          ),
          Positioned(
              top: MediaQuery.of(context).size.height - 120,
              left: MediaQuery.of(context).size.width / 2.2,
              child: GestureDetector(
                  behavior: HitTestBehavior.translucent,
                  child: Container(
                      child: Icon(
                        Icons.camera,
                        color: Colors.white,
                        size: 60,
                      )),
                  onTap: () async {
                    final path = (await getTemporaryDirectory()).path +
                        '${DateTime.now()}.png';
                    try {
                      await _controller.takePicture(path);
                      Navigator.push(
                          context,
                          MaterialPageRoute(
                              builder: (context) =>
                                  DisplayPicture(imagePath: path)));
                    } catch (e) {
                      print('EEEE' + e.toString());
                    }
                  }))
        ]));
  }
}

class DisplayPicture extends StatelessWidget {
  String imagePath;
  File image;
  String _text;
  // File file = File(imagePath)
  DisplayPicture({this.imagePath, this.image});

   visualImageClassifier(File image) async{
      IamOptions options = await IamOptions(iamApiKey: "NRDjngCby2d-pSHOPyWQJxhuB6vOY2uOTCX6KV2BCfwB", url: "https://api.us-south.visual-recognition.watson.cloud.ibm.com/instances/ef286f4e-84c7-44e0-b63d-a6a49a142a30").build();
      VisualRecognition visualRecognition = new VisualRecognition(iamOptions: options, language: Language.ENGLISH); // Language.ENGLISH is language response
      ClassifiedImages classifiedImages = await visualRecognition.classifyImageFile(image.path);
      print(classifiedImages.getImages()[0].getClassifiers()[0]
          .getClasses()[0]
          .className);
      // print("${image.toString()}");
     // print('ACCESS'+options.accessToken);
      //print(options);
      //print("${image.path}");
      //print('CLASSIFICATION'+classifiedImages.customClasses.toString());
    // StreamBuilder(
    //     stream: StreamMyClassifier(
    //         image,
    //         'NRDjngCby2d-pSHOPyWQJxhuB6vOY2uOTCX6KV2BCfwB', 'CompostxLandfillxRecycle_2056123069'),
    //     builder: (context, snapshot) {
    //       if (snapshot.hasData) {
    //         _text = snapshot.data;
    //         print(_text);
    //       }
    //       else {
    //  print('NO DATA AVAILABLE');
    //       }
    //
    //     }
    // );
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
        body: Stack(children: [
      Center(
          child:
              image == null ? Image.file(File(imagePath)) : Image.file(image)),
      Positioned(
        top: MediaQuery.of(context).size.height / 2,
        child: FloatingActionButton(
            onPressed: () async {
              await visualImageClassifier(
                  image == null ? File(imagePath) : image);
            },
            child: Icon(Icons.arrow_right)),
      )
    ]));
  }
}

The image is successfully displayed on my screen, but once I send it through visualRecognition.classifyImageFile(...), it gives me an error saying I cannot pass the image because the method only supports a String. I converted the image to a String, but then I get the error below.

[ERROR:flutter/lib/ui/ui_dart_state.cc(166)] Unhandled Exception: FileSystemException: Cannot retrieve length of file, path = 'File: '/data/user/0/com.example.ourearth2020/cache2020-09-17 18:50:16.530957.png'' (OS Error: No such file or directory, errno = 2)
E/flutter (17606): #0      _File.length.<anonymous closure> (dart:io/file_impl.dart:366:9)
E/flutter (17606): #1      _rootRunUnary (dart:async/zone.dart:1198:47)
E/flutter (17606): #2      _CustomZone.runUnary (dart:async/zone.dart:1100:19)
E/flutter (17606): #3      _FutureListener.handleValue (dart:async/future_impl.dart:143:18)
E/flutter (17606): #4      Future._propagateToListeners.handleValueCallback (dart:async/future_impl.dart:696:45)
E/flutter (17606): #5      Future._propagateToListeners (dart:async/future_impl.dart:725:32)
E/flutter (17606): #6      Future._completeWithValue (dart:async/future_impl.dart:529:5)
E/flutter (17606): #7      Future._asyncCompleteWithValue.<anonymous closure> (dart:async/future_impl.dart:567:7)
E/flutter (17606): #8      _rootRun (dart:async/zone.dart:1190:13)
E/flutter (17606): #9      _CustomZone.run (dart:async/zone.dart:1093:19)
E/flutter (17606): #10     _CustomZone.runGuarded (dart:async/zone.dart:997:7)
E/flutter (17606): #11     _CustomZone.bindCallbackGuarded.<anonymous closure> (dart:async/zone.dart:1037:23)
E/flutter (17606): #12     _microtaskLoop (dart:async/schedule_microtask.dart:41:21)
E/flutter (17606): #13     _startMicrotaskLoop (dart:async/schedule_microtask.dart:50:5)
E/flutter (17606): 

Some of the questions I have: Can I get the confidence score using this? The last time I used these statements to classify an image, it used a general classifier (when I input an image of a skyscraper, it told me "skyscraper"), so how can I have it classify with my 3 custom classifiers?

By the way, I have already set up the IBM Cloud service and it is fully functional. The library I found on pub.dev is here: https://pub.dev/packages/flutter_ibm_watson

EDIT: error output from the getImages() call

[ERROR:flutter/lib/ui/ui_dart_state.cc(166)] Unhandled Exception: NoSuchMethodError: The method 'getImages' was called on null.
E/flutter (31403): Receiver: null
E/flutter (31403): Tried calling: getImages()
E/flutter (31403): #0      Object.noSuchMethod (dart:core-patch/object_patch.dart:51:5)
E/flutter (31403): #1      DisplayPicture.visualImageClassifier (package:ourearth2020/screens/VisualPage.dart:147:30)
E/flutter (31403): <asynchronous suspension>
E/flutter (31403): #2      DisplayPicture.build.<anonymous closure> (package:ourearth2020/screens/VisualPage.dart:177:15)
E/flutter (31403): #3      _InkResponseState._handleTap (package:flutter/src/material/ink_well.dart:992:19)
E/flutter (31403): #4      _InkResponseState.build.<anonymous closure> (package:flutter/src/material/ink_well.dart:1098:38)
E/flutter (31403): #5      GestureRecognizer.invokeCallback (package:flutter/src/gestures/recognizer.dart:184:24)
E/flutter (31403): #6      TapGestureRecognizer.handleTapUp (package:flutter/src/gestures/tap.dart:524:11)
E/flutter (31403): #7      BaseTapGestureRecognizer._checkUp (package:flutter/src/gestures/tap.dart:284:5)
E/flutter (31403): #8      BaseTapGestureRecognizer.handlePrimaryPointer (package:flutter/src/gestures/tap.dart:219:7)
E/flutter (31403): #9      PrimaryPointerGestureRecognizer.handleEvent (package:flutter/src/gestures/recognizer.dart:477:9)
E/flutter (31403): #10     PointerRouter._dispatch (package:flutter/src/gestures/pointer_router.dart:78:12)
E/flutter (31403): #11     PointerRouter._dispatchEventToRoutes.<anonymous closure> (package:flutter/src/gestures/pointer_router.dart:124:9)
E/flutter (31403): #12     _LinkedHashMapMixin.forEach (dart:collection-patch/compact_hash.dart:377:8)
E/flutter (31403): #13     PointerRouter._dispatchEventToRoutes (package:flutter/src/gestures/pointer_router.dart:122:18)
E/flutter (31403): #14     PointerRouter.route (package:flutter/src/gestures/pointer_router.dart:108:7)
E/flutter (31403): #15     GestureBinding.handleEvent (package:flutter/src/gestures/binding.dart:220:19)
E/flutter (31403): #16     GestureBinding.dispatchEvent (package:flutter/src/gestures/binding.dart:200:22)
E/flutter (31403): #17     GestureBinding._handlePointerEvent (package:flutter/src/gestures/binding.dart:158:7)
E/flutter (31403): #18     GestureBinding._flushPointerEventQueue (package:flutter/src/gestures/binding.dart:104:7)
E/flutter (31403): #19     GestureBinding._handlePointerDataPacket (package:flutter/src/gestures/binding.dart:88:7)
E/flutter (31403): #20     _rootRunUnary (dart:async/zone.dart:1206:13)
E/flutter (31403): #21     _CustomZone.runUnary (dart:async/zone.dart:1100:19)
E/flutter (31403): #22     _CustomZone.runUnaryGuarded (dart:async/zone.dart:1005:7)
E/flutter (31403): #23     _invoke1 (dart:ui/hooks.dart:283:10)
E/flutter (31403): #24     _dispatchPointerDataPacket (dart:ui/hooks.dart:192:5)
E/flutter (31403): 
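
This NoSuchMethodError means classifiedImages itself is null, i.e. classifyImageFile failed and returned nothing before getImages() was ever reached. A rough sketch of a hypothetical guard helper that makes the failure visible instead of crashing, using the same calls as above:

Future<void> classifyGuarded(VisualRecognition visualRecognition, File image) async {
  // Guard the response before indexing into it, so a failed request
  // prints a message instead of throwing NoSuchMethodError on null.
  ClassifiedImages classifiedImages =
      await visualRecognition.classifyImageFile(image.path);
  if (classifiedImages == null || classifiedImages.getImages() == null) {
    print('Classification failed: classifyImageFile returned null');
    return;
  }
  print(classifiedImages
      .getImages()[0]
      .getClassifiers()[0]
      .getClasses()[0]
      .className);
}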

1 Answer

Answer by chunhunghan:

You can copy, paste, and run the full code below.
You can see the confidence score in the working demo below.
Please change image.toString() to image.path, because image is a File:
from

ClassifiedImages classifiedImages = await visualRecognition.classifyImageFile(image.toString());

to

ClassifiedImages classifiedImages = await visualRecognition.classifyImageFile(image.path);
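
A small sketch (with a hypothetical file path) showing why toString() produces the quoted path in the FileSystemException above, while path is the plain string the classifier expects:

import 'dart:io';

void main() {
  // hypothetical path, for illustration only
  final image = File('/data/user/0/com.example.ourearth2020/cache/photo.png');

  // File.toString() wraps the path in File: '...', which is not a usable file path
  print(image.toString()); // File: '/data/user/0/com.example.ourearth2020/cache/photo.png'

  // File.path is the plain path string that classifyImageFile needs
  print(image.path); // /data/user/0/com.example.ourearth2020/cache/photo.png
}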

working demo


working demo 2 for CameraPreview (the console output below shows the classification result)

I/flutter (31132): living room

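To read the confidence score in code rather than just in the demo output, here is a minimal sketch of a hypothetical helper. It assumes the package's ClassResult exposes the Watson score field as score next to className (check the ClassResult class in the package if the field name differs):

// Hypothetical helper: prints every class name with its confidence score.
// Assumes the getImages()/getClassifiers()/getClasses() chain used above and
// a ClassResult field named score that mirrors the Watson JSON response.
void printScores(ClassifiedImages classifiedImages) {
  for (final imageResult in classifiedImages.getImages()) {
    for (final classifier in imageResult.getClassifiers()) {
      for (final classResult in classifier.getClasses()) {
        print('${classResult.className}: ${classResult.score}');
      }
    }
  }
}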

full code

import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'dart:io';

import 'package:flutter_ibm_watson/flutter_ibm_watson.dart';
import 'package:image_picker/image_picker.dart';
import 'package:path_provider/path_provider.dart';

class VisualPage extends StatefulWidget {
  @override
  _VisualPageState createState() => _VisualPageState();
}

class _VisualPageState extends State<VisualPage> {
  CameraController _controller;
  List cameras;
  String path;
  var galleryImage;

  CameraDescription cameraDescription;

  Future initCamera() async {
    cameras = await availableCameras();
    var frontCamera = cameras.first;

    _controller = CameraController(frontCamera, ResolutionPreset.high);
    try {
      await _controller.initialize();
    } catch (e) {}
    print('Controller Is Init:' + _controller.value.isInitialized.toString());
    displayPreview();
    setState(() {});
  }

  bool displayPreview() {
    if (_controller == null || !_controller.value.isInitialized) {
      return false;
    } else {
      return true;
    }
  }

  Future getImageFromGallery() async {
    var image = await ImagePicker.pickImage(source: ImageSource.gallery);
    setState(() {
      galleryImage = image;
    });
    print('GALLERY IMAGE' + galleryImage.toString());
    return galleryImage;
  }

  @override
  void dispose() {
    // Dispose of the controller when the widget is disposed.
    _controller.dispose();
    super.dispose();
  }

  @override
  void initState() {
    super.initState();
    print('Running');

    initCamera();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
        backgroundColor: Colors.white,
        body: Stack(children: [
          displayPreview()
              ? AspectRatio(
                  aspectRatio: MediaQuery.of(context).size.width /
                      MediaQuery.of(context).size.height,
                  child: CameraPreview(_controller),
                )
              : Container(
                  child: CircularProgressIndicator(
                    valueColor: AlwaysStoppedAnimation<Color>(Colors.yellow),
                  ),
                ),
          Positioned(
            top: MediaQuery.of(context).size.height - 120,
            child: GestureDetector(
                onTap: () async {
                  await getImageFromGallery();
                  Navigator.push(
                      context,
                      MaterialPageRoute(
                          builder: (context) =>
                              DisplayPicture(image: galleryImage)));
                },
                child: Icon(
                  Icons.image,
                  color: Colors.white,
                  size: 60,
                )),
          ),
          Positioned(
              top: MediaQuery.of(context).size.height - 120,
              left: MediaQuery.of(context).size.width / 2.2,
              child: GestureDetector(
                  behavior: HitTestBehavior.translucent,
                  child: Container(
                      child: Icon(
                    Icons.camera,
                    color: Colors.white,
                    size: 60,
                  )),
                  onTap: () async {
                    // use a '/' separator so the file is created inside the temp directory
                    final path = (await getTemporaryDirectory()).path +
                        '/${DateTime.now()}.png';
                    try {
                      await _controller.takePicture(path);
                      Navigator.push(
                          context,
                          MaterialPageRoute(
                              builder: (context) =>
                                  DisplayPicture(imagePath: path)));
                    } catch (e) {
                      print('EEEE' + e.toString());
                    }
                  }))
        ]));
  }
}

class DisplayPicture extends StatelessWidget {
  String imagePath;
  File image;
  String _text;
  // File file = File(imagePath)
  DisplayPicture({this.imagePath, this.image});

  visualImageClassifier(File image) async {
    IamOptions options = await IamOptions(
            iamApiKey: "NRDjngCby2d-pSHOPyWQJxhuB6vOY2uOTCX6KV2BCfwB",
            url:
                "https://api.us-south.visual-recognition.watson.cloud.ibm.com/instances/ef286f4e-84c7-44e0-b63d-a6a49a142a30")
        .build();
    VisualRecognition visualRecognition = new VisualRecognition(
        iamOptions: options,
        language: Language.ENGLISH); // Language.ENGLISH is language response
    ClassifiedImages classifiedImages =
        await visualRecognition.classifyImageFile(image.path);
    print(classifiedImages
        .getImages()[0]
        .getClassifiers()[0]
        .getClasses()[0]
        .className);
    // print("${image.toString()}");
    // print('ACCESS'+options.accessToken);
    //print(options);
    //print("${image.path}");
    //print('CLASSIFICATION'+classifiedImages.customClasses.toString());
    // StreamBuilder(
    //     stream: StreamMyClassifier(
    //         image,
    //         'NRDjngCby2d-pSHOPyWQJxhuB6vOY2uOTCX6KV2BCfwB', 'CompostxLandfillxRecycle_2056123069'),
    //     builder: (context, snapshot) {
    //       if (snapshot.hasData) {
    //         _text = snapshot.data;
    //         print(_text);
    //       }
    //       else {
    //  print('NO DATA AVAILABLE');
    //       }
    //
    //     }
    // );
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
        body: Stack(children: [
      Center(
          child:
              image == null ? Image.file(File(imagePath)) : Image.file(image)),
      Positioned(
        top: MediaQuery.of(context).size.height / 2,
        child: FloatingActionButton(
            onPressed: () async {
              await visualImageClassifier(
                  image == null ? File(imagePath) : image);
            },
            child: Icon(Icons.arrow_right)),
      )
    ]));
  }
}

void main() {
  runApp(MyApp());
}

class MyApp extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      title: 'Flutter Demo',
      theme: ThemeData(
        primarySwatch: Colors.blue,
        visualDensity: VisualDensity.adaptivePlatformDensity,
      ),
      home: VisualPage(),
    );
  }
}