Fixed the classifier and added a preliminary results view showing which Pokémon the camera is currently looking at.

Lucas Oskorep
2022-06-22 21:44:15 -04:00
parent ebfbfb503d
commit 9ec737db46
10 changed files with 305 additions and 355 deletions
+65 -129
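The main technical change in this file is moving inference off the UI isolate: the new PokedexView packs each camera frame into an MLIsolateData message (frame, interpreter address, labels), sends it to an MLIsolate over its SendPort, and awaits the reply on a fresh ReceivePort. The sketch below shows that request/response pattern in isolation; it is a minimal stand-alone example, and WorkRequest with its fields is an illustrative stand-in rather than this project's actual API.

import 'dart:isolate';

/// Illustrative request payload; the real MLIsolateData in this commit
/// carries a CameraImage, the interpreter's address, and the label list.
class WorkRequest {
  final String frameId;
  final SendPort responsePort; // where the worker should send its result
  WorkRequest(this.frameId, this.responsePort);
}

void _worker(SendPort readyPort) {
  final commandPort = ReceivePort();
  // Hand the command port back to the spawning isolate.
  readyPort.send(commandPort.sendPort);
  commandPort.listen((message) {
    final request = message as WorkRequest;
    // Real code would run TFLite inference here; this just echoes a map
    // shaped like the {"recognitions": ..., "stats": ...} result used below.
    request.responsePort
        .send({'recognitions': [], 'stats': {'frame': request.frameId}});
  });
}

Future<void> main() async {
  // Spawn the worker and wait for it to report its command port.
  final readyPort = ReceivePort();
  final worker = await Isolate.spawn(_worker, readyPort.sendPort);
  final commandPort = await readyPort.first as SendPort;

  // One round trip: send a request carrying our reply port, await the answer.
  final responsePort = ReceivePort();
  commandPort.send(WorkRequest('frame-001', responsePort.sendPort));
  final results = await responsePort.first;
  print(results);

  worker.kill(priority: Isolate.immediate);
}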
@@ -2,16 +2,16 @@ import 'dart:isolate';
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:tensordex_mobile/tflite/classifier.dart';
import 'package:tensordex_mobile/tflite/ml_isolate.dart';
import 'package:tflite_flutter/tflite_flutter.dart';
import 'package:tensordex_mobile/utils/image_utils.dart';
import '../tflite/classifier.dart';
import '../utils/logger.dart';
import '../utils/recognition.dart';
import '../utils/stats.dart';
import '../tflite/data/recognition.dart';
import '../tflite/data/stats.dart';
/// [CameraView] sends each frame for inference
class CameraView extends StatefulWidget {
/// [PokedexView] sends each frame for inference
class PokedexView extends StatefulWidget {
/// Callback to pass results after inference to [HomeView]
final Function(List<Recognition> recognitions) resultsCallback;
@@ -19,32 +19,26 @@ class CameraView extends StatefulWidget {
final Function(Stats stats) statsCallback;
/// Constructor
const CameraView(
const PokedexView(
{Key? key, required this.resultsCallback, required this.statsCallback})
: super(key: key);
@override
State<CameraView> createState() => _CameraViewState();
State<PokedexView> createState() => _PokedexViewState();
}
class _CameraViewState extends State<CameraView> with WidgetsBindingObserver {
/// List of available cameras
class _PokedexViewState extends State<PokedexView> with WidgetsBindingObserver {
late List<CameraDescription> cameras;
/// Controller
late CameraController cameraController;
Interpreter? interp;
late MLIsolate _mlIsolate;
/// true when inference is ongoing
bool predicting = false;
bool _cameraInitialized = false;
bool _classifierInitialized = false;
late Classifier classy;
// /// Instance of [Classifier]
// Classifier classifier;
//
// /// Instance of [IsolateUtils]
// IsolateUtils isolateUtils;
late Interpreter interpreter;
late Classifier classifier;
@override
void initState() {
@@ -54,40 +48,21 @@ class _CameraViewState extends State<CameraView> with WidgetsBindingObserver {
void initStateAsync() async {
WidgetsBinding.instance.addObserver(this);
// Spawn a new isolate
// isolateUtils = IsolateUtils();
// await isolateUtils.start();
// Camera initialization
_mlIsolate = MLIsolate();
await _mlIsolate.start();
initializeCamera();
// final gpuDelegateV2 = GpuDelegateV2(
// options: GpuDelegateOptionsV2(
// isPrecisionLossAllowed: false,
// inferencePreference: TfLiteGpuInferenceUsage.fastSingleAnswer,
// inferencePriority1: TfLiteGpuInferencePriority.minLatency,
// inferencePriority2: TfLiteGpuInferencePriority.auto,
// inferencePriority3: TfLiteGpuInferencePriority.auto,
// ));
logger.e("CREATING THE INTERPRETOR");
var interpreterOptions = InterpreterOptions();//..addDelegate(gpuDelegateV2);
interp = await Interpreter.fromAsset('efficientnet_v2s.tflite',
options: interpreterOptions);
logger.e("CREATING THE INTERPRETOR");
classy = Classifier(interpreter: interp);
logger.i(interp?.getOutputTensors());
// Create an instance of classifier to load model and labels
// classifier = Classifier();
// Initially predicting = false
initializeModel();
predicting = false;
}
void initializeModel() async {
var interpreterOptions = InterpreterOptions()..threads = 8;
interpreter = await Interpreter.fromAsset('efficientnet_v2s.tflite',
options: interpreterOptions);
classifier = Classifier(interpreter: interpreter);
_classifierInitialized = true;
}
/// Initializes the camera by setting [cameraController]
void initializeCamera() async {
cameras = await availableCameras();
@@ -97,101 +72,62 @@ class _CameraViewState extends State<CameraView> with WidgetsBindingObserver {
CameraController(cameras[0], ResolutionPreset.low, enableAudio: false);
cameraController.initialize().then((_) async {
/// previewSize is the size of each image frame captured by the controller
/// 352x288 on iOS, 240p (320x240) on Android with ResolutionPreset.low
// Stream of image passed to [onLatestImageAvailable] callback
await cameraController.startImageStream(onLatestImageAvailable);
/// previewSize is the size of each image frame captured by the controller
///
/// 352x288 on iOS, 240p (320x240) on Android with ResolutionPreset.low
// Size previewSize = cameraController.value.previewSize;
//
// /// previewSize is size of raw input image to the model
// CameraViewSingleton.inputImageSize = previewSize;
//
// // the display width of image on screen is
// // same as screenWidth while maintaining the aspectRatio
// Size screenSize = MediaQuery.of(context).size;
// CameraViewSingleton.screenSize = screenSize;
// CameraViewSingleton.ratio = screenSize.width / previewSize.height;
setState(() {
_cameraInitialized = true;
});
});
}
/// Callback that receives each frame [CameraImage] and performs inference on it
onLatestImageAvailable(CameraImage cameraImage) async {
if (_classifierInitialized) {
if (predicting) {
return;
}
setState(() {
predicting = true;
});
var results = await inference(MLIsolateData(
cameraImage, classifier.interpreter.address, classifier.labels));
if (results.containsKey("recognitions")) {
widget.resultsCallback(results["recognitions"]);
}
if (results.containsKey("stats")) {
widget.statsCallback(results["stats"]);
}
logger.i(results);
setState(() {
predicting = false;
});
}
}
@override
Widget build(BuildContext context) {
// Return empty container while the camera is not initialized
if (!cameraController.value.isInitialized) {
if (!_cameraInitialized) {
return Container();
}
return AspectRatio(
aspectRatio: 1/cameraController.value.aspectRatio,
aspectRatio: 1 / cameraController.value.aspectRatio,
child: CameraPreview(cameraController));
}
/// Callback that receives each frame [CameraImage] and performs inference on it
onLatestImageAvailable(CameraImage cameraImage) async {
// if (classifier.interpreter != null && classifier.labels != null) {
// // If previous inference has not completed then return
if (predicting) {
return;
}
setState(() {
predicting = true;
});
logger.i("RECIEVED IMAGE");
logger.i(cameraImage.format.group);
logger.i(cameraImage);
var converted = ImageUtils.convertCameraImage(cameraImage);
if (converted != null){
var result = classy.predict(converted);
logger.e("PREDICTED IMAGE");
logger.i(result);
}
// logger.i(cameraImage);
// logger.i(cameraImage.height);
// logger.i(cameraImage.width);
// logger.i(cameraImage.planes[0]);
//
// var uiThreadTimeStart = DateTime.now().millisecondsSinceEpoch;
//
// // Data to be passed to inference isolate
// var isolateData = IsolateData(
// cameraImage, classifier.interpreter.address, classifier.labels);
//
// // We could have simply used the compute method as well; however,
// it would be just as inefficient, since we would need to continuously pass data
// to another isolate.
//
// /// perform inference in separate isolate
// Map<String, dynamic> inferenceResults = await inference(isolateData);
//
// var uiThreadInferenceElapsedTime =
// DateTime.now().millisecondsSinceEpoch - uiThreadTimeStart;
//
// // pass results to HomeView
// widget.resultsCallback(inferenceResults["recognitions"]);
//
// // pass stats to HomeView
// widget.statsCallback((inferenceResults["stats"] as Stats)
// ..totalElapsedTime = uiThreadInferenceElapsedTime);
// set predicting to false to allow new frames
setState(() {
predicting = false;
});
/// Runs inference in another isolate
Future<Map<String, dynamic>> inference(MLIsolateData mlIsolateData) async {
ReceivePort responsePort = ReceivePort();
_mlIsolate.sendPort
.send(mlIsolateData..responsePort = responsePort.sendPort);
var results = await responsePort.first;
return results;
}
// /// Runs inference in another isolate
// Future<Map<String, dynamic>> inference(IsolateData isolateData) async {
// ReceivePort responsePort = ReceivePort();
// isolateUtils.sendPort
// .send(isolateData..responsePort = responsePort.sendPort);
// var results = await responsePort.first;
// return results;
// }
@override
void didChangeAppLifecycleState(AppLifecycleState state) async {
switch (state) {