Refactored the code to use proper views in the UI and turned the camera instance into its own view with callbacks.

Also added the model for initial use; work is in progress to run TFLite inference from that view.
This commit is contained in:
Lucas Oskorep
2022-06-21 19:13:48 -04:00
parent 5e5a3f535f
commit 3dc77799a5
14 changed files with 1455 additions and 244 deletions
+186
View File
@@ -0,0 +1,186 @@
import 'package:flutter/material.dart';
import 'package:camera/camera.dart';
import 'package:tensordex_mobile/ui/poke_view.dart';
import '../utils/logger.dart';
import '../utils/recognition.dart';
import '../utils/stats.dart';
/// Home page of the application.
///
/// Stateful: it owns a [State] object (defined below) with fields that
/// affect how the page looks. This class itself is only the immutable
/// configuration for that state — it carries the values (here [title])
/// supplied by the parent (the App widget) and read by the State's build
/// method. Fields in a Widget subclass are always marked "final".
class TensordexHome extends StatefulWidget {
  const TensordexHome({super.key, required this.title});

  /// Title shown in the app bar.
  final String title;

  @override
  State<TensordexHome> createState() => _TensordexHomeState();
}
/// State for [TensordexHome]: holds the demo counter plus the latest
/// inference results/stats reported by the embedded [CameraView].
class _TensordexHomeState extends State<TensordexHome> {
  // Demo counter incremented by the floating action button.
  int _counter = 0;

  /// Results to draw bounding boxes
  List<Recognition>? results;

  /// Realtime stats
  Stats? stats;

  /// Scaffold Key
  GlobalKey<ScaffoldState> scaffoldKey = GlobalKey();

  /// Increments the counter and exercises each logger level.
  ///
  /// NOTE(review): the d/w/e calls all log the same message — presumably
  /// placeholder output for testing logger configuration; confirm before
  /// shipping.
  void _incrementCounter() {
    setState(() {
      _counter++;
      logger.d("Counter Incremented!");
      logger.w("Counter Incremented!");
      logger.e("Counter Incremented!");
    });
  }

  // Commented-out work-in-progress: camera handling that is being moved
  // into the dedicated CameraView widget (see resultsCallback/statsCallback
  // below). Kept here for reference during the refactor.
  // void onNewCameraSelected(CameraDescription cameraDescription) async {
  //   final previousCameraController = controller;
  //   // Instantiating the camera controller
  //   final CameraController cameraController = CameraController(
  //     cameraDescription,
  //     ResolutionPreset.high,
  //     imageFormatGroup: ImageFormatGroup.jpeg,
  //   );
  //
  //   // Dispose the previous controller
  //   await previousCameraController.dispose();
  //
  //   // Replace with the new controller
  //   if (mounted) {
  //     setState(() {
  //       controller = cameraController;
  //     });
  //   }
  //
  //   // Update UI if controller updated
  //   cameraController.addListener(() {
  //     if (mounted) setState(() {});
  //   });
  //
  //   // Initialize controller
  //   try {
  //     await cameraController.initialize();
  //   } on CameraException catch (e) {
  //     logger.e('Error initializing camera:', e);
  //   }
  //
  //   // Update the Boolean
  //   if (mounted) {
  //     setState(() {
  //       _isCameraInitialized = controller.value.isInitialized;
  //     });
  //   }
  // }

  // Commented-out work-in-progress: the old initState camera bootstrap,
  // superseded by CameraView's own initialization.
  // @override
  // void initState() {
  //   super.initState();
  //   WidgetsBinding.instance.addObserver(this);
  //   controller = CameraController(_cameras[0], ResolutionPreset.max);
  //   controller.initialize().then((_) {
  //     if (!mounted) {
  //       return;
  //     }
  //
  //     setState(() {onNewCameraSelected(_cameras[0]);});
  //   }).catchError((Object e) {
  //     if (e is CameraException) {
  //       switch (e.code) {
  //         case 'CameraAccessDenied':
  //           logger.w('User denied camera access.');
  //           controller.initialize().then((_) {
  //             if (!mounted) {
  //               return;
  //             }
  //             setState(() {});
  //           }).catchError((Object e) {
  //             if (e is CameraException) {
  //               switch (e.code) {
  //                 case 'CameraAccessDenied':
  //                   logger.i('User denied camera access.');
  //                   break;
  //                 default:
  //                   logger.i('Handle other errors.');
  //                   break;
  //               }
  //             }
  //           });
  //           break;
  //         default:
  //           logger.i('Handle other errors.');
  //           break;
  //       }
  //     }
  //   });
  // }

  @override
  void dispose() {
    // Camera teardown now lives in CameraView; nothing extra to release here.
    // controller.dispose();
    // WidgetsBinding.instance.removeObserver(this);
    super.dispose();
  }

  /// Callback to get inference results from [CameraView]
  void resultsCallback(List<Recognition> results) {
    setState(() {
      this.results = results;
    });
  }

  /// Callback to get inference stats from [CameraView]
  void statsCallback(Stats stats) {
    setState(() {
      this.stats = stats;
    });
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
        appBar: AppBar(
          title: Text(widget.title),
        ),
        body: Center(
          child: Column(
            mainAxisAlignment: MainAxisAlignment.center,
            children: <Widget>[
              const Text(
                'You have pushed the button this many times:',
              ),
              Text(
                '$_counter',
                style: Theme.of(context).textTheme.headline4,
              ),
              // Embedded camera view; it pushes recognitions and stats back
              // up through the two callbacks above.
              CameraView(
                  resultsCallback: resultsCallback,
                  statsCallback: statsCallback
              ),
            ],
          ),
        ),
        floatingActionButton: GestureDetector(
          onLongPress: () {
            _incrementCounter();
          },
          child: FloatingActionButton(
            onPressed: _incrementCounter,
            tooltip: 'Increment',
            child: const Icon(Icons.photo_camera),
          ), // This trailing comma makes auto-formatting nicer for build methods.
        ));
  }
}
+181
View File
@@ -0,0 +1,181 @@
import 'dart:isolate';
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import '../utils/logger.dart';
import '../utils/recognition.dart';
import '../utils/stats.dart';
/// [CameraView] sends each frame for inference
/// [CameraView] sends each frame for inference.
class CameraView extends StatefulWidget {
  /// Constructor
  const CameraView(
      {super.key, required this.resultsCallback, required this.statsCallback});

  /// Callback to pass results after inference to [HomeView]
  final Function(List<Recognition> recognitions) resultsCallback;

  /// Callback to inference stats to [HomeView]
  final Function(Stats stats) statsCallback;

  @override
  State<CameraView> createState() => _CameraViewState();
}
/// Owns the [CameraController], streams frames, and (eventually) runs
/// inference on each frame, reporting back through the widget's callbacks.
class _CameraViewState extends State<CameraView> with WidgetsBindingObserver {
  /// List of available cameras; populated in [initializeCamera].
  late List<CameraDescription> cameras;

  /// Controller for the active camera.
  ///
  /// Nullable rather than `late`: [initializeCamera] completes
  /// asynchronously, so the first [build] runs before the controller
  /// exists. The original `late` field made that first build throw a
  /// LateInitializationError (and its `== null` check on a non-nullable
  /// field could never fire).
  CameraController? cameraController;

  /// true when inference is ongoing; used to drop frames while busy.
  bool predicting = false;

  // /// Instance of [Classifier]
  // Classifier classifier;
  //
  // /// Instance of [IsolateUtils]
  // IsolateUtils isolateUtils;

  @override
  void initState() {
    initStateAsync();
    super.initState();
  }

  /// Async portion of initState: registers the lifecycle observer and kicks
  /// off camera (and, later, classifier/isolate) setup.
  void initStateAsync() async {
    WidgetsBinding.instance.addObserver(this);

    // Spawn a new isolate
    // isolateUtils = IsolateUtils();
    // await isolateUtils.start();

    // Camera initialization
    initializeCamera();

    // Create an instance of classifier to load model and labels
    // classifier = Classifier();

    // Initially predicting = false
    predicting = false;
  }

  /// Initializes the camera, publishes [cameraController] via setState so
  /// [build] swaps the placeholder for the preview, and starts the frame
  /// stream.
  void initializeCamera() async {
    cameras = await availableCameras();

    // cameras[0] for rear-camera
    final controller =
        CameraController(cameras[0], ResolutionPreset.low, enableAudio: false);

    try {
      await controller.initialize();
    } on CameraException catch (e) {
      logger.e('Error initializing camera:', e);
      return;
    }

    if (!mounted) {
      // Widget was removed while we awaited; release the camera.
      await controller.dispose();
      return;
    }

    // Trigger a rebuild now that the preview can be shown.
    setState(() {
      cameraController = controller;
    });

    // Stream of image passed to [onLatestImageAvailable] callback
    await controller.startImageStream(onLatestImageAvailable);

    /// previewSize is size of each image frame captured by controller
    ///
    /// 352x288 on iOS, 240p (320x240) on Android with ResolutionPreset.low
    // Size previewSize = cameraController.value.previewSize;
    //
    // /// previewSize is size of raw input image to the model
    // CameraViewSingleton.inputImageSize = previewSize;
    //
    // // the display width of image on screen is
    // // same as screenWidth while maintaining the aspectRatio
    // Size screenSize = MediaQuery.of(context).size;
    // CameraViewSingleton.screenSize = screenSize;
    // CameraViewSingleton.ratio = screenSize.width / previewSize.height;
  }

  @override
  Widget build(BuildContext context) {
    // Copy to a local so the nullable field promotes.
    final controller = cameraController;

    // Return empty container while the camera is not initialized.
    if (controller == null || !controller.value.isInitialized) {
      return Container();
    }

    return AspectRatio(
        aspectRatio: 1 / controller.value.aspectRatio,
        child: CameraPreview(controller));
  }

  /// Callback to receive each frame [CameraImage] and perform inference on
  /// it. Frames arriving while a previous inference is in flight are
  /// dropped.
  Future<void> onLatestImageAvailable(CameraImage cameraImage) async {
    // if (classifier.interpreter != null && classifier.labels != null) {
    // // If previous inference has not completed then return
    if (predicting) {
      return;
    }

    setState(() {
      predicting = true;
    });

    logger.i("RECEIVED IMAGE");
    // logger.i(cameraImage);
    // logger.i(cameraImage.height);
    // logger.i(cameraImage.width);
    // logger.i(cameraImage.planes[0]);
    //
    // var uiThreadTimeStart = DateTime.now().millisecondsSinceEpoch;
    //
    // // Data to be passed to inference isolate
    // var isolateData = IsolateData(
    //     cameraImage, classifier.interpreter.address, classifier.labels);
    //
    // // We could have simply used the compute method as well however
    // // it would be as in-efficient as we need to continuously passing data
    // // to another isolate.
    //
    // /// perform inference in separate isolate
    // Map<String, dynamic> inferenceResults = await inference(isolateData);
    //
    // var uiThreadInferenceElapsedTime =
    //     DateTime.now().millisecondsSinceEpoch - uiThreadTimeStart;
    //
    // // pass results to HomeView
    // widget.resultsCallback(inferenceResults["recognitions"]);
    //
    // // pass stats to HomeView
    // widget.statsCallback((inferenceResults["stats"] as Stats)
    //   ..totalElapsedTime = uiThreadInferenceElapsedTime);

    // set predicting to false to allow new frames
    setState(() {
      predicting = false;
    });
  }

  // /// Runs inference in another isolate
  // Future<Map<String, dynamic>> inference(IsolateData isolateData) async {
  //   ReceivePort responsePort = ReceivePort();
  //   isolateUtils.sendPort
  //       .send(isolateData..responsePort = responsePort.sendPort);
  //   var results = await responsePort.first;
  //   return results;
  // }

  /// Pauses the frame stream when the app is backgrounded and resumes it on
  /// return. Guards against the controller not existing yet and against
  /// stopping a stream that never started.
  @override
  void didChangeAppLifecycleState(AppLifecycleState state) async {
    final controller = cameraController;
    if (controller == null) {
      return;
    }
    switch (state) {
      case AppLifecycleState.paused:
        if (controller.value.isStreamingImages) {
          await controller.stopImageStream();
        }
        break;
      case AppLifecycleState.resumed:
        if (!controller.value.isStreamingImages) {
          await controller.startImageStream(onLatestImageAvailable);
        }
        break;
      default:
    }
  }

  @override
  void dispose() {
    WidgetsBinding.instance.removeObserver(this);
    // Null-aware: dispose may run before initialization ever completed.
    cameraController?.dispose();
    super.dispose();
  }
}
View File