Refactored the UI to use proper views and turned the camera instance into its own view with callbacks.
Also added the model for initial use; working on creating inferences with TFLite from that view.
This commit is contained in:
@@ -0,0 +1,181 @@
|
||||
import 'dart:isolate';
|
||||
|
||||
import 'package:camera/camera.dart';
|
||||
import 'package:flutter/material.dart';
|
||||
|
||||
import '../utils/logger.dart';
|
||||
import '../utils/recognition.dart';
|
||||
import '../utils/stats.dart';
|
||||
|
||||
/// Widget hosting the device camera; every captured frame is handed off
/// for model inference, and results flow back to the parent via callbacks.
class CameraView extends StatefulWidget {
  /// Invoked with the list of [Recognition]s produced for a frame so the
  /// parent ([HomeView]) can render them.
  final Function(List<Recognition> recognitions) resultsCallback;

  /// Invoked with per-frame inference [Stats] for the parent ([HomeView]).
  final Function(Stats stats) statsCallback;

  /// Creates a camera view; both callbacks are required.
  const CameraView(
      {Key? key, required this.resultsCallback, required this.statsCallback})
      : super(key: key);

  @override
  State<CameraView> createState() {
    return _CameraViewState();
  }
}
|
||||
|
||||
class _CameraViewState extends State<CameraView> with WidgetsBindingObserver {
  /// List of available cameras; populated in [initializeCamera].
  late List<CameraDescription> cameras;

  /// Camera controller.
  ///
  /// Nullable (rather than `late`) because [build] runs before the async
  /// [initializeCamera] completes; reading a `late` field there would throw
  /// a LateInitializationError, and a null check after dereferencing it
  /// (as the original code did) can never run.
  CameraController? cameraController;

  /// true while an inference pass is in flight; used to drop frames.
  bool predicting = false;

  // /// Instance of [Classifier]
  // Classifier classifier;
  //
  // /// Instance of [IsolateUtils]
  // IsolateUtils isolateUtils;

  @override
  void initState() {
    initStateAsync();
    super.initState();
  }

  /// Async portion of [initState]: registers the lifecycle observer and
  /// starts camera setup. Deliberately not awaited — [initState] cannot
  /// be async in Flutter.
  void initStateAsync() async {
    WidgetsBinding.instance.addObserver(this);

    // Spawn a new isolate
    // isolateUtils = IsolateUtils();
    // await isolateUtils.start();

    // Camera initialization
    initializeCamera();

    // Create an instance of classifier to load model and labels
    // classifier = Classifier();

    // Initially predicting = false
    predicting = false;
  }

  /// Initializes the camera, starts the image stream feeding
  /// [onLatestImageAvailable], and publishes [cameraController] via
  /// [setState] so the preview can render.
  void initializeCamera() async {
    cameras = await availableCameras();

    // cameras[0] for rear-camera
    final controller =
        CameraController(cameras[0], ResolutionPreset.low, enableAudio: false);

    await controller.initialize();

    // Stream of image frames passed to [onLatestImageAvailable] callback.
    await controller.startImageStream(onLatestImageAvailable);

    // The widget may have been disposed while we were awaiting; calling
    // setState then would throw, and the controller must be released.
    if (!mounted) {
      await controller.dispose();
      return;
    }

    setState(() {
      cameraController = controller;
    });

    /// previewSize is size of each image frame captured by controller
    ///
    /// 352x288 on iOS, 240p (320x240) on Android with ResolutionPreset.low
    // Size previewSize = cameraController.value.previewSize;
    //
    // /// previewSize is size of raw input image to the model
    // CameraViewSingleton.inputImageSize = previewSize;
    //
    // // the display width of image on screen is
    // // same as screenWidth while maintaining the aspectRatio
    // Size screenSize = MediaQuery.of(context).size;
    // CameraViewSingleton.screenSize = screenSize;
    // CameraViewSingleton.ratio = screenSize.width / previewSize.height;
  }

  @override
  Widget build(BuildContext context) {
    // Copy to a local so the nullable field promotes; the null check must
    // come BEFORE touching .value (the original checked `== null` after
    // dereferencing, which could never catch an uninitialized controller).
    final controller = cameraController;
    if (controller == null || !controller.value.isInitialized) {
      // Placeholder while the camera is not yet ready.
      return Container();
    }

    return AspectRatio(
        aspectRatio: 1 / controller.value.aspectRatio,
        child: CameraPreview(controller));
  }

  /// Callback receiving each frame [CameraImage] to perform inference on.
  onLatestImageAvailable(CameraImage cameraImage) async {
    // if (classifier.interpreter != null && classifier.labels != null) {
    // If the previous inference has not completed, drop this frame.
    if (predicting) {
      return;
    }
    setState(() {
      predicting = true;
    });
    logger.i("RECEIVED IMAGE");
    // logger.i(cameraImage);
    // logger.i(cameraImage.height);
    // logger.i(cameraImage.width);
    // logger.i(cameraImage.planes[0]);
    //
    // var uiThreadTimeStart = DateTime.now().millisecondsSinceEpoch;
    //
    // // Data to be passed to inference isolate
    // var isolateData = IsolateData(
    //     cameraImage, classifier.interpreter.address, classifier.labels);
    //
    // // We could have simply used the compute method as well however
    // // it would be as in-efficient as we need to continuously passing data
    // // to another isolate.
    //
    // /// perform inference in separate isolate
    // Map<String, dynamic> inferenceResults = await inference(isolateData);
    //
    // var uiThreadInferenceElapsedTime =
    //     DateTime.now().millisecondsSinceEpoch - uiThreadTimeStart;
    //
    // // pass results to HomeView
    // widget.resultsCallback(inferenceResults["recognitions"]);
    //
    // // pass stats to HomeView
    // widget.statsCallback((inferenceResults["stats"] as Stats)
    //   ..totalElapsedTime = uiThreadInferenceElapsedTime);

    // Allow new frames; guard against the widget having been disposed
    // while this (async) callback was running.
    if (mounted) {
      setState(() {
        predicting = false;
      });
    }
  }

  // /// Runs inference in another isolate
  // Future<Map<String, dynamic>> inference(IsolateData isolateData) async {
  //   ReceivePort responsePort = ReceivePort();
  //   isolateUtils.sendPort
  //       .send(isolateData..responsePort = responsePort.sendPort);
  //   var results = await responsePort.first;
  //   return results;
  // }

  @override
  void didChangeAppLifecycleState(AppLifecycleState state) async {
    final controller = cameraController;
    // Nothing to pause/resume before the camera finishes initializing.
    if (controller == null) {
      return;
    }
    switch (state) {
      case AppLifecycleState.paused:
        // Only stop when actually streaming, and await so the camera is
        // fully released before the app is backgrounded.
        if (controller.value.isStreamingImages) {
          await controller.stopImageStream();
        }
        break;
      case AppLifecycleState.resumed:
        if (!controller.value.isStreamingImages) {
          await controller.startImageStream(onLatestImageAvailable);
        }
        break;
      default:
    }
  }

  @override
  void dispose() {
    WidgetsBinding.instance.removeObserver(this);
    // Null-aware: initialization may never have completed.
    cameraController?.dispose();
    super.dispose();
  }
}
|
||||
Reference in New Issue
Block a user