Refactored the code to use proper views in the UI and turned the camera instance into its own view with callbacks.

Also added the model for initial use, and working to create inferences from TFLite using that view.
This commit is contained in:
Lucas Oskorep
2022-06-21 19:13:48 -04:00
parent 5e5a3f535f
commit 3dc77799a5
14 changed files with 1455 additions and 244 deletions
+10
View File
@@ -0,0 +1,10 @@
import 'dart:ui';
/// Shared, statically-held geometry for the camera preview UI.
///
/// The static fields are written elsewhere (presumably by the camera view
/// when the preview starts — confirm at the writer); this class only derives
/// [actualPreviewSize] from them.
class CameraViewSingleton {
  /// Height/width ratio used to scale the preview to the screen width.
  static double ratio = 0.0;

  /// Size of the device screen in logical pixels.
  static Size screenSize = const Size(0, 0);

  /// Size of the input image (set externally; not read within this class).
  static Size inputImageSize = const Size(0, 0);

  /// The preview size as rendered: full screen width, with the height
  /// obtained by scaling that width by [ratio].
  static Size get actualPreviewSize {
    final width = screenSize.width;
    return Size(width, width * ratio);
  }
}
+86
View File
@@ -0,0 +1,86 @@
import 'dart:io';
import 'package:camera/camera.dart';
import 'package:image/image.dart' as image_lib;
import 'package:logger/logger.dart';
import 'package:path_provider/path_provider.dart';
import 'logger.dart';
/// ImageUtils
/// Helper methods for converting camera frames to [image_lib.Image]s and
/// persisting them for debugging.
class ImageUtils {
  /// Converts a [CameraImage] to an [image_lib.Image] in RGB format.
  ///
  /// Returns `null` when the frame's pixel format is neither YUV420 nor
  /// BGRA8888, the two formats handled below.
  static image_lib.Image? convertCameraImage(CameraImage cameraImage) {
    if (cameraImage.format.group == ImageFormatGroup.yuv420) {
      return convertYUV420ToImage(cameraImage);
    } else if (cameraImage.format.group == ImageFormatGroup.bgra8888) {
      return convertBGRA8888ToImage(cameraImage);
    } else {
      return null;
    }
  }

  /// Converts a [CameraImage] in BGRA8888 format to an [image_lib.Image]
  /// in RGB format.
  static image_lib.Image convertBGRA8888ToImage(CameraImage cameraImage) {
    return image_lib.Image.fromBytes(
      cameraImage.planes[0].width ?? 0,
      cameraImage.planes[0].height ?? 0,
      cameraImage.planes[0].bytes,
      format: image_lib.Format.bgra,
    );
  }

  /// Converts a [CameraImage] in YUV420 format to an [image_lib.Image]
  /// in RGB format.
  static image_lib.Image convertYUV420ToImage(CameraImage cameraImage) {
    final int width = cameraImage.width;
    final int height = cameraImage.height;
    final int uvRowStride = cameraImage.planes[1].bytesPerRow;
    final int uvPixelStride = cameraImage.planes[1].bytesPerPixel ?? 0;
    final image = image_lib.Image(width, height);
    // Rows in the outer loop so the luma plane and the destination buffer
    // are both written sequentially (the original column-major order
    // produced identical pixels but strided memory access).
    for (int h = 0; h < height; h++) {
      for (int w = 0; w < width; w++) {
        // U/V planes are subsampled 2x2, hence the (w/2, h/2) lookup.
        final int uvIndex =
            uvPixelStride * (w / 2).floor() + uvRowStride * (h / 2).floor();
        final int index = h * width + w;
        final y = cameraImage.planes[0].bytes[index];
        final u = cameraImage.planes[1].bytes[uvIndex];
        final v = cameraImage.planes[2].bytes[uvIndex];
        image.data[index] = ImageUtils.yuv2rgb(y, u, v);
      }
    }
    return image;
  }

  /// Converts a single YUV pixel to a packed 32-bit pixel value with alpha
  /// in the top byte, then blue, green, and red in the lowest byte — the
  /// layout expected by [image_lib.Image.data].
  static int yuv2rgb(int y, int u, int v) {
    // Fixed-point approximation of the YUV -> RGB conversion.
    int r = (y + v * 1436 / 1024 - 179).round();
    int g = (y - u * 46549 / 131072 + 44 - v * 93604 / 131072 + 91).round();
    int b = (y + u * 1814 / 1024 - 227).round();
    // Clip each channel to [0, 255].
    r = r.clamp(0, 255);
    g = g.clamp(0, 255);
    b = b.clamp(0, 255);
    return 0xff000000 |
        ((b << 16) & 0xff0000) |
        ((g << 8) & 0xff00) |
        (r & 0xff);
  }

  /// Encodes [image] as JPEG and writes it to the temporary directory as
  /// `out<i>.jpg`.
  ///
  /// Returns a [Future] (rather than the original `void async`) so callers
  /// can await completion and observe encode/write errors instead of having
  /// them silently dropped.
  static Future<void> saveImage(image_lib.Image image, [int i = 0]) async {
    final List<int> jpeg = image_lib.JpegEncoder().encodeImage(image);
    final appDir = await getTemporaryDirectory();
    final appPath = appDir.path;
    final fileOnDevice = File('$appPath/out$i.jpg');
    await fileOnDevice.writeAsBytes(jpeg, flush: true);
    logger.i('Saved $appPath/out$i.jpg');
  }
}
+5
View File
@@ -0,0 +1,5 @@
import 'package:logger/logger.dart';
/// Application-wide [Logger] instance, shared via `import 'logger.dart'`.
///
/// Uses the default [PrettyPrinter] output format.
var logger = Logger(
printer: PrettyPrinter(),
);
+21
View File
@@ -0,0 +1,21 @@
/// Represents the recognition output from the model
/// Represents a single recognition result output by the model.
///
/// Immutable value class; the private-field-plus-trivial-getter pattern of
/// the original is replaced by public `final` fields (per Effective Dart),
/// which keeps the same read-only `id`/`label`/`score` interface and the
/// same positional constructor.
class Recognition {
  /// Index of the result.
  final int id;

  /// Label of the result.
  final String label;

  /// Confidence in the range [0.0, 1.0].
  final double score;

  const Recognition(this.id, this.label, this.score);

  @override
  String toString() {
    return 'Recognition(id: $id, label: $label, score: $score)';
  }
}
+23
View File
@@ -0,0 +1,23 @@
/// Bundles different elapsed times
/// Collects the elapsed-time measurements of one inference pass.
///
/// Fields are deliberately mutable so they can be filled in incrementally
/// as the different stages complete.
class Stats {
  /// Total time taken in the isolate where the inference runs.
  int totalPredictTime;

  /// [totalPredictTime] plus the communication overhead between the main
  /// isolate and the inference isolate.
  int totalElapsedTime;

  /// Time for which the inference itself runs.
  int inferenceTime;

  /// Time taken to pre-process the image.
  int preProcessingTime;

  Stats(this.totalPredictTime, this.totalElapsedTime, this.inferenceTime,
      this.preProcessingTime);

  @override
  String toString() =>
      'Stats{totalPredictTime: $totalPredictTime, totalElapsedTime: $totalElapsedTime, inferenceTime: $inferenceTime, preProcessingTime: $preProcessingTime}';
}