Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

capturing camera frame #69

Open
wants to merge 3 commits into
base: main
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -2,13 +2,17 @@

import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.util.Size;

import androidx.camera.core.AspectRatio;
import androidx.camera.core.Camera;
import androidx.camera.core.CameraControl;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
@@ -18,7 +22,12 @@
import com.google.common.util.concurrent.ListenableFuture;
import com.ultralytics.ultralytics_yolo.predict.Predictor;

import java.io.ByteArrayOutputStream;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;


public class CameraPreview {
@@ -30,6 +39,8 @@ public class CameraPreview {
private Activity activity;
private PreviewView mPreviewView;
private boolean busy = false;
private boolean shouldCaptureFrame = false;
BlockingQueue<byte[]> capturedFrameQueue = new LinkedBlockingQueue<>();

public CameraPreview(Context context) {
this.context = context;
@@ -71,6 +82,17 @@ private void bindPreview(int facing) {
imageAnalysis.setAnalyzer(Runnable::run, imageProxy -> {
predictor.predict(imageProxy, facing == CameraSelector.LENS_FACING_FRONT);

if (shouldCaptureFrame) {
shouldCaptureFrame = false;

try {
final byte[] data = toCapturedFrameData(imageProxy);
capturedFrameQueue.put(data);
} catch (InterruptedException ex) {
ex.printStackTrace();
}
}

//clear stream for next image
imageProxy.close();
});
@@ -100,4 +122,42 @@ public void setCameraFacing(int facing) {
public void setScaleFactor(double factor) {
cameraControl.setZoomRatio((float)factor);
}

/**
 * Requests that the next analyzed camera frame be captured and returned as
 * PNG-encoded bytes.
 *
 * Arms the analyzer (via {@code shouldCaptureFrame}); the analyzer enqueues
 * the encoded frame on {@code capturedFrameQueue}, which this method awaits
 * on a pooled thread so the caller is never blocked.
 *
 * @param timeoutSec maximum seconds to wait for a frame before failing
 * @return a future completed with the PNG bytes, or completed exceptionally
 *         on timeout or interruption
 */
public CompletableFuture<byte[]> requestCaptureVideo(int timeoutSec) {
    this.shouldCaptureFrame = true;

    CompletableFuture<byte[]> future = new CompletableFuture<>();

    CompletableFuture.runAsync(() -> {
        try {
            // Block (off the caller's thread) until the analyzer delivers a
            // frame or the timeout elapses; poll() returns null on timeout.
            final byte[] captured = capturedFrameQueue.poll(timeoutSec, TimeUnit.SECONDS);
            if (captured != null) {
                future.complete(captured);
            } else {
                // Use an Exception, not java.lang.Error: Error is reserved for
                // unrecoverable JVM conditions and should not signal an
                // ordinary capture timeout.
                future.completeExceptionally(new IllegalStateException("Buffer is null"));
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag so the pool thread's interruption
            // status is not silently swallowed.
            Thread.currentThread().interrupt();
            future.completeExceptionally(e);
        }
    });

    return future;
}

/**
 * Converts a camera frame to upright PNG-encoded bytes.
 *
 * Uses the frame's reported rotation instead of a hard-coded 90°, so the
 * captured image is upright in every device orientation, not just portrait.
 *
 * @param imageProxy the CameraX frame to encode (not closed here; the
 *                   analyzer closes it)
 * @return PNG bytes of the rotated frame
 */
private byte[] toCapturedFrameData(ImageProxy imageProxy) {
    Bitmap bitmap = ImageUtils.toBitmap(imageProxy);

    // Read the actual rotation needed to display the frame upright; the
    // previous hard-coded 90 was only correct for one orientation.
    final int rotationDegrees = imageProxy.getImageInfo().getRotationDegrees();
    // A 90°/270° rotation swaps the output's width and height.
    final boolean swapDimensions = rotationDegrees == 90 || rotationDegrees == 270;
    final int outWidth = swapDimensions ? bitmap.getHeight() : bitmap.getWidth();
    final int outHeight = swapDimensions ? bitmap.getWidth() : bitmap.getHeight();

    final Bitmap outputBitmap = Bitmap.createBitmap(outWidth, outHeight, Bitmap.Config.ARGB_8888);
    // NOTE(review): assumes getTransformationMatrix(srcW, srcH, dstW, dstH,
    // rotation, maintainAspect) — matches the original call; confirm against
    // ImageUtils.
    final Matrix transformationMatrix = ImageUtils.getTransformationMatrix(
            bitmap.getWidth(), bitmap.getHeight(),
            outWidth, outHeight,
            rotationDegrees, false);

    Canvas canvas = new Canvas(outputBitmap);
    canvas.drawBitmap(bitmap, transformationMatrix, null);

    // PNG is lossless; the quality argument (100) is ignored for PNG.
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    outputBitmap.compress(Bitmap.CompressFormat.PNG, 100, outputStream);

    return outputStream.toByteArray();
}
}
Original file line number Diff line number Diff line change
@@ -109,6 +109,9 @@ public void onMethodCall(@NonNull MethodCall call, @NonNull MethodChannel.Result
case "setZoomRatio":
setScaleFactor(call, result);
break;
case "captureCamera":
requestCameraCapture(call, result);
break;
default:
result.notImplemented();
break;
@@ -347,4 +350,15 @@ private void setScaleFactor(MethodCall call, MethodChannel.Result result) {
cameraPreview.setScaleFactor(factor);
}
}

/**
 * Handles the "captureCamera" method-channel call: captures the next camera
 * frame and replies with its PNG bytes, or with a "CameraCaptureError".
 *
 * @param call   the incoming call; optional integer argument "timeoutSec"
 *               (defaults to 3 seconds)
 * @param result the Flutter reply callback
 */
private void requestCameraCapture(MethodCall call, MethodChannel.Result result) {
    // Read the argument once; non-Integer or absent values fall back to 3s.
    final Object rawTimeout = call.argument("timeoutSec");
    final int timeoutSec = rawTimeout instanceof Integer ? (Integer) rawTimeout : 3;

    // NOTE(review): both callbacks below run on a CompletableFuture pool
    // thread; Flutter expects Result callbacks on the platform main thread —
    // confirm, or post these via a main-looper Handler.
    cameraPreview.requestCaptureVideo(timeoutSec)
            .thenAccept(result::success)
            .exceptionally(error -> {
                result.error("CameraCaptureError", error.getMessage(), null);
                return null;
            });
}
}
54 changes: 54 additions & 0 deletions ios/Classes/MethodCallHandler.swift
Original file line number Diff line number Diff line change
@@ -16,6 +16,10 @@ class MethodCallHandler: VideoCaptureDelegate, InferenceTimeListener, ResultsLis
private var predictor: Predictor?
private let videoCapture: VideoCapture

private var shouldCaptureFrame: Bool = false
private var capturedFrameData: Data?
private let capturedFrameSemaphore = DispatchSemaphore(value: 0)

init(binaryMessenger: FlutterBinaryMessenger, videoCapture: VideoCapture) {
resultStreamHandler = ResultStreamHandler()
let resultsEventChannel = FlutterEventChannel(
@@ -55,6 +59,8 @@ class MethodCallHandler: VideoCaptureDelegate, InferenceTimeListener, ResultsLis
closeCamera(args: args, result: result)
} else if call.method == "detectImage" || call.method == "classifyImage" {
predictOnImage(args: args, result: result)
} else if call.method == "captureCamera" {
requestCameraCapture(args: args, result: result)
}
}

@@ -63,6 +69,28 @@ class MethodCallHandler: VideoCaptureDelegate, InferenceTimeListener, ResultsLis
) {
predictor?.predict(
sampleBuffer: sampleBuffer, onResultsListener: self, onInferenceTime: self, onFpsRate: self)

if shouldCaptureFrame {
shouldCaptureFrame = false

captureFrameData(sampleBuffer: sampleBuffer)
}
}

/// Encodes the given sample buffer as PNG data into `capturedFrameData` and
/// signals `capturedFrameSemaphore` so the waiting `requestCameraCapture`
/// can proceed.
///
/// The semaphore is signaled on every path (via `defer`) — previously a
/// failed `guard` returned without signaling, forcing the waiter to burn the
/// entire timeout and report TIMEOUT instead of a prompt NO_IMAGE.
private func captureFrameData(sampleBuffer: CMSampleBuffer) {
    // Always wake the waiter, even on failure; `capturedFrameData` stays nil
    // in that case and the waiter reports NO_IMAGE.
    defer { capturedFrameSemaphore.signal() }

    guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    else { return }

    let ciImage = CIImage(cvImageBuffer: imageBuffer)

    let context = CIContext()
    guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent)
    else { return }

    // Write the data BEFORE the deferred signal fires so the waiter observes it.
    let uiImage = UIImage(cgImage: cgImage)
    self.capturedFrameData = uiImage.pngData()
}

private func loadModel(args: [String: Any], result: @escaping FlutterResult) async {
@@ -163,6 +191,32 @@ class MethodCallHandler: VideoCaptureDelegate, InferenceTimeListener, ResultsLis
})
}

/// Handles the "captureCamera" channel call: arms frame capture, waits (off
/// the main thread) for the next frame's PNG data, and replies on the main
/// thread with the data or a TIMEOUT / NO_IMAGE error.
///
/// - Parameters:
///   - args: optional "timeoutSec" Int (defaults to 3 seconds).
///   - result: the Flutter reply callback; delivered on the main queue,
///     since channel callbacks must run on the platform (main) thread.
private func requestCameraCapture(args: [String: Any], result: @escaping FlutterResult) {
    let timeoutSec = args["timeoutSec"] as? Int ?? 3

    shouldCaptureFrame = true

    // Wait on a background queue so the main thread is never blocked.
    DispatchQueue.global(qos: .background).async {
        let timeoutResult = self.capturedFrameSemaphore.wait(
            timeout: .now() + DispatchTimeInterval.seconds(timeoutSec))

        // Take and clear the buffer even on timeout, so a late-arriving frame
        // is not handed to a subsequent, unrelated capture request.
        let capturedCameraImage = self.capturedFrameData
        self.capturedFrameData = nil

        // Deliver the FlutterResult back on the main thread — invoking it
        // from this background queue is not safe for platform channels.
        DispatchQueue.main.async {
            if timeoutResult == .timedOut {
                result(
                    FlutterError(
                        code: "TIMEOUT", message: "Timeout to capture the camera image", details: nil))
            } else if let capturedCameraImage = capturedCameraImage {
                result(capturedCameraImage)
            } else {
                result(FlutterError(code: "NO_IMAGE", message: "No image captured", details: nil))
            }
        }
    }
}

func on(predictions: [[String: Any]]) {
resultStreamHandler.sink(objects: predictions)
}
5 changes: 5 additions & 0 deletions lib/camera_preview/ultralytics_yolo_camera_controller.dart
Original file line number Diff line number Diff line change
@@ -63,6 +63,11 @@ class UltralyticsYoloCameraController
await _ultralyticsYoloPlatform.startCamera();
}

/// Captures the next camera frame, waiting up to [timeoutSec] seconds,
/// and returns its encoded image bytes (null if unavailable).
Future<Uint8List?> captureCamera({int timeoutSec = 3}) =>
    _ultralyticsYoloPlatform.captureCamera(timeoutSec);

/// Stops the camera
Future<void> pauseLivePrediction() async {
await _ultralyticsYoloPlatform.pauseLivePrediction();
4 changes: 4 additions & 0 deletions lib/ultralytics_yolo_platform_channel.dart
Original file line number Diff line number Diff line change
@@ -68,6 +68,10 @@ class PlatformChannelUltralyticsYolo implements UltralyticsYoloPlatform {
.invokeMethod<String>('startCamera')
.catchError((dynamic e) => e.toString());

@override
Future<Uint8List?> captureCamera(int timeoutSec) async {
  // Forward the timeout to the platform side; it replies with encoded bytes.
  return methodChannel.invokeMethod<Uint8List>(
    'captureCamera',
    {'timeoutSec': timeoutSec},
  );
}

@override
Future<String?> pauseLivePrediction() => methodChannel
.invokeMethod<String>('pauseLivePrediction')
7 changes: 7 additions & 0 deletions lib/ultralytics_yolo_platform_interface.dart
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import 'dart:typed_data';

import 'package:plugin_platform_interface/plugin_platform_interface.dart';
import 'package:ultralytics_yolo/predict/classify/classification_result.dart';
import 'package:ultralytics_yolo/predict/detect/detected_object.dart';
@@ -67,6 +69,11 @@ abstract class UltralyticsYoloPlatform extends PlatformInterface {
throw UnimplementedError('startCamera has not been implemented.');
}

/// Captures the next camera frame as encoded image bytes.
///
/// Platform implementations must override this; the base class only throws.
Future<Uint8List?> captureCamera(int timeoutSec) =>
    throw UnimplementedError('captureCamera has not been implemented.');

/// Start the live prediction.
Future<String?> pauseLivePrediction() {
throw UnimplementedError('pauseLivePrediction has not been implemented.');