diff --git a/lib/flutter_camera_processing.dart b/lib/flutter_camera_processing.dart
index ac7f831..25f6e41 100644
--- a/lib/flutter_camera_processing.dart
+++ b/lib/flutter_camera_processing.dart
@@ -38,12 +38,12 @@ class FlutterCameraProcessing {
           CameraImage image) async =>
       await _inference(OpenCVIsolateData(image));
 
-  static Uint32List opencvProcessStream(
+  static Uint8List opencvProcessStream(
           Uint8List bytes, int width, int height) =>
-      Uint32List.fromList(bindings
+      Uint8List.fromList(bindings
           .opencvProcessStream(bytes.allocatePointer(), width, height)
-          .cast<ffi.Uint32>()
-          .asTypedList(width * height));
+          .cast<ffi.Uint8>()
+          .asTypedList(width * height * 3));
 
   static opencvProcessImage(String input, String output) =>
       bindings.opencvProcessImage(
diff --git a/lib/generated_bindings.dart b/lib/generated_bindings.dart
index dc15bb3..5624370 100644
--- a/lib/generated_bindings.dart
+++ b/lib/generated_bindings.dart
@@ -38,7 +38,7 @@ class GeneratedBindings {
   /// @param width Image width.
   /// @param height Image height.
   /// @return Image bytes.
-  ffi.Pointer<ffi.Uint32> opencvProcessStream(
+  ffi.Pointer<ffi.Uint8> opencvProcessStream(
    ffi.Pointer<ffi.Char> bytes,
    int width,
    int height,
@@ -52,11 +52,10 @@ class GeneratedBindings {
 
  late final _opencvProcessStreamPtr = _lookup<
      ffi.NativeFunction<
-          ffi.Pointer<ffi.Uint32> Function(
+          ffi.Pointer<ffi.Uint8> Function(
              ffi.Pointer<ffi.Char>, ffi.Int, ffi.Int)>>('opencvProcessStream');
  late final _opencvProcessStream = _opencvProcessStreamPtr.asFunction<
-      ffi.Pointer<ffi.Uint32> Function(
-          ffi.Pointer<ffi.Char>, int, int)>();
+      ffi.Pointer<ffi.Uint8> Function(ffi.Pointer<ffi.Char>, int, int)>();
 
  void opencvProcessImage(
    ffi.Pointer<ffi.Char> input,
diff --git a/lib/image_converter.dart b/lib/image_converter.dart
index 415fc81..dbbbaa4 100644
--- a/lib/image_converter.dart
+++ b/lib/image_converter.dart
@@ -9,19 +9,21 @@ import 'package:image/image.dart' as imglib;
 
 // TODO: this is not working on iOS in portrait mode
 Future convertImage(CameraImage image) async {
  try {
-    final WriteBuffer allBytes = WriteBuffer();
+    /*final WriteBuffer allBytes = WriteBuffer();
    for (final Plane plane in image.planes) {
      allBytes.putUint8List(plane.bytes);
    }
    final Uint8List bytes = allBytes.done().buffer.asUint8List();
    return bytes;
-    // if (image.format.group == ImageFormatGroup.yuv420) {
-    //   return image.planes.first.bytes;
-    // } else if (image.format.group == ImageFormatGroup.bgra8888) {
-    //   // return image.planes.first.bytes;
-    //   return convertBGRA8888(image).getBytes(order: imglib.ChannelOrder.bgra);
-    // }
+    */
+    if (image.format.group == ImageFormatGroup.yuv420) {
+      return convertYUV420(image).getBytes();
+    } else if (image.format.group == ImageFormatGroup.bgra8888) {
+      //return image.planes.first.bytes;
+      return convertBGRA8888(image).getBytes(order: imglib.ChannelOrder.rgba);
+    }
  } catch (e) {
    debugPrint(">>>>>>>>>>>> ERROR:$e");
  }
@@ -29,42 +31,67 @@ Future convertImage(CameraImage image) async {
 }
 
 // TODO: this is not working on iOS (yet) in Image v4
+// NOTE: changed the conversion below, but it is untested; no macOS machine
+// was available to verify it on iOS.
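+// Caveat: this BGRA path still yields 4 bytes per pixel (RGBA order), while
+// the native side now builds a 3-channel CV_8UC3 Mat from the stream bytes,
+// so the iOS path likely needs the same 3-byte RGB layout as convertYUV420
+// before stream processing can work end to end.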
 imglib.Image convertBGRA8888(CameraImage image) {
-  final plane = image.planes.first;
+  final plane = image.planes[0];
  return imglib.Image.fromBytes(
    width: image.width,
    height: image.height,
-    bytes: image.planes.first.bytes.buffer,
+    bytes: plane.bytes.buffer,
    rowStride: plane.bytesPerRow,
    bytesOffset: 28,
-    order: imglib.ChannelOrder.bgra,
+    order: imglib.ChannelOrder.rgba,
  );
 }
 
-imglib.Image convertYUV420(CameraImage image) {
-  var img = imglib.Image(
-    width: image.width,
-    height: image.height,
-  ); // Create Image buffer
-
-  Plane plane = image.planes[0];
-  const int shift = (0xFF << 24);
-
-  // Fill image buffer with plane[0] from YUV420_888
-  for (int x = 0; x < image.width; x++) {
-    for (int planeOffset = 0;
-        planeOffset < image.height * image.width;
-        planeOffset += image.width) {
-      final pixelColor = plane.bytes[planeOffset + x];
-      // color: 0x FF FF FF FF
-      //           A  B  G  R
-      // Calculate pixel color
-      var newVal = shift | (pixelColor << 16) | (pixelColor << 8) | pixelColor;
-
-      img.data
-          ?.setPixel(x, planeOffset ~/ image.width, imglib.ColorInt8(newVal));
+imglib.Image convertYUV420(CameraImage cameraImage) {
+  final imageWidth = cameraImage.width;
+  final imageHeight = cameraImage.height;
+  final yBuffer = cameraImage.planes[0].bytes;
+  final uBuffer = cameraImage.planes[1].bytes;
+  final vBuffer = cameraImage.planes[2].bytes;
+
+  final int yRowStride = cameraImage.planes[0].bytesPerRow;
+  final int yPixelStride = cameraImage.planes[0].bytesPerPixel!;
+
+  final int uvRowStride = cameraImage.planes[1].bytesPerRow;
+  final int uvPixelStride = cameraImage.planes[1].bytesPerPixel!;
+
+  final image = imglib.Image(width: imageWidth, height: imageHeight);
+
+  for (int h = 0; h < imageHeight; h++) {
+    final int uvh = h ~/ 2;
+
+    for (int w = 0; w < imageWidth; w++) {
+      final int uvw = w ~/ 2;
+
+      final yIndex = (h * yRowStride) + (w * yPixelStride);
+
+      // Y plane values are luma in the range [0, 255].
+      final int y = yBuffer[yIndex];
+
+      // U/V values are subsampled: each pixel in the U/V channel of a
+      // YUV420 image acts as the chroma value for four neighbouring pixels.
+      final int uvIndex = (uvh * uvRowStride) + (uvw * uvPixelStride);
+
+      // U/V values nominally lie in [-0.5, 0.5]; to fit the [0, 255] byte
+      // range they are scaled up and centered on 128. The -128 re-centering
+      // is folded into the constant offsets of the formulas below.
+      final int u = uBuffer[uvIndex];
+      final int v = vBuffer[uvIndex];
+
+      // Compute RGB values from Y, U and V.
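+      // The integer constants approximate the usual YUV-to-RGB coefficients:
+      //   1436 / 1024    ~= 1.402  (V contribution to R)
+      //   46549 / 131072 ~= 0.355  (U contribution to G)
+      //   93604 / 131072 ~= 0.714  (V contribution to G)
+      //   1814 / 1024    ~= 1.772  (U contribution to B)
+      // The offsets (-179, +44, +91, -227) fold in the -128 chroma centering,
+      // e.g. 1.402 * 128 ~= 179.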
+      int r = (y + v * 1436 / 1024 - 179).round();
+      int g = (y - u * 46549 / 131072 + 44 - v * 93604 / 131072 + 91).round();
+      int b = (y + u * 1814 / 1024 - 227).round();
+
+      r = r.clamp(0, 255);
+      g = g.clamp(0, 255);
+      b = b.clamp(0, 255);
+
+      image.setPixelRgb(w, h, r, g, b);
    }
  }
-  return img;
+  return image;
 }
diff --git a/lib/isolate_utils.dart b/lib/isolate_utils.dart
index 1e0a65d..af49539 100644
--- a/lib/isolate_utils.dart
+++ b/lib/isolate_utils.dart
@@ -82,7 +82,7 @@ class IsolateUtils {
            width: image.width,
            height: image.height,
            bytes: result.buffer,
-            numChannels: 4,
+            numChannels: 3,
            // order: imglib.ChannelOrder.bgra,
          );
          final resultBytes = Uint32List.fromList(imglib.encodeJpg(img));
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 97ccaaf..5f5a27c 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -2,7 +2,11 @@ cmake_minimum_required(VERSION 3.10)
 
 project(flutter_camera_processing LANGUAGES C CXX)
 
-include_directories(include)
+include_directories(
+    include
+    ${CMAKE_CURRENT_SOURCE_DIR}/../cpp
+)
+
 add_library(lib_opencv SHARED IMPORTED)
 set_target_properties(lib_opencv PROPERTIES IMPORTED_LOCATION
     ${CMAKE_CURRENT_SOURCE_DIR}/../android/src/main/jniLibs/${ANDROID_ABI}/libopencv_java4.so)
@@ -16,7 +20,7 @@ add_library(
 
 set (BUILD_WRITERS ON)
 
-add_subdirectory(zxing/core)
+add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/../cpp/zxing/core zxing)
 
 find_library(log-lib log)
 target_link_libraries(flutter_camera_processing ZXing lib_opencv ${log-lib})
\ No newline at end of file
diff --git a/src/native_opencv.cpp b/src/native_opencv.cpp
index b785fa5..5c699b1 100644
--- a/src/native_opencv.cpp
+++ b/src/native_opencv.cpp
@@ -15,30 +15,30 @@ extern "C"
    }
 
    FUNCTION_ATTRIBUTE
-    const unsigned char *opencvProcessStream(char *bytes, int width, int height)
+    const uint8_t *opencvProcessStream(char *bytes, int width, int height)
    {
        long long start = get_now();
 
-//        int rotation = 0;
+        // int rotation = 0;
 
-        Mat src = Mat(height, width, CV_8UC1, bytes);
+        Mat src = Mat(height, width, CV_8UC3, bytes);
        Mat dst = src;
 
        // handle rotation
-//        if (rotation == 90)
-//        {
+        // if (rotation == 90)
+        // {
 //            transpose(src, dst);
 //            flip(dst, dst, 1);
-//        }
-//        else if (rotation == 180)
-//        {
-//            flip(src, dst, -1);
-//        }
-//        else if (rotation == 270)
-//        {
-//            transpose(src, dst);
-//            flip(dst, dst, 0);
-//        }
+        // }
+        // else if (rotation == 180)
+        // {
+        //     flip(src, dst, -1);
+        // }
+        // else if (rotation == 270)
+        // {
+        //     transpose(src, dst);
+        //     flip(dst, dst, 0);
+        // }
 
        // Bitwise not the image
        // bitwise_not(src, dst);
@@ -46,7 +46,7 @@ extern "C"
 
        // return the image as a pointer to the data
        long length = dst.total() * dst.elemSize();
-        uchar *result = new uchar[length];
+        uint8_t *result = new uint8_t[length];
        memcpy(result, dst.data, length);
 
        delete[] bytes;
diff --git a/src/native_opencv.h b/src/native_opencv.h
index 6b5b687..2902c2c 100644
--- a/src/native_opencv.h
+++ b/src/native_opencv.h
@@ -1,4 +1,6 @@
 #ifdef __cplusplus
+#include <cstdint>
+
 extern "C"
 {
 #endif
@@ -17,9 +19,9 @@ extern "C"
     * @param height Image height.
     * @return Image bytes.
     */
-    const unsigned char *opencvProcessStream(char *bytes, int width, int height);
+    const uint8_t *opencvProcessStream(char *bytes, int width, int height);
 
-    void opencvProcessImage(char *input, char* output);
+    void opencvProcessImage(char *input, char *output);
 
 #ifdef __cplusplus
 }
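--
Usage note, not part of the patch: with this change, opencvProcessStream
returns width * height * 3 packed RGB bytes instead of width * height RGBA
words. A minimal caller sketch, assuming convertImage() yields the
3-byte-per-pixel RGB frame produced by convertYUV420 above (names follow the
patch; this wiring is illustrative, not the plugin's actual isolate flow):

    final rgbBytes = await convertImage(image); // RGB888, 3 bytes per pixel
    final processed = FlutterCameraProcessing.opencvProcessStream(
        rgbBytes, image.width, image.height); // length == width * height * 3
    final img = imglib.Image.fromBytes(
      width: image.width,
      height: image.height,
      bytes: processed.buffer,
      numChannels: 3,
    );

Note that the native result buffer allocated with new[] in opencvProcessStream
is never freed on the Dart side, so each processed frame leaks it; a
freeNativeBuffer()-style export (hypothetical, not in this patch) would be
needed to release it.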