Changed video stream byte for RGB support #2

Open · wants to merge 1 commit into master
8 changes: 4 additions & 4 deletions lib/flutter_camera_processing.dart
@@ -38,12 +38,12 @@ class FlutterCameraProcessing {
           CameraImage image) async =>
       await _inference(OpenCVIsolateData(image));
 
-  static Uint32List opencvProcessStream(
+  static Uint8List opencvProcessStream(
           Uint8List bytes, int width, int height) =>
-      Uint32List.fromList(bindings
+      Uint8List.fromList(bindings
           .opencvProcessStream(bytes.allocatePointer(), width, height)
-          .cast<Int8>()
-          .asTypedList(width * height));
+          .cast<Uint8>()
+          .asTypedList(width * height * 3));
 
   static opencvProcessImage(String input, String output) =>
       bindings.opencvProcessImage(
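
The wrapper now returns the raw interleaved RGB bytes instead of 32-bit pixels, so the buffer length grows from width * height to width * height * 3. Below is a minimal sketch of reading one pixel back out of that buffer, assuming it is tightly packed in row-major RGB order; the helper name pixelAt is illustrative and not part of the plugin.

import 'dart:typed_data';

// Reads one pixel from the packed buffer returned by opencvProcessStream.
// Assumes 3 bytes per pixel, row-major order, no row padding.
({int r, int g, int b}) pixelAt(Uint8List rgb, int width, int x, int y) {
  final i = (y * width + x) * 3;
  return (r: rgb[i], g: rgb[i + 1], b: rgb[i + 2]);
}
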
7 changes: 3 additions & 4 deletions lib/generated_bindings.dart
@@ -38,7 +38,7 @@ class GeneratedBindings {
   /// @param width Image width.
   /// @param height Image height.
   /// @return Image bytes.
-  ffi.Pointer<ffi.UnsignedChar> opencvProcessStream(
+  ffi.Pointer<ffi.Uint8> opencvProcessStream(
     ffi.Pointer<ffi.Char> bytes,
     int width,
    int height,
@@ -52,11 +52,10 @@
 
   late final _opencvProcessStreamPtr = _lookup<
       ffi.NativeFunction<
-          ffi.Pointer<ffi.UnsignedChar> Function(
+          ffi.Pointer<ffi.Uint8> Function(
               ffi.Pointer<ffi.Char>, ffi.Int, ffi.Int)>>('opencvProcessStream');
   late final _opencvProcessStream = _opencvProcessStreamPtr.asFunction<
-      ffi.Pointer<ffi.UnsignedChar> Function(
-          ffi.Pointer<ffi.Char>, int, int)>();
+      ffi.Pointer<ffi.Uint8> Function(ffi.Pointer<ffi.Char>, int, int)>();
 
   void opencvProcessImage(
     ffi.Pointer<ffi.Char> input,
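
The binding takes a Pointer<Char>, so the Dart wrapper relies on an allocatePointer() extension (defined elsewhere in this repo) to copy the Uint8List into native memory. The sketch below shows what such an extension typically looks like using package:ffi's malloc; it is an assumption about its shape, not the repo's actual implementation.

import 'dart:ffi' as ffi;
import 'dart:typed_data';

import 'package:ffi/ffi.dart';

// Hypothetical allocatePointer(): copies the list into freshly allocated
// native memory and returns it as the Pointer<Char> the binding expects.
extension AllocatePointer on Uint8List {
  ffi.Pointer<ffi.Char> allocatePointer() {
    final ptr = malloc.allocate<ffi.Uint8>(length);
    ptr.asTypedList(length).setAll(0, this);
    return ptr.cast<ffi.Char>();
  }
}
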
93 changes: 60 additions & 33 deletions lib/image_converter.dart
@@ -9,62 +9,89 @@ import 'package:image/image.dart' as imglib;
 // TODO: this is not working on iOS in portrait mode
 Future<Uint8List> convertImage(CameraImage image) async {
   try {
-    final WriteBuffer allBytes = WriteBuffer();
+    /*final WriteBuffer allBytes = WriteBuffer();
     for (final Plane plane in image.planes) {
       allBytes.putUint8List(plane.bytes);
     }
-    final Uint8List bytes = allBytes.done().buffer.asUint8List();
-    return bytes;
-
-    // if (image.format.group == ImageFormatGroup.yuv420) {
-    //   return image.planes.first.bytes;
-    // } else if (image.format.group == ImageFormatGroup.bgra8888) {
-    //   // return image.planes.first.bytes;
-    //   return convertBGRA8888(image).getBytes(order: imglib.ChannelOrder.bgra);
-    // }
+    bytes = allBytes.done().buffer.asUint8List();
+    return bytes;*/
+    if (image.format.group == ImageFormatGroup.yuv420) {
+      return convertYUV420(image).getBytes();
+    } else if (image.format.group == ImageFormatGroup.bgra8888) {
+      //return image.planes.first.bytes;
+      return convertBGRA8888(image).getBytes(order: imglib.ChannelOrder.rgba);
+    }
   } catch (e) {
     debugPrint(">>>>>>>>>>>> ERROR:$e");
   }
   return Uint8List(0);
 }

 // TODO: this is not working on iOS (yet) in Image v4
+// Made this change but not sure whether it works; I don't have a Mac to test on.
 imglib.Image convertBGRA8888(CameraImage image) {
-  final plane = image.planes.first;
+  final plane = image.planes[0];
   return imglib.Image.fromBytes(
     width: image.width,
     height: image.height,
-    bytes: image.planes.first.bytes.buffer,
-    order: imglib.ChannelOrder.bgra,
+    bytes: plane.bytes.buffer,
+    rowStride: plane.bytesPerRow,
+    bytesOffset: 28,
+    order: imglib.ChannelOrder.rgba,
   );
 }

-imglib.Image convertYUV420(CameraImage image) {
-  var img = imglib.Image(
-    width: image.width,
-    height: image.height,
-  ); // Create Image buffer
-
-  Plane plane = image.planes[0];
-  const int shift = (0xFF << 24);
-
-  // Fill image buffer with plane[0] from YUV420_888
-  for (int x = 0; x < image.width; x++) {
-    for (int planeOffset = 0;
-        planeOffset < image.height * image.width;
-        planeOffset += image.width) {
-      final pixelColor = plane.bytes[planeOffset + x];
-      // color: 0x FF FF FF FF
-      //           A  B  G  R
-      // Calculate pixel color
-      var newVal = shift | (pixelColor << 16) | (pixelColor << 8) | pixelColor;
-
-      img.data
-          ?.setPixel(x, planeOffset ~/ image.width, imglib.ColorInt8(newVal));
+imglib.Image convertYUV420(CameraImage cameraImage) {
+  final imageWidth = cameraImage.width;
+  final imageHeight = cameraImage.height;
+  final yBuffer = cameraImage.planes[0].bytes;
+  final uBuffer = cameraImage.planes[1].bytes;
+  final vBuffer = cameraImage.planes[2].bytes;
+
+  final int yRowStride = cameraImage.planes[0].bytesPerRow;
+  final int yPixelStride = cameraImage.planes[0].bytesPerPixel!;
+
+  final int uvRowStride = cameraImage.planes[1].bytesPerRow;
+  final int uvPixelStride = cameraImage.planes[1].bytesPerPixel!;
+
+  final image = imglib.Image(width: imageWidth, height: imageHeight);
+
+  for (int h = 0; h < imageHeight; h++) {
+    int uvh = (h / 2).floor();
+
+    for (int w = 0; w < imageWidth; w++) {
+      int uvw = (w / 2).floor();
+
+      final yIndex = (h * yRowStride) + (w * yPixelStride);
+
+      // The Y plane holds luma values in the [0, 255] range.
+      final int y = yBuffer[yIndex];
+
+      // U/V values are subsampled: each pixel in the U/V channels of a
+      // YUV_420 image acts as the chroma value for 4 neighbouring pixels.
+      final int uvIndex = (uvh * uvRowStride) + (uvw * uvPixelStride);
+
+      // U/V values conceptually lie in [-0.5, 0.5]; in the buffers they are
+      // scaled to [0, 255] and centered at 128. That centering is folded
+      // into the constant offsets used below.
+      final int u = uBuffer[uvIndex];
+      final int v = vBuffer[uvIndex];
+
+      // Compute RGB values with a fixed-point YUV -> RGB conversion.
+      int r = (y + v * 1436 / 1024 - 179).round();
+      int g = (y - u * 46549 / 131072 + 44 - v * 93604 / 131072 + 91).round();
+      int b = (y + u * 1814 / 1024 - 227).round();
+
+      r = r.clamp(0, 255);
+      g = g.clamp(0, 255);
+      b = b.clamp(0, 255);
+
+      image.setPixelRgb(w, h, r, g, b);
     }
   }
 
-  return img;
+  return image;
 }
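
The integer ratios in convertYUV420 are fixed-point approximations of the usual YUV-to-RGB conversion with U and V centred at 128: 1436/1024 ≈ 1.402 and 1814/1024 ≈ 1.772 for the V-to-R and U-to-B terms, 46549/131072 ≈ 0.355 and 93604/131072 ≈ 0.714 for the G terms, with the offsets 179, 44 + 91 and 227 absorbing the ×128 centring. A quick sanity check of the same arithmetic on a neutral grey sample (Y = U = V = 128 should come back as roughly mid-grey):

void main() {
  const y = 128, u = 128, v = 128;
  final r = (y + v * 1436 / 1024 - 179).round().clamp(0, 255);
  final g = (y - u * 46549 / 131072 + 44 - v * 93604 / 131072 + 91)
      .round()
      .clamp(0, 255);
  final b = (y + u * 1814 / 1024 - 227).round().clamp(0, 255);
  print('$r $g $b'); // prints 129 126 128, i.e. close to (128, 128, 128)
}
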
2 changes: 1 addition & 1 deletion lib/isolate_utils.dart
@@ -82,7 +82,7 @@ class IsolateUtils {
         width: image.width,
         height: image.height,
         bytes: result.buffer,
-        numChannels: 4,
+        numChannels: 3,
         // order: imglib.ChannelOrder.bgra,
       );
       final resultBytes = Uint32List.fromList(imglib.encodeJpg(img));
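
With the stream now delivering 3-channel data, the isolate wraps the buffer with numChannels: 3 before JPEG-encoding it. A minimal, self-contained sketch of that step; the helper name rgbToJpeg is illustrative:

import 'dart:typed_data';

import 'package:image/image.dart' as imglib;

// Wraps a packed width*height*3 RGB buffer in an image (v4 API) and
// JPEG-encodes it, mirroring what the isolate does with the processed frame.
Uint8List rgbToJpeg(Uint8List rgb, int width, int height) {
  final img = imglib.Image.fromBytes(
    width: width,
    height: height,
    bytes: rgb.buffer,
    numChannels: 3, // packed RGB, no alpha
  );
  return imglib.encodeJpg(img);
}
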
8 changes: 6 additions & 2 deletions src/CMakeLists.txt
@@ -2,7 +2,11 @@ cmake_minimum_required(VERSION 3.10)
 
 project(flutter_camera_processing LANGUAGES C CXX)
 
-include_directories(include)
+include_directories(
+    include
+    ${CMAKE_CURRENT_SOURCE_DIR}/../cpp
+)
 
 add_library(lib_opencv SHARED IMPORTED)
 set_target_properties(lib_opencv PROPERTIES IMPORTED_LOCATION ${CMAKE_CURRENT_SOURCE_DIR}/../android/src/main/jniLibs/${ANDROID_ABI}/libopencv_java4.so)
 
@@ -16,7 +20,7 @@ add_library(
 
 set (BUILD_WRITERS ON)
 
-add_subdirectory(zxing/core)
+add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/../cpp/zxing/core zxing)
 
 find_library(log-lib log)
 target_link_libraries(flutter_camera_processing ZXing lib_opencv ${log-lib})
32 changes: 16 additions & 16 deletions src/native_opencv.cpp
@@ -15,38 +15,38 @@ extern "C"
 }
 
 FUNCTION_ATTRIBUTE
-const unsigned char *opencvProcessStream(char *bytes, int width, int height)
+const uint8_t *opencvProcessStream(char *bytes, int width, int height)
 {
     long long start = get_now();
 
     // int rotation = 0;
 
-    Mat src = Mat(height, width, CV_8UC1, bytes);
+    Mat src = Mat(height, width, CV_8UC3, bytes);
     Mat dst = src;
 
     // handle rotation
     // if (rotation == 90)
     // {
     //     transpose(src, dst);
     //     flip(dst, dst, 1);
     // }
     // else if (rotation == 180)
     // {
     //     flip(src, dst, -1);
     // }
     // else if (rotation == 270)
     // {
     //     transpose(src, dst);
     //     flip(dst, dst, 0);
     // }
 
     // Bitwise not the image
-    // bitwise_not(src, dst);
+    // bitwise_not(dst, dst);
 
     // return the image as a pointer to the data
     long length = dst.total() * dst.elemSize();
-    uchar *result = new uchar[length];
+    uint8_t *result = new uint8_t[length];
     memcpy(result, dst.data, length);
 
     delete[] bytes;
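
Since the native side now builds a CV_8UC3 Mat, the bytes handed to it must already be the width * height * 3 RGB buffer produced on the Dart side. A hedged end-to-end sketch of that wiring follows; the processFrame name and the import paths are assumptions based on the file layout above, and the real plugin drives this from its isolate.

import 'dart:typed_data';

import 'package:camera/camera.dart';
import 'package:flutter_camera_processing/flutter_camera_processing.dart';
import 'package:flutter_camera_processing/image_converter.dart';

// Convert a CameraImage to packed RGB in Dart, then run it through the
// native CV_8UC3 pipeline and get the processed RGB bytes back.
Future<Uint8List> processFrame(CameraImage image) async {
  final Uint8List rgb = await convertImage(image);
  return FlutterCameraProcessing.opencvProcessStream(
      rgb, image.width, image.height);
}
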
6 changes: 4 additions & 2 deletions src/native_opencv.h
@@ -1,4 +1,6 @@
 #ifdef __cplusplus
+#include <stdint.h>
+
 extern "C"
 {
 #endif
@@ -17,9 +19,9 @@ extern "C"
  * @param height Image height.
  * @return Image bytes.
  */
-const unsigned char *opencvProcessStream(char *bytes, int width, int height);
+const uint8_t *opencvProcessStream(char *bytes, int width, int height);
 
-void opencvProcessImage(char *input, char* output);
+void opencvProcessImage(char *input, char *output);
 
 #ifdef __cplusplus
 }