
Add nv12, done

Branch: camera_next
Author: Vitaliy Zarubin, 1 year ago
Commit: f320dea79d
Changed files:

  1. packages/camera/camera_aurora/aurora/include/camera_aurora/texture_camera.h (1 change)
  2. packages/camera/camera_aurora/aurora/include/camera_aurora/yuv.h (171 changes)
  3. packages/camera/camera_aurora/aurora/include/camera_aurora/yuv_i420.h (94 changes, new file)
  4. packages/camera/camera_aurora/aurora/include/camera_aurora/yuv_nv12.h (87 changes, new file)
  5. packages/camera/camera_aurora/aurora/texture_camera.cpp (46 changes)

packages/camera/camera_aurora/aurora/include/camera_aurora/texture_camera.h (1 change)

@@ -65,6 +65,7 @@ private:
     std::shared_ptr<uint8_t> m_bits;
     int m_counter = 0;
+    int m_chromaStep = 1;
     bool m_isStart = false;
     bool m_isTakeImageBase64 = false;
 };

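Note on the new member: m_chromaStep caches the chroma layout of the last frame, 1 for planar I420 (separate U and V planes) and 2 for semi-planar NV12 (a single interleaved UV plane); the rest of the commit branches on this value. A minimal standalone sketch of what the two layouts mean for plane sizes (values and names here are illustrative, not part of the plugin):

#include <cstdio>

// Both layouts carry 12 bits per pixel; they differ only in how chroma is grouped.
int main() {
    const int w = 1280, h = 720;                        // example frame size
    const int ySize = w * h;                            // full-resolution luma plane
    const int chroma = ((w + 1) / 2) * ((h + 1) / 2);   // one 2x2-subsampled chroma plane

    std::printf("I420: Y=%d, U=%d, V=%d bytes (three planes)\n", ySize, chroma, chroma);
    std::printf("NV12: Y=%d, UV=%d bytes (two planes, U and V interleaved)\n",
                ySize, 2 * chroma);
    return 0;
}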
packages/camera/camera_aurora/aurora/include/camera_aurora/yuv.h (171 changes)

@@ -13,193 +13,64 @@
 namespace yuv {
-struct Result
-{
-    uint8_t *y;
-    int strideY;
-    uint8_t *u;
-    int strideU;
-    uint8_t *v;
-    int strideV;
-    int width;
-    int height;
-    std::shared_ptr<uint8_t> raw;
-};
-Result I420Scale(const uint8_t *srcY,
+std::string YUVToQImage(const uint8_t *srcY,
                  const uint8_t *srcU,
                  const uint8_t *srcV,
                  int srcWidth,
                  int srcHeight,
-                 int outWidth,
-                 int outHeight)
-{
-    auto bufSize = (((outWidth * outHeight) + ((outWidth + 1) / 2) * ((outHeight + 1) / 2))) * 2;
-    auto buf = std::shared_ptr<uint8_t>((uint8_t *) malloc(bufSize), free);
-    auto y = buf.get();
-    auto u = buf.get() + outWidth * outHeight;
-    auto v = buf.get() + outWidth * outHeight + (outWidth * outHeight + 3) / 4;
-    auto srcStrideY = srcWidth;
-    auto srcStrideU = (srcWidth + 1) / 2;
-    auto srcStrideV = srcStrideU;
-    auto outStrideY = outWidth;
-    auto outStrideU = (outWidth + 1) / 2;
-    auto outStrideV = outStrideU;
-    libyuv::I420Scale(srcY,
-                      srcStrideY,
-                      srcU,
-                      srcStrideU,
-                      srcV,
-                      srcStrideV,
-                      srcWidth,
-                      srcHeight,
-                      y,
-                      outStrideY,
-                      u,
-                      outStrideU,
-                      v,
-                      outStrideV,
-                      outWidth,
-                      outHeight,
-                      libyuv::kFilterBilinear);
-    return Result{y, outStrideY, u, outStrideU, v, outStrideV, outWidth, outHeight, buf};
-}
-Result I420Rotate(const uint8_t *srcY,
-                  const uint8_t *srcU,
-                  const uint8_t *srcV,
-                  int srcWidth,
-                  int srcHeight,
-                  int degree)
+                 int orientationDisplay,
+                 int orientationCamera,
+                 int direction)
 {
-    int d = degree;
+    auto angle = orientationCamera - orientationDisplay;
-    if (degree < 0) {
-        d = 360 - ((degree * -1) % 360);
+    if (direction < 0 && (orientationDisplay == 90 || orientationDisplay == 270)) {
+        angle -= 180;
     }
-    if (degree >= 360) {
-        d = degree % 360;
+    if (angle < 0) {
+        angle = 360 - ((angle * -1) % 360);
     }
-    int outWidth = srcWidth;
-    int outHeight = srcHeight;
-    if (d == 90 || d == 270) {
-        outWidth = srcHeight;
-        outHeight = srcWidth;
+    if (angle >= 360) {
+        angle = angle % 360;
     }
-    enum libyuv::RotationMode mode = (enum libyuv::RotationMode) d;
-    auto bufSize = (((outWidth * outHeight) + ((outWidth + 1) / 2) * ((outHeight + 1) / 2))) * 2;
-    auto buf = std::shared_ptr<uint8_t>((uint8_t *) malloc(bufSize), free);
-    auto y = buf.get();
-    auto u = buf.get() + outWidth * outHeight;
-    auto v = buf.get() + outWidth * outHeight + (outWidth * outHeight + 3) / 4;
-    auto srcStrideY = srcWidth;
-    auto srcStrideU = (srcWidth + 1) / 2;
-    auto srcStrideV = srcStrideU;
-    auto outStrideY = outWidth;
-    auto outStrideU = (outWidth + 1) / 2;
-    auto outStrideV = outStrideU;
-    libyuv::I420Rotate(srcY,
-                       srcStrideY,
-                       srcU,
-                       srcStrideU,
-                       srcV,
-                       srcStrideV,
-                       y,
-                       outStrideY,
-                       u,
-                       outStrideU,
-                       v,
-                       outStrideV,
-                       srcWidth,
-                       srcHeight,
-                       mode);
-    return Result{y, outStrideY, u, outStrideU, v, outStrideV, outWidth, outHeight, buf};
-}
+    QSize size(srcWidth, srcHeight);
+    QImage image(size, QImage::Format_RGBA8888);
-std::shared_ptr<uint8_t> I420ToARGB(
-    const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV, int srcWidth, int srcHeight)
-{
+    if (srcV) {
         auto srcStrideY = srcWidth;
         auto srcStrideU = (srcWidth + 1) / 2;
         auto srcStrideV = srcStrideU;
-    auto bits = std::shared_ptr<uint8_t>((uint8_t *) malloc(srcWidth * srcHeight * 4), free);
         libyuv::I420ToARGB(srcY,
                            srcStrideY,
                            srcU,
                            srcStrideU,
                            srcV,
                            srcStrideV,
-                           bits.get(),
+                           reinterpret_cast<uint8_t *>(image.bits()),
                            srcWidth * 4,
                            srcWidth,
                            srcHeight);
-    return bits;
-}
-QImage I420ToQImage(
-    const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV, int srcWidth, int srcHeight)
-{
-    QSize size(srcWidth, srcHeight);
-    QImage image(size, QImage::Format_RGBA8888);
+    } else {
         auto srcStrideY = srcWidth;
-    auto srcStrideU = (srcWidth + 1) / 2;
-    auto srcStrideV = srcStrideU;
+        auto srcStrideUV = srcWidth;
-    libyuv::I420ToARGB(srcY,
+        libyuv::NV12ToARGB(srcY,
                            srcStrideY,
-                           srcU,
-                           srcStrideU,
-                           srcV,
-                           srcStrideV,
+                           srcU, // UV
+                           srcStrideUV,
                            reinterpret_cast<uint8_t *>(image.bits()),
                            srcWidth * 4,
                            srcWidth,
                            srcHeight);
-    return image;
     }
-std::string I420ToBase64(const uint8_t *srcY,
-                         const uint8_t *srcU,
-                         const uint8_t *srcV,
-                         int srcWidth,
-                         int srcHeight,
-                         int orientationDisplay,
-                         int orientationCamera,
-                         int direction)
-{
-    auto angle = orientationCamera - orientationDisplay;
-    if (direction < 0 && (orientationDisplay == 90 || orientationDisplay == 270)) {
-        angle -= 180;
-    }
-    auto result = I420Rotate(srcY, srcU, srcV, srcWidth, srcHeight, angle);
-    auto image = I420ToQImage(result.y, result.u, result.v, result.width, result.height);
     QBuffer qbuffer;
     qbuffer.open(QIODevice::WriteOnly);
-    image.save(&qbuffer, "JPEG");
+    image.transformed(QMatrix().rotate(angle)).save(&qbuffer, "JPEG");
     return qbuffer.data().toBase64().toStdString();
 }

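The old I420-only pipeline (scale, rotate, convert, encode) collapses into a single YUVToQImage: it derives the rotation angle from the camera mount angle and the display orientation, converts either I420 (srcV given) or NV12 (srcV null, srcU holding the interleaved UV plane) directly into a QImage, and lets Qt apply the rotation while encoding the JPEG. A standalone restatement of just the angle arithmetic, with illustrative names (direction is 1 for the rear camera and -1 for the front camera, matching the call sites in texture_camera.cpp):

#include <cassert>

// Restatement of the angle computation in yuv::YUVToQImage (names are illustrative).
int captureRotation(int orientationDisplay, int orientationCamera, int direction) {
    int angle = orientationCamera - orientationDisplay;
    if (direction < 0 && (orientationDisplay == 90 || orientationDisplay == 270)) {
        angle -= 180;  // front-camera correction when the display is in landscape
    }
    if (angle < 0) {
        angle = 360 - ((angle * -1) % 360);  // wrap negative angles into [0, 360)
    }
    if (angle >= 360) {
        angle = angle % 360;
    }
    return angle;
}

int main() {
    assert(captureRotation(0, 270, 1) == 270);   // rear camera, portrait display
    assert(captureRotation(90, 270, -1) == 0);   // front camera, landscape display
    return 0;
}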
packages/camera/camera_aurora/aurora/include/camera_aurora/yuv_i420.h (94 changes)

@@ -0,0 +1,94 @@
/*
 * SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef FLUTTER_PLUGIN_CAMERA_AURORA_PLUGIN_YUV_I420_H
#define FLUTTER_PLUGIN_CAMERA_AURORA_PLUGIN_YUV_I420_H

#include <libyuv/libyuv.h>

namespace yuv {

struct ResultI420
{
    uint8_t *y;
    int strideY;
    uint8_t *u;
    int strideU;
    uint8_t *v;
    int strideV;
    int width;
    int height;
    std::shared_ptr<uint8_t> raw;
};

ResultI420 I420Scale(const uint8_t *srcY,
                     const uint8_t *srcU,
                     const uint8_t *srcV,
                     int srcWidth,
                     int srcHeight,
                     int outWidth,
                     int outHeight)
{
    auto bufSize = (((outWidth * outHeight) + ((outWidth + 1) / 2) * ((outHeight + 1) / 2))) * 2;
    auto buf = std::shared_ptr<uint8_t>((uint8_t *) malloc(bufSize), free);

    auto y = buf.get();
    auto u = buf.get() + outWidth * outHeight;
    auto v = buf.get() + outWidth * outHeight + (outWidth * outHeight + 3) / 4;

    auto srcStrideY = srcWidth;
    auto srcStrideU = (srcWidth + 1) / 2;
    auto srcStrideV = srcStrideU;

    auto outStrideY = outWidth;
    auto outStrideU = (outWidth + 1) / 2;
    auto outStrideV = outStrideU;

    libyuv::I420Scale(srcY,
                      srcStrideY,
                      srcU,
                      srcStrideU,
                      srcV,
                      srcStrideV,
                      srcWidth,
                      srcHeight,
                      y,
                      outStrideY,
                      u,
                      outStrideU,
                      v,
                      outStrideV,
                      outWidth,
                      outHeight,
                      libyuv::kFilterBilinear);

    return ResultI420{y, outStrideY, u, outStrideU, v, outStrideV, outWidth, outHeight, buf};
}

std::shared_ptr<uint8_t> I420ToARGB(
    const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV, int srcWidth, int srcHeight)
{
    auto srcStrideY = srcWidth;
    auto srcStrideU = (srcWidth + 1) / 2;
    auto srcStrideV = srcStrideU;

    auto bits = std::shared_ptr<uint8_t>((uint8_t *) malloc(srcWidth * srcHeight * 4), free);

    libyuv::I420ToARGB(srcY,
                       srcStrideY,
                       srcU,
                       srcStrideU,
                       srcV,
                       srcStrideV,
                       bits.get(),
                       srcWidth * 4,
                       srcWidth,
                       srcHeight);

    return bits;
}

} // namespace yuv

#endif /* FLUTTER_PLUGIN_CAMERA_AURORA_PLUGIN_YUV_I420_H */

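These helpers are the old I420 code from yuv.h moved into their own header, with Result renamed to ResultI420. I420Scale makes one allocation and carves it into Y, U and V planes; a small sketch of that layout arithmetic (illustrative names, even width and height assumed, offsets copied from the function above):

#include <cstdio>

// How the single allocation in yuv::I420Scale is carved into planes.
struct I420Layout {
    int yOffset, uOffset, vOffset, totalNeeded;
};

I420Layout i420Layout(int w, int h) {
    const int ySize = w * h;
    const int cSize = ((w + 1) / 2) * ((h + 1) / 2);  // one chroma plane
    I420Layout l;
    l.yOffset = 0;
    l.uOffset = ySize;                    // U follows Y
    l.vOffset = ySize + (ySize + 3) / 4;  // V follows U (same offset math as above)
    l.totalNeeded = ySize + 2 * cSize;    // 1.5 bytes per pixel for even sizes
    return l;
}

int main() {
    auto l = i420Layout(1280, 720);
    std::printf("u at %d, v at %d, need %d bytes\n", l.uOffset, l.vOffset, l.totalNeeded);
    // The header allocates (ySize + cSize) * 2 bytes, which exceeds totalNeeded,
    // so the carved planes always fit.
    return 0;
}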
packages/camera/camera_aurora/aurora/include/camera_aurora/yuv_nv12.h (87 changes)

@@ -0,0 +1,87 @@
/*
 * SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef FLUTTER_PLUGIN_CAMERA_AURORA_PLUGIN_YUV_NV12_H
#define FLUTTER_PLUGIN_CAMERA_AURORA_PLUGIN_YUV_NV12_H

#include <libyuv/libyuv.h>

namespace yuv {

struct ResultNV12
{
    uint8_t *y;
    int strideY;
    uint8_t *uv;
    int strideUV;
    int width;
    int height;
    std::shared_ptr<uint8_t> raw;
};

ResultNV12 NV12Scale(const uint8_t *srcY,
                     const uint8_t *srcUV,
                     int srcWidth,
                     int srcHeight,
                     int outWidth,
                     int outHeight)
{
    auto bufSize = (((outWidth * outHeight) + ((outWidth + 1) / 2) * ((outHeight + 1) / 2))) * 2;
    auto buf = std::shared_ptr<uint8_t>((uint8_t *) malloc(bufSize), free);

    auto y = buf.get();
    auto uv = buf.get() + outWidth * outHeight;

    auto srcStrideY = srcWidth;
    auto srcStrideUV = srcWidth;

    auto outStrideY = outWidth;
    auto outStrideUV = outWidth;

    libyuv::NV12Scale(srcY,
                      srcStrideY,
                      srcUV,
                      srcStrideUV,
                      srcWidth,
                      srcHeight,
                      y,
                      outStrideY,
                      uv,
                      outStrideUV,
                      outWidth,
                      outHeight,
                      libyuv::kFilterBilinear);

    return ResultNV12{y, outStrideY, uv, outStrideUV, outWidth, outHeight, buf};
}

std::shared_ptr<uint8_t> NV12ToARGB(const uint8_t *srcY,
                                    const uint8_t *srcUV,
                                    int srcWidth,
                                    int srcHeight)
{
    auto srcStrideY = srcWidth;
    auto srcStrideUV = srcWidth;

    auto bits = std::shared_ptr<uint8_t>((uint8_t *) malloc(srcWidth * srcHeight * 4), free);

    libyuv::NV12ToARGB(srcY,
                       srcStrideY,
                       srcUV,
                       srcStrideUV,
                       bits.get(),
                       srcWidth * 4,
                       srcWidth,
                       srcHeight);

    return bits;
}

} // namespace yuv

#endif /* FLUTTER_PLUGIN_CAMERA_AURORA_PLUGIN_YUV_NV12_H */

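ResultNV12 mirrors ResultI420 but keeps the chroma as a single interleaved UV plane whose stride equals the image width. A sketch of how the two helpers chain together, mirroring the preview path in TextureCamera::onCameraFrame (the frame data here is synthetic and the sizes are illustrative):

#include <memory>
#include <vector>
#include <camera_aurora/yuv_nv12.h>

int main() {
    const int srcW = 1280, srcH = 720;
    const int dstW = 640, dstH = 360;

    // NV12: full-size Y plane followed by an interleaved UV plane (stride = width).
    std::vector<uint8_t> y(srcW * srcH, 0x80);
    std::vector<uint8_t> uv(srcW * (srcH / 2), 0x80);

    // Downscale while staying in NV12...
    auto scaled = yuv::NV12Scale(y.data(), uv.data(), srcW, srcH, dstW, dstH);

    // ...then expand to 4-byte pixels for the Flutter texture.
    std::shared_ptr<uint8_t> argb
        = yuv::NV12ToARGB(scaled.y, scaled.uv, scaled.width, scaled.height);

    return argb ? 0 : 1;
}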
packages/camera/camera_aurora/aurora/texture_camera.cpp (46 changes)

@@ -4,6 +4,8 @@
  */
 #include <camera_aurora/texture_camera.h>
 #include <camera_aurora/yuv.h>
+#include <camera_aurora/yuv_i420.h>
+#include <camera_aurora/yuv_nv12.h>
 #include <flutter/platform-data.h>
 #include <flutter/platform-methods.h>
@@ -12,8 +14,6 @@ extern "C" {
 #include <libswscale/swscale.h>
 }
-#define YUV_B_SIZE 4
 TextureCamera::TextureCamera(TextureRegistrar *plugin, const CameraErrorHandler &onError)
     : m_plugin(plugin)
     , m_onError(onError)
@@ -124,13 +124,18 @@ std::map<Encodable, Encodable> TextureCamera::StartCapture(size_t width, size_t
 void TextureCamera::StopCapture()
 {
     m_isStart = false;
+    int index = 0;
     do {
-        std::this_thread::sleep_for(std::chrono::microseconds(10));
-    } while (m_isTakeImageBase64);
+        std::this_thread::sleep_for(
+            std::chrono::milliseconds(m_chromaStep == 1 ? 10 : 500 /* r7 */));
+        index++;
+    } while (m_isTakeImageBase64 && index < 200);
     if (m_camera && m_camera->captureStarted()) {
+        m_isStart = false;
         m_camera->stopCapture();
         m_camera->setListener(nullptr);
     }
 }
@@ -219,7 +224,7 @@ std::optional<std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame>> TextureCa
     std::string base64 = "";
     if (frame->chromaStep == 1 /* I420 */) {
-        base64 = yuv::I420ToBase64(frame->y,
+        base64 = yuv::YUVToQImage(frame->y,
                                    frame->cr,
                                    frame->cb,
                                    frame->width,
@@ -229,8 +234,20 @@ std::optional<std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame>> TextureCa
                                    m_info.id.find("front") != std::string::npos ? -1 : 1);
     }
+    else if (frame->chromaStep == 2 /* NV12 */) {
+        base64 = yuv::YUVToQImage(frame->y,
+                                  frame->cr,
+                                  nullptr,
+                                  frame->width,
+                                  frame->height,
+                                  static_cast<int>(PlatformMethods::GetOrientation()),
+                                  m_info.mountAngle,
+                                  m_info.id.find("front") != std::string::npos ? -1 : 1);
+    }
     m_takeImageBase64(base64);
     m_isTakeImageBase64 = false;
     return std::nullopt;
 }
 m_counter += 1;
@@ -248,13 +265,16 @@ std::optional<std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame>> TextureCa
 void TextureCamera::onCameraFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
 {
-    if (!m_isStart) {
+    if (!m_isStart && !m_isTakeImageBase64) {
         return;
     }
     if (auto optional = GetFrame(buffer)) {
         auto frame = optional.value();
+        m_chromaStep = frame->chromaStep;
+        if (frame->chromaStep == 1 /* I420 */) {
             auto result = yuv::I420Scale(frame->y,
                                          frame->cr,
                                          frame->cb,
@@ -262,10 +282,18 @@ void TextureCamera::onCameraFrame(std::shared_ptr<Aurora::StreamCamera::GraphicB
                                          frame->height,
                                          m_captureWidth,
                                          m_captureHeight);
             m_bits = yuv::I420ToARGB(result.y, result.u, result.v, result.width, result.height),
             m_plugin->MarkTextureAvailable(m_textureId);
+        } else if (frame->chromaStep == 2 /* NV12 */) {
+            auto result = yuv::NV12Scale(frame->y,
+                                         frame->cr,
+                                         frame->width,
+                                         frame->height,
+                                         m_captureWidth,
+                                         m_captureHeight);
+            m_bits = yuv::NV12ToARGB(result.y, result.uv, result.width, result.height);
+            m_plugin->MarkTextureAvailable(m_textureId);
+        }
     }
 }

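In texture_camera.cpp both the preview path (onCameraFrame) and the still-capture path now branch on frame->chromaStep, and StopCapture polls the pending-capture flag with a bounded budget instead of waiting indefinitely. A generic restatement of that bounded wait, with illustrative names:

#include <atomic>
#include <chrono>
#include <thread>

// Poll a flag until it clears or the poll budget is exhausted.
bool waitForFlagCleared(const std::atomic<bool> &busy,
                        std::chrono::milliseconds pollInterval,
                        int maxPolls) {
    for (int i = 0; i < maxPolls && busy.load(); ++i) {
        std::this_thread::sleep_for(pollInterval);
    }
    return !busy.load();  // true if the flag cleared within the budget
}

int main() {
    std::atomic<bool> takingPicture{false};
    // With the flag already clear this returns immediately; the plugin's loop uses a
    // 10 ms interval for I420 and 500 ms for NV12 (m_chromaStep), capped at 200 polls.
    return waitForFlagCleared(takingPicture, std::chrono::milliseconds(10), 200) ? 0 : 1;
}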