Browse Source

Implemented I420 photo capture & viewfinder

camera_next
Vitaliy Zarubin 1 year ago
parent
commit
04c40ef0d7
  1. 113
      packages/camera/camera_aurora/aurora/camera_aurora_plugin.cpp
  2. 40
      packages/camera/camera_aurora/aurora/include/camera_aurora/camera_aurora_plugin.h
  3. 51
      packages/camera/camera_aurora/aurora/include/camera_aurora/texture_camera.h
  4. 209
      packages/camera/camera_aurora/aurora/include/camera_aurora/yuv.h
  5. 352
      packages/camera/camera_aurora/aurora/texture_camera.cpp
  6. 9
      packages/camera/camera_aurora/lib/camera_viewfinder.dart

113
packages/camera/camera_aurora/aurora/camera_aurora_plugin.cpp

@ -8,45 +8,45 @@
#include <flutter/platform-events.h>
#include <flutter/platform-methods.h>
#include <QtCore>
#include <QBuffer>
#include <QCamera>
#include <QCameraImageCapture>
#include <QCameraInfo>
#include <QMediaRecorder>
#include <QCameraImageCapture>
#include <QtCore>
#include <unistd.h>
namespace CameraAuroraMethods
{
constexpr auto PluginKey = "camera_aurora";
namespace CameraAuroraMethods {
constexpr auto PluginKey = "camera_aurora";
constexpr auto AvailableCameras = "availableCameras";
constexpr auto CreateCamera = "createCamera";
constexpr auto ResizeFrame = "resizeFrame";
constexpr auto Dispose = "dispose";
constexpr auto StartCapture = "startCapture";
constexpr auto StopCapture = "stopCapture";
constexpr auto TakePicture = "takePicture";
constexpr auto StartVideoRecording = "startVideoRecording";
constexpr auto StopVideoRecording = "stopVideoRecording";
constexpr auto PauseVideoRecording = "pauseVideoRecording";
constexpr auto ResumeVideoRecording = "resumeVideoRecording";
}
constexpr auto AvailableCameras = "availableCameras";
constexpr auto CreateCamera = "createCamera";
constexpr auto ResizeFrame = "resizeFrame";
constexpr auto Dispose = "dispose";
constexpr auto StartCapture = "startCapture";
constexpr auto StopCapture = "stopCapture";
constexpr auto TakePicture = "takePicture";
constexpr auto StartVideoRecording = "startVideoRecording";
constexpr auto StopVideoRecording = "stopVideoRecording";
constexpr auto PauseVideoRecording = "pauseVideoRecording";
constexpr auto ResumeVideoRecording = "resumeVideoRecording";
} // namespace CameraAuroraMethods
namespace CameraAuroraEvents
{
constexpr auto StateChanged = "cameraAuroraStateChanged";
namespace CameraAuroraEvents {
constexpr auto StateChanged = "cameraAuroraStateChanged";
}
CameraAuroraPlugin::CameraAuroraPlugin()
{
PlatformEvents::SubscribeOrientationChanged([this]([[maybe_unused]] DisplayRotation orientation) {
if (this->m_isEnableStateChanged) {
auto state = this->m_textureCamera->GetState();
EventChannel(CameraAuroraEvents::StateChanged, MethodCodecType::Standard).SendEvent(state);
}
});
PlatformEvents::SubscribeOrientationChanged(
[this]([[maybe_unused]] DisplayRotation orientation) {
if (this->m_isEnableStateChanged) {
auto state = this->m_textureCamera->GetState();
EventChannel(CameraAuroraEvents::StateChanged, MethodCodecType::Standard)
.SendEvent(state);
}
});
}
void CameraAuroraPlugin::RegisterWithRegistrar(PluginRegistrar &registrar)
@ -62,64 +62,52 @@ void CameraAuroraPlugin::RegisterWithRegistrar(PluginRegistrar &registrar)
void CameraAuroraPlugin::RegisterMethods(PluginRegistrar &registrar)
{
auto methods = [this](const MethodCall &call)
{
auto methods = [this](const MethodCall &call) {
const auto &method = call.GetMethod();
if (method == CameraAuroraMethods::AvailableCameras)
{
if (method == CameraAuroraMethods::AvailableCameras) {
onAvailableCameras(call);
return;
}
if (method == CameraAuroraMethods::CreateCamera)
{
if (method == CameraAuroraMethods::CreateCamera) {
onCreateCamera(call);
return;
}
if (method == CameraAuroraMethods::ResizeFrame)
{
if (method == CameraAuroraMethods::ResizeFrame) {
onResizeFrame(call);
return;
}
if (method == CameraAuroraMethods::Dispose)
{
if (method == CameraAuroraMethods::Dispose) {
onDispose(call);
return;
}
if (method == CameraAuroraMethods::StartCapture)
{
if (method == CameraAuroraMethods::StartCapture) {
onStartCapture(call);
return;
}
if (method == CameraAuroraMethods::StopCapture)
{
if (method == CameraAuroraMethods::StopCapture) {
onStopCapture(call);
return;
}
if (method == CameraAuroraMethods::TakePicture)
{
if (method == CameraAuroraMethods::TakePicture) {
onTakePicture(call);
return;
}
if (method == CameraAuroraMethods::StartVideoRecording)
{
if (method == CameraAuroraMethods::StartVideoRecording) {
onStartVideoRecording(call);
return;
}
if (method == CameraAuroraMethods::StopVideoRecording)
{
if (method == CameraAuroraMethods::StopVideoRecording) {
onStopVideoRecording(call);
return;
}
if (method == CameraAuroraMethods::PauseVideoRecording)
{
if (method == CameraAuroraMethods::PauseVideoRecording) {
onPauseVideoRecording(call);
return;
}
if (method == CameraAuroraMethods::ResumeVideoRecording)
{
if (method == CameraAuroraMethods::ResumeVideoRecording) {
onResumeVideoRecording(call);
return;
}
@ -127,20 +115,24 @@ void CameraAuroraPlugin::RegisterMethods(PluginRegistrar &registrar)
unimplemented(call);
};
registrar.RegisterMethodChannel(
CameraAuroraMethods::PluginKey,
MethodCodecType::Standard,
methods);
registrar.RegisterMethodChannel(CameraAuroraMethods::PluginKey,
MethodCodecType::Standard,
methods);
}
void CameraAuroraPlugin::RegisterEvents(PluginRegistrar &registrar)
{
registrar.RegisterEventChannel(
CameraAuroraEvents::StateChanged, MethodCodecType::Standard,
[this](const Encodable &)
{ this->m_isEnableStateChanged = true; return EventResponse(); },
[this](const Encodable &)
{ this->m_isEnableStateChanged = true; return EventResponse(); });
CameraAuroraEvents::StateChanged,
MethodCodecType::Standard,
[this](const Encodable &) {
this->m_isEnableStateChanged = true;
return EventResponse();
},
[this](const Encodable &) {
this->m_isEnableStateChanged = true;
return EventResponse();
});
}
/**
@ -204,7 +196,8 @@ void CameraAuroraPlugin::onStopCapture(const MethodCall &call)
void CameraAuroraPlugin::onTakePicture(const MethodCall &call)
{
call.SendSuccessResponse(m_textureCamera->GetImageBase64());
m_textureCamera->GetImageBase64(
[call](std::string base64) { call.SendSuccessResponse(base64); });
}
void CameraAuroraPlugin::onStartVideoRecording(const MethodCall &call)

40
packages/camera/camera_aurora/aurora/include/camera_aurora/camera_aurora_plugin.h

@ -5,43 +5,41 @@
#ifndef FLUTTER_PLUGIN_CAMERA_AURORA_PLUGIN_H
#define FLUTTER_PLUGIN_CAMERA_AURORA_PLUGIN_H
#include <flutter/plugin-interface.h>
#include <camera_aurora/globals.h>
#include <flutter/plugin-interface.h>
#include <camera_aurora/texture_camera.h>
#include <QImage>
#include <QtCore>
class PLUGIN_EXPORT CameraAuroraPlugin final
: public QObject,
public PluginInterface
class PLUGIN_EXPORT CameraAuroraPlugin final : public QObject, public PluginInterface
{
Q_OBJECT
Q_OBJECT
public:
CameraAuroraPlugin();
void RegisterWithRegistrar(PluginRegistrar &registrar) override;
CameraAuroraPlugin();
void RegisterWithRegistrar(PluginRegistrar &registrar) override;
private:
void RegisterMethods(PluginRegistrar &registrar);
void RegisterEvents(PluginRegistrar &registrar);
void RegisterMethods(PluginRegistrar &registrar);
void RegisterEvents(PluginRegistrar &registrar);
void onAvailableCameras(const MethodCall &call);
void onCreateCamera(const MethodCall &call);
void onResizeFrame(const MethodCall &call);
void onDispose(const MethodCall &call);
void onStartCapture(const MethodCall &call);
void onStopCapture(const MethodCall &call);
void onAvailableCameras(const MethodCall &call);
void onCreateCamera(const MethodCall &call);
void onResizeFrame(const MethodCall &call);
void onDispose(const MethodCall &call);
void onStartCapture(const MethodCall &call);
void onStopCapture(const MethodCall &call);
void onTakePicture(const MethodCall &call);
void onTakePicture(const MethodCall &call);
void onStartVideoRecording(const MethodCall &call);
void onStopVideoRecording(const MethodCall &call);
void onPauseVideoRecording(const MethodCall &call);
void onResumeVideoRecording(const MethodCall &call);
void onStartVideoRecording(const MethodCall &call);
void onStopVideoRecording(const MethodCall &call);
void onPauseVideoRecording(const MethodCall &call);
void onResumeVideoRecording(const MethodCall &call);
void unimplemented(const MethodCall &call);
void unimplemented(const MethodCall &call);
private:
TextureCamera *m_textureCamera;

51
packages/camera/camera_aurora/aurora/include/camera_aurora/texture_camera.h

@ -8,23 +8,11 @@
#include <flutter/plugin-interface.h>
#include <streamcamera/streamcamera.h>
#include <QImage>
#include <QtCore>
#include <chrono>
#include <thread>
typedef std::function<void()> CameraErrorHandler;
struct ResultYUV
{
uint8_t *y;
int strideY;
uint8_t *u;
int strideU;
uint8_t *v;
int strideV;
int width;
int height;
std::shared_ptr<uint8_t> raw;
};
typedef std::function<void(std::string)> TakeImageBase64Handler;
class TextureCamera : public Aurora::StreamCamera::CameraListener
{
@ -42,38 +30,29 @@ public:
std::map<Encodable, Encodable> StartCapture(size_t width, size_t height);
void StopCapture();
std::map<Encodable, Encodable> GetState();
std::string GetImageBase64();
void GetImageBase64(const TakeImageBase64Handler &takeImageBase64);
std::map<Encodable, Encodable> ResizeFrame(size_t width, size_t height);
private:
bool CreateCamera(std::string cameraName);
void SendError(std::string error);
void ResizeFrame(size_t width, size_t height, Aurora::StreamCamera::CameraInfo info, Aurora::StreamCamera::CameraCapability cap);
std::optional<std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame>> GetFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer);
ResultYUV YUVI420Scale(
const uint8_t* srcY,
const uint8_t* srcU,
const uint8_t* srcV,
int srcWidth,
int srcHeight,
int outWidth,
int outHeight
);
ResultYUV YUVI420Rotate(
const uint8_t* srcY,
const uint8_t* srcU,
const uint8_t* srcV,
int srcWidth,
int srcHeight,
int degree // 0, 90, 180, 270
);
void ResizeFrame(size_t width,
size_t height,
Aurora::StreamCamera::CameraInfo info,
Aurora::StreamCamera::CameraCapability cap);
std::optional<std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame>> GetFrame(
std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer);
private:
TextureRegistrar *m_plugin;
TakeImageBase64Handler m_takeImageBase64;
CameraErrorHandler m_onError;
std::string m_error;
Aurora::StreamCamera::CameraInfo m_info;
Aurora::StreamCamera::CameraCapability m_cap;
Aurora::StreamCamera::CameraManager *m_manager;
std::shared_ptr<Aurora::StreamCamera::Camera> m_camera;
std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame> m_frame;
@ -86,6 +65,8 @@ private:
std::shared_ptr<uint8_t> m_bits;
int m_counter = 0;
bool m_isStart = false;
bool m_isTakeImageBase64 = false;
};
#endif /* TEXTURE_CAMERA_BUFFER_H */

209
packages/camera/camera_aurora/aurora/include/camera_aurora/yuv.h

@ -0,0 +1,209 @@
/*
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
* SPDX-License-Identifier: BSD-3-Clause
*/
#ifndef FLUTTER_PLUGIN_CAMERA_AURORA_PLUGIN_YUV_H
#define FLUTTER_PLUGIN_CAMERA_AURORA_PLUGIN_YUV_H
#include <libyuv/libyuv.h>
#include <QBuffer>
#include <QImage>
#include <QtCore>
namespace yuv {
// Holds one planar I420 (YUV 4:2:0) image produced by the helpers below.
// The y/u/v pointers are NON-owning views into the single allocation kept
// alive by `raw`; they become dangling once the last copy of `raw` is gone.
struct Result
{
uint8_t *y;   // Y (luma) plane, width x height bytes
int strideY;  // bytes per row of the Y plane
uint8_t *u;   // U (Cb) plane, half resolution in each dimension
int strideU;  // bytes per row of the U plane
uint8_t *v;   // V (Cr) plane, half resolution in each dimension
int strideV;  // bytes per row of the V plane
int width;    // image width in pixels
int height;   // image height in pixels
std::shared_ptr<uint8_t> raw;  // sole owner of the backing buffer (malloc/free)
};
/// Scales an I420 (YUV 4:2:0 planar) frame from srcWidth x srcHeight to
/// outWidth x outHeight using bilinear filtering.
///
/// @param srcY,srcU,srcV source plane pointers; strides are derived from srcWidth
/// @return Result whose plane pointers alias the single buffer owned by Result::raw
///
/// `inline` is required: this is a free function defined in a header, so a
/// non-inline definition violates the ODR when the header is included from
/// more than one translation unit.
inline Result I420Scale(const uint8_t *srcY,
                        const uint8_t *srcU,
                        const uint8_t *srcV,
                        int srcWidth,
                        int srcHeight,
                        int outWidth,
                        int outHeight)
{
    // I420 layout: one full-resolution Y plane followed by two chroma planes
    // whose dimensions round UP for odd sizes.
    const int ySize = outWidth * outHeight;
    const int chromaSize = ((outWidth + 1) / 2) * ((outHeight + 1) / 2);
    // Exact size (the previous `(y + chroma) * 2` over-allocated by ~a Y plane).
    const int bufSize = ySize + 2 * chromaSize;
    auto buf = std::shared_ptr<uint8_t>((uint8_t *) malloc(bufSize), free);
    auto y = buf.get();
    auto u = y + ySize;
    // The V offset must skip the WHOLE U plane (chromaSize bytes); the previous
    // `(outWidth * outHeight + 3) / 4` under-counts it when dimensions are odd,
    // making the U and V planes overlap.
    auto v = u + chromaSize;
    const int srcStrideY = srcWidth;
    const int srcStrideU = (srcWidth + 1) / 2;
    const int srcStrideV = srcStrideU;
    const int outStrideY = outWidth;
    const int outStrideU = (outWidth + 1) / 2;
    const int outStrideV = outStrideU;
    libyuv::I420Scale(srcY,
                      srcStrideY,
                      srcU,
                      srcStrideU,
                      srcV,
                      srcStrideV,
                      srcWidth,
                      srcHeight,
                      y,
                      outStrideY,
                      u,
                      outStrideU,
                      v,
                      outStrideV,
                      outWidth,
                      outHeight,
                      libyuv::kFilterBilinear);
    return Result{y, outStrideY, u, outStrideU, v, outStrideV, outWidth, outHeight, buf};
}
/// Rotates an I420 (YUV 4:2:0 planar) frame by `degree` degrees clockwise.
/// Width and height swap for 90/270 rotations.
///
/// @param degree any angle; it is normalized into [0, 360). libyuv only
///        defines rotation modes 0/90/180/270 — callers here pass display and
///        sensor mount angles, which appear to be multiples of 90 (TODO confirm).
/// @return Result whose plane pointers alias the buffer owned by Result::raw
///
/// `inline` is required: a non-inline free-function definition in a header
/// violates the ODR when included from multiple translation units.
inline Result I420Rotate(const uint8_t *srcY,
                         const uint8_t *srcU,
                         const uint8_t *srcV,
                         int srcWidth,
                         int srcHeight,
                         int degree)
{
    // Normalize ANY angle (negative or >= 360) into [0, 360) in one step.
    // The previous two-branch form was wrong at the edges: e.g. degree == -360
    // produced d == 360 - 0 == 360, an invalid libyuv::RotationMode.
    const int d = ((degree % 360) + 360) % 360;
    int outWidth = srcWidth;
    int outHeight = srcHeight;
    if (d == 90 || d == 270) {
        outWidth = srcHeight;
        outHeight = srcWidth;
    }
    const auto mode = static_cast<libyuv::RotationMode>(d);
    const int ySize = outWidth * outHeight;
    const int chromaSize = ((outWidth + 1) / 2) * ((outHeight + 1) / 2);
    // Exact size: Y plane + two chroma planes (previous code allocated ~2x).
    const int bufSize = ySize + 2 * chromaSize;
    auto buf = std::shared_ptr<uint8_t>((uint8_t *) malloc(bufSize), free);
    auto y = buf.get();
    auto u = y + ySize;
    // Skip the full U plane; `(w*h + 3) / 4` under-counts it for odd sizes.
    auto v = u + chromaSize;
    const int srcStrideY = srcWidth;
    const int srcStrideU = (srcWidth + 1) / 2;
    const int srcStrideV = srcStrideU;
    const int outStrideY = outWidth;
    const int outStrideU = (outWidth + 1) / 2;
    const int outStrideV = outStrideU;
    libyuv::I420Rotate(srcY,
                       srcStrideY,
                       srcU,
                       srcStrideU,
                       srcV,
                       srcStrideV,
                       y,
                       outStrideY,
                       u,
                       outStrideU,
                       v,
                       outStrideV,
                       srcWidth,
                       srcHeight,
                       mode);
    return Result{y, outStrideY, u, outStrideU, v, outStrideV, outWidth, outHeight, buf};
}
/// Converts an I420 frame to libyuv "ARGB" — which per the libyuv FOURCC
/// convention is little-endian, i.e. B,G,R,A byte order in memory — at
/// 4 bytes per pixel.
///
/// @return owning buffer of srcWidth * srcHeight * 4 bytes
///
/// `inline` is required: a non-inline free-function definition in a header
/// violates the ODR when included from multiple translation units.
inline std::shared_ptr<uint8_t> I420ToARGB(
    const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV, int srcWidth, int srcHeight)
{
    const int srcStrideY = srcWidth;
    const int srcStrideU = (srcWidth + 1) / 2;
    const int srcStrideV = srcStrideU;
    auto bits = std::shared_ptr<uint8_t>((uint8_t *) malloc(srcWidth * srcHeight * 4), free);
    libyuv::I420ToARGB(srcY,
                       srcStrideY,
                       srcU,
                       srcStrideU,
                       srcV,
                       srcStrideV,
                       bits.get(),
                       srcWidth * 4,  // destination stride: tightly packed rows
                       srcWidth,
                       srcHeight);
    return bits;
}
/// Converts an I420 frame into a QImage by writing libyuv ARGB output
/// directly into the QImage's pixel buffer.
///
/// NOTE(review): libyuv "ARGB" is B,G,R,A byte order in memory, while
/// QImage::Format_RGBA8888 expects R,G,B,A bytes — red and blue look
/// swapped here. libyuv::I420ToABGR would produce RGBA byte order;
/// confirm against actual device output before changing.
///
/// `inline` is required: a non-inline free-function definition in a header
/// violates the ODR when included from multiple translation units.
inline QImage I420ToQImage(
    const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV, int srcWidth, int srcHeight)
{
    QSize size(srcWidth, srcHeight);
    QImage image(size, QImage::Format_RGBA8888);
    const int srcStrideY = srcWidth;
    const int srcStrideU = (srcWidth + 1) / 2;
    const int srcStrideV = srcStrideU;
    libyuv::I420ToARGB(srcY,
                       srcStrideY,
                       srcU,
                       srcStrideU,
                       srcV,
                       srcStrideV,
                       reinterpret_cast<uint8_t *>(image.bits()),
                       srcWidth * 4,  // QImage rows are packed for this format/width
                       srcWidth,
                       srcHeight);
    return image;
}
/// Rotates an I420 frame upright, encodes it as JPEG and returns the JPEG
/// bytes as a base64 string.
///
/// @param orientationDisplay current display rotation in degrees
/// @param orientationCamera  sensor mount angle in degrees
/// @param direction          -1 for a front-facing camera, otherwise 1;
///        the extra -180 for front cameras in landscape compensates for the
///        mirrored sensor — presumably device-specific, TODO confirm.
///
/// `inline` is required: a non-inline free-function definition in a header
/// violates the ODR when included from multiple translation units.
inline std::string I420ToBase64(const uint8_t *srcY,
                                const uint8_t *srcU,
                                const uint8_t *srcV,
                                int srcWidth,
                                int srcHeight,
                                int orientationDisplay,
                                int orientationCamera,
                                int direction)
{
    // Angle needed to bring the sensor image upright on the current display.
    auto angle = orientationCamera - orientationDisplay;
    if (direction < 0 && (orientationDisplay == 90 || orientationDisplay == 270)) {
        angle -= 180;  // front camera in landscape is mirrored
    }
    auto result = I420Rotate(srcY, srcU, srcV, srcWidth, srcHeight, angle);
    auto image = I420ToQImage(result.y, result.u, result.v, result.width, result.height);
    QBuffer qbuffer;
    qbuffer.open(QIODevice::WriteOnly);
    image.save(&qbuffer, "JPEG");
    return qbuffer.data().toBase64().toStdString();
}
} // namespace yuv
#endif /* FLUTTER_PLUGIN_CAMERA_AURORA_PLUGIN_YUV_H */

352
packages/camera/camera_aurora/aurora/texture_camera.cpp

@ -3,19 +3,15 @@
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <camera_aurora/texture_camera.h>
#include <camera_aurora/yuv.h>
#include <flutter/platform-data.h>
#include <flutter/platform-methods.h>
#include <libyuv/libyuv.h>
extern "C" {
#include <libswscale/swscale.h>
}
#include <QtCore>
#include <QBuffer>
#define YUV_B_SIZE 4
TextureCamera::TextureCamera(TextureRegistrar *plugin, const CameraErrorHandler &onError)
@ -25,46 +21,12 @@ TextureCamera::TextureCamera(TextureRegistrar *plugin, const CameraErrorHandler
, m_camera(nullptr)
{}
std::string TextureCamera::GetImageBase64()
void TextureCamera::GetImageBase64(const TakeImageBase64Handler &takeImageBase64)
{
if (m_frame && m_camera) {
Aurora::StreamCamera::CameraInfo info;
if (m_camera->getInfo(info)) {
if (m_frame->chromaStep == 1) {
auto result = YUVI420Rotate(
m_frame->y,
m_frame->cr,
m_frame->cb,
m_frame->width,
m_frame->height,
info.mountAngle
);
QBuffer qbuffer;
qbuffer.open(QIODevice::WriteOnly);
QSize size(result.width, result.height);
QImage image(size, QImage::Format_RGBA8888);
libyuv::I420ToARGB(
result.y, result.strideY,
result.u, result.strideU,
result.v, result.strideV,
reinterpret_cast<uint8_t *>(image.bits()),
result.width * 4,
result.width,
result.height
);
image.save(&qbuffer, "JPEG");
return qbuffer.data().toBase64().toStdString();
}
}
}
return "";
m_takeImageBase64 = takeImageBase64;
m_isTakeImageBase64 = true;
}
std::vector<Encodable> TextureCamera::GetAvailableCameras()
{
std::vector<Encodable> cameras;
@ -87,48 +49,49 @@ std::vector<Encodable> TextureCamera::GetAvailableCameras()
std::map<Encodable, Encodable> TextureCamera::GetState()
{
Aurora::StreamCamera::CameraInfo info;
if (m_camera && m_camera->getInfo(info)) {
auto orientation = static_cast<int>(PlatformMethods::GetOrientation());
return std::map<Encodable, Encodable> {
{"id", info.id},
if (m_camera) {
return std::map<Encodable, Encodable>{
{"id", m_info.id},
{"textureId", m_textureId},
{"width", m_captureWidth},
{"height", m_captureHeight},
{"mountAngle", info.mountAngle},
{"rotationDisplay", orientation},
{"mountAngle", m_info.mountAngle},
{"rotationDisplay", static_cast<int>(PlatformMethods::GetOrientation())},
{"error", m_error},
};
}
return std::map<Encodable, Encodable>{
{"error", m_error}
};
return std::map<Encodable, Encodable>{{"error", m_error}};
}
bool TextureCamera::CreateCamera(std::string cameraName)
{
if (auto count = m_manager->getNumberOfCameras()) {
for (int index = 0; index < count; index++) {
Aurora::StreamCamera::CameraInfo info;
if (m_manager->getCameraInfo(index, info)) {
if (info.id == cameraName) {
m_camera = m_manager->openCamera(info.id);
if (m_camera) {
m_camera->setListener(this);
return true;
} else {
Unregister();
SendError("Stream camera error open camera");
return false;
if (m_camera) {
return true;
}
if (m_manager->init()) {
if (auto count = m_manager->getNumberOfCameras()) {
for (int index = 0; index < count; index++) {
if (m_manager->getCameraInfo(index, m_info)) {
if (m_info.id == cameraName) {
m_camera = m_manager->openCamera(m_info.id);
std::vector<Aurora::StreamCamera::CameraCapability> caps;
if (m_camera && m_manager->queryCapabilities(m_info.id, caps)) {
m_cap = caps.back();
return true;
} else {
SendError("Stream camera error open camera");
return false;
}
}
}
}
}
}
return false;
}
@ -140,21 +103,19 @@ void TextureCamera::SendError(std::string error)
std::map<Encodable, Encodable> TextureCamera::StartCapture(size_t width, size_t height)
{
m_viewWidth = width;
m_viewHeight = height;
if (m_camera && !m_camera->captureStarted()) {
m_viewWidth = width;
m_viewHeight = height;
if (m_camera) {
Aurora::StreamCamera::CameraInfo info;
if (m_camera->getInfo(info)) {
std::vector<Aurora::StreamCamera::CameraCapability> caps;
if (m_manager->queryCapabilities(info.id, caps)) {
auto cap = caps.back();
ResizeFrame(width, height, info, cap);
if (!m_camera->startCapture(cap)) {
Unregister();
SendError("Stream camera error start capture");
}
}
ResizeFrame(width, height, m_info, m_cap);
m_isStart = m_camera->startCapture(m_cap);
if (!m_isStart) {
Unregister();
SendError("Stream camera error start capture");
} else {
m_camera->setListener(this);
}
}
@ -163,8 +124,12 @@ std::map<Encodable, Encodable> TextureCamera::StartCapture(size_t width, size_t
void TextureCamera::StopCapture()
{
do {
std::this_thread::sleep_for(std::chrono::microseconds(10));
} while (m_isTakeImageBase64);
if (m_camera && m_camera->captureStarted()) {
m_frame = nullptr;
m_isStart = false;
m_camera->stopCapture();
}
}
@ -172,13 +137,11 @@ void TextureCamera::StopCapture()
std::map<Encodable, Encodable> TextureCamera::Register(std::string cameraName)
{
m_textureId = m_plugin->RegisterTexture(
[this]([[maybe_unused]] size_t width, [[maybe_unused]] size_t height) -> std::optional<TextureVariant> {
[this]([[maybe_unused]] size_t width,
[[maybe_unused]] size_t height) -> std::optional<TextureVariant> {
if (m_bits && m_captureWidth != 0 && m_captureHeight != 0) {
return std::make_optional(TextureVariant(FlutterPixelBuffer{
m_bits,
m_captureWidth,
m_captureHeight
}));
return std::make_optional(
TextureVariant(FlutterPixelBuffer{m_bits, m_captureWidth, m_captureHeight}));
}
return std::nullopt;
});
@ -192,7 +155,14 @@ std::map<Encodable, Encodable> TextureCamera::Register(std::string cameraName)
std::map<Encodable, Encodable> TextureCamera::Unregister()
{
StopCapture();
m_bits = nullptr;
if (m_camera) {
m_isStart = false;
m_camera->stopCapture();
m_camera->setListener(nullptr);
m_camera = nullptr;
}
m_plugin->UnregisterTexture(m_textureId);
@ -201,28 +171,22 @@ std::map<Encodable, Encodable> TextureCamera::Unregister()
m_textureId = 0;
m_captureWidth = 0;
m_captureHeight = 0;
m_bits = nullptr;
m_camera = nullptr;
return GetState();
}
std::map<Encodable, Encodable> TextureCamera::ResizeFrame(size_t width, size_t height)
{
if (m_camera && m_camera->captureStarted() && !(width == m_captureWidth || height == m_captureHeight)) {
Aurora::StreamCamera::CameraInfo info;
if (m_camera->getInfo(info)) {
std::vector<Aurora::StreamCamera::CameraCapability> caps;
if (m_manager->queryCapabilities(info.id, caps)) {
auto cap = caps.back();
ResizeFrame(width, height, info, cap);
}
}
if (m_isStart && !(width == m_captureWidth || height == m_captureHeight)) {
ResizeFrame(width, height, m_info, m_cap);
}
return GetState();
}
void TextureCamera::ResizeFrame(size_t width, size_t height, Aurora::StreamCamera::CameraInfo info, Aurora::StreamCamera::CameraCapability cap)
void TextureCamera::ResizeFrame(size_t width,
size_t height,
Aurora::StreamCamera::CameraInfo info,
Aurora::StreamCamera::CameraCapability cap)
{
auto cw = cap.width;
auto ch = cap.height;
@ -246,64 +210,61 @@ void TextureCamera::ResizeFrame(size_t width, size_t height, Aurora::StreamCamer
}
}
std::optional<std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame>> TextureCamera::GetFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
std::optional<std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame>> TextureCamera::GetFrame(
std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
{
auto frame = buffer->mapYCbCr();
if (m_isTakeImageBase64) {
std::string base64 = "";
if (frame->chromaStep == 1 /* I420 */) {
base64 = yuv::I420ToBase64(frame->y,
frame->cr,
frame->cb,
frame->width,
frame->height,
static_cast<int>(PlatformMethods::GetOrientation()),
m_info.mountAngle,
m_info.id.find("front") != std::string::npos ? -1 : 1);
}
m_takeImageBase64(base64);
m_isTakeImageBase64 = false;
}
m_counter += 1;
if (m_counter < 0) {
m_counter = 0;
}
if (m_counter %3 == 0 || !m_camera) {
if (m_counter % 3 == 0) {
return std::nullopt;
}
m_frame = buffer->mapYCbCr();
return m_frame;
return frame;
}
void TextureCamera::onCameraFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
{
if (auto optional = GetFrame(buffer)) {
if (!m_isStart) {
return;
}
if (auto optional = GetFrame(buffer)) {
auto frame = optional.value();
if (!m_camera || !m_camera->captureStarted()) {
return;
}
auto result = yuv::I420Scale(frame->y,
frame->cr,
frame->cb,
frame->width,
frame->height,
m_captureWidth,
m_captureHeight);
auto result = YUVI420Scale(
frame->y,
frame->cr,
frame->cb,
frame->width,
frame->height,
m_captureWidth,
m_captureHeight
);
if (!m_camera || !m_camera->captureStarted()) {
return;
}
m_bits = yuv::I420ToARGB(result.y, result.u, result.v, result.width, result.height),
auto bits = std::shared_ptr<uint8_t>((uint8_t *) malloc(result.width * result.height * 4), free);
libyuv::I420ToARGB(
result.y, result.strideY,
result.u, result.strideU,
result.v, result.strideV,
bits.get(),
result.width * 4,
result.width,
result.height
);
if (!m_camera || !m_camera->captureStarted()) {
return;
}
m_bits = bits;
m_plugin->MarkTextureAvailable(m_textureId);
}
}
@ -314,113 +275,8 @@ void TextureCamera::onCameraError(const std::string &errorDescription)
SendError(errorDescription);
}
void TextureCamera::onCameraParameterChanged([[maybe_unused]] Aurora::StreamCamera::CameraParameter parameter,
const std::string &value)
void TextureCamera::onCameraParameterChanged(
[[maybe_unused]] Aurora::StreamCamera::CameraParameter parameter, const std::string &value)
{
std::cout << "onCameraParameterChanged: " << value << std::endl;
}
ResultYUV TextureCamera::YUVI420Scale(
const uint8_t* srcY,
const uint8_t* srcU,
const uint8_t* srcV,
int srcWidth,
int srcHeight,
int outWidth,
int outHeight
)
{
auto bufSize = (((outWidth * outHeight) + ((outWidth + 1) / 2) * ((outHeight + 1) / 2))) * 2;
auto buf = std::shared_ptr<uint8_t>((uint8_t *) malloc(bufSize), free);
auto y = buf.get();
auto u = buf.get() + outWidth * outHeight;
auto v = buf.get() + outWidth * outHeight + (outWidth * outHeight + 3) / 4;
auto srcStrideY = srcWidth;
auto srcStrideU = (srcWidth + 1) / 2;
auto srcStrideV = srcStrideU;
auto outStrideY = outWidth;
auto outStrideU = (outWidth + 1) / 2;
auto outStrideV = outStrideU;
libyuv::I420Scale(
srcY, srcStrideY,
srcU, srcStrideU,
srcV, srcStrideV,
srcWidth,
srcHeight,
y, outStrideY,
u, outStrideU,
v, outStrideV,
outWidth,
outHeight,
libyuv::kFilterBilinear
);
return ResultYUV{
y, outStrideY,
u, outStrideU,
v, outStrideV,
outWidth,
outHeight,
buf
};
}
ResultYUV TextureCamera::YUVI420Rotate(
const uint8_t* srcY,
const uint8_t* srcU,
const uint8_t* srcV,
int srcWidth,
int srcHeight,
int degree // 0, 90, 180, 270
)
{
int outWidth = srcWidth;
int outHeight = srcHeight;
if (degree == 90 || degree == 270) {
outWidth = srcHeight;
outHeight = srcWidth;
}
enum libyuv::RotationMode mode = (enum libyuv::RotationMode) degree;
auto bufSize = (((outWidth * outHeight) + ((outWidth + 1) / 2) * ((outHeight + 1) / 2))) * 2;
auto buf = std::shared_ptr<uint8_t>((uint8_t *) malloc(bufSize), free);
auto y = buf.get();
auto u = buf.get() + outWidth * outHeight;
auto v = buf.get() + outWidth * outHeight + (outWidth * outHeight + 3) / 4;
auto srcStrideY = srcWidth;
auto srcStrideU = (srcWidth + 1) / 2;
auto srcStrideV = srcStrideU;
auto outStrideY = outWidth;
auto outStrideU = (outWidth + 1) / 2;
auto outStrideV = outStrideU;
libyuv::I420Rotate(
srcY, srcStrideY,
srcU, srcStrideU,
srcV, srcStrideV,
y, outStrideY,
u, outStrideU,
v, outStrideV,
srcWidth,
srcHeight,
mode
);
return ResultYUV{
y, outStrideY,
u, outStrideU,
v, outStrideV,
outWidth,
outHeight,
buf
};
}

9
packages/camera/camera_aurora/lib/camera_viewfinder.dart

@ -54,7 +54,6 @@ class _CameraViewfinderState extends State<CameraViewfinder> {
} else if (_cameraState.isNotEmpty()) {
int turn = 0;
// @todo Different direction of rotation
bool isFront = _cameraState.id.contains('front');
switch (_cameraState.mountAngle) {
@ -73,16 +72,16 @@ class _CameraViewfinderState extends State<CameraViewfinder> {
switch (_cameraState.rotationDisplay) {
case 0:
turn += 0;
turn -= 0;
break;
case 90:
turn += isFront ? 1 : -1;
turn -= isFront ? -1 : 1;
break;
case 180:
turn += 2;
turn -= 2;
break;
default: // 270
turn += isFront ? 3 : -3;
turn -= isFront ? -3 : 3;
}
double cw = turn % 2 == 0 ? _cameraState.height : _cameraState.width;

Loading…
Cancel
Save