
[texture] Add camera and helpers

embedder_texture
Vitaliy Zarubin, 1 year ago
parent commit 0ccba5006e
  1. packages/embedder_texture/aurora/CMakeLists.txt (4)
  2. packages/embedder_texture/aurora/camera.cpp (93)
  3. packages/embedder_texture/aurora/camera_egl_helper.cpp (42)
  4. packages/embedder_texture/aurora/camera_helper.cpp (106)
  5. packages/embedder_texture/aurora/camera_pixel_buffer.cpp (124)
  6. packages/embedder_texture/aurora/camera_pixels_helper.cpp (77)
  7. packages/embedder_texture/aurora/embedder_texture_plugin.cpp (24)
  8. packages/embedder_texture/aurora/include/embedder_texture/camera.h (33)
  9. packages/embedder_texture/aurora/include/embedder_texture/camera_egl_helper.h (21)
  10. packages/embedder_texture/aurora/include/embedder_texture/camera_pixels_helper.h (33)
  11. packages/embedder_texture/aurora/include/embedder_texture/embedder_texture_plugin.h (10)

packages/embedder_texture/aurora/CMakeLists.txt (4)

@@ -23,7 +23,9 @@ pkg_check_modules(SC REQUIRED IMPORTED_TARGET streamcamera)
add_library(${PLUGIN_NAME} SHARED
embedder_texture_plugin.cpp
camera_pixel_buffer.cpp
camera_pixels_helper.cpp
camera_egl_helper.cpp
camera.cpp
)
set_target_properties(${PLUGIN_NAME} PROPERTIES CXX_VISIBILITY_PRESET hidden)

packages/embedder_texture/aurora/camera.cpp (93)

@@ -0,0 +1,93 @@
/**
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <embedder_texture/camera.h>
#include <embedder_texture/camera_egl_helper.h>
#include <embedder_texture/camera_pixels_helper.h>
#include <iostream>
Camera::Camera(TextureRegistrar *plugin)
: m_plugin(plugin)
, m_manager(StreamCameraManager())
{
CameraEGLHelper::EGLInit();
}
void Camera::InitializeCamera(int cameraID)
{
if (m_cameraId != cameraID && m_manager->getNumberOfCameras()) {
Aurora::StreamCamera::CameraInfo info;
if (m_manager->getCameraInfo(cameraID, info)) {
m_cameraId = cameraID;
m_camera = m_manager->openCamera(info.id);
m_camera->setListener(this);
}
}
}
void Camera::StartCapture()
{
if (m_camera) {
Aurora::StreamCamera::CameraInfo info;
if (m_camera->getInfo(info)) {
std::vector<Aurora::StreamCamera::CameraCapability> caps;
if (m_manager->queryCapabilities(info.id, caps)) {
m_camera->startCapture(caps.at(1));
}
}
}
}
void Camera::StopCapture()
{
if (m_camera) {
m_cameraId = -1;
m_buffer = nullptr;
m_camera->stopCapture();
}
}
int64_t Camera::Register(int cameraID)
{
m_textureId = m_plugin->RegisterTexture(
[this](size_t width, size_t height) { return this->m_buffer; });
InitializeCamera(cameraID);
StartCapture();
return m_textureId;
}
void Camera::Unregister()
{
StopCapture();
m_plugin->UnregisterTexture(m_textureId);
}
void Camera::onCameraFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
{
if (buffer->handleType == Aurora::StreamCamera::HandleType::EGL) {
// @todo Not tested: verifying the EGL path requires a suitable device.
auto eglImage = CameraEGLHelper::EGLCreateImage(buffer);
this->m_buffer = new TextureVariant(FlutterEGLImage{eglImage});
} else {
auto pixels = CameraPixelsHelper::YUVtoARGB(buffer->mapYCbCr());
this->m_buffer = new TextureVariant(FlutterPixelBuffer{pixels});
}
m_plugin->MarkTextureAvailable(m_textureId);
}
void Camera::onCameraError(const std::string &errorDescription)
{
std::cout << "onCameraError" << std::endl;
}
void Camera::onCameraParameterChanged(Aurora::StreamCamera::CameraParameter parameter,
const std::string &value)
{
std::cout << "onCameraParameterChanged" << std::endl;
}
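Note: StartCapture above selects caps.at(1) unconditionally, which throws std::out_of_range if a camera reports fewer than two capabilities. A minimal defensive variant, sketched against the same StreamCamera API used in this commit (an illustration, not part of the change):

    // Hypothetical variant of Camera::StartCapture with a capability guard.
    void Camera::StartCapture()
    {
        if (!m_camera) {
            return;
        }
        Aurora::StreamCamera::CameraInfo info;
        if (!m_camera->getInfo(info)) {
            return;
        }
        std::vector<Aurora::StreamCamera::CameraCapability> caps;
        if (m_manager->queryCapabilities(info.id, caps) && !caps.empty()) {
            // Keep the commit's preference for the second capability, but fall
            // back to the first when only one is reported.
            const auto &cap = caps.size() > 1 ? caps[1] : caps.front();
            m_camera->startCapture(cap);
        }
    }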

packages/embedder_texture/aurora/camera_egl_helper.cpp (42)

@@ -0,0 +1,42 @@
/**
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <embedder_texture/camera_egl_helper.h>
#include <flutter/platform-methods.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
static PFNEGLCREATEIMAGEKHRPROC eglCreateImageKHR;
static PFNEGLDESTROYIMAGEKHRPROC eglDestroyImageKHR;
void CameraEGLHelper::EGLInit()
{
eglCreateImageKHR = reinterpret_cast<PFNEGLCREATEIMAGEKHRPROC>(
eglGetProcAddress("eglCreateImageKHR"));
eglDestroyImageKHR = reinterpret_cast<PFNEGLDESTROYIMAGEKHRPROC>(
eglGetProcAddress("eglDestroyImageKHR"));
}
EGLImageKHR CameraEGLHelper::EGLCreateImage(
std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
{
auto display = PlatformMethods::GetEGLDisplay();
auto context = PlatformMethods::GetEGLContext();
const void *handle = buffer->handle;
GLint eglImgAttrs[] = {EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE, EGL_NONE};
return eglCreateImageKHR(display,
context,
EGL_NATIVE_BUFFER_ANDROID,
(EGLClientBuffer) handle,
eglImgAttrs);
}
void CameraEGLHelper::EGLDestroyImage(EGLImageKHR image)
{
auto display = PlatformMethods::GetEGLDisplay();
eglDestroyImageKHR(display, image);
}
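The EGLImageKHR produced by EGLCreateImage still has to be attached to a GL(ES) texture before the frame can be sampled. A minimal sketch of that step, assuming the GL_OES_EGL_image / GL_OES_EGL_image_external extensions and a current EGL context; glEGLImageTargetTexture2DOES is resolved the same way as the EGL entry points above, and none of this is part of the commit:

    #include <EGL/egl.h>
    #include <EGL/eglext.h>
    #include <GLES2/gl2.h>
    #include <GLES2/gl2ext.h>

    // Hypothetical helper: bind an EGLImageKHR to a new external GLES texture.
    static GLuint TextureFromEGLImage(EGLImageKHR image)
    {
        static const auto glEGLImageTargetTexture2DOES =
            reinterpret_cast<PFNGLEGLIMAGETARGETTEXTURE2DOESPROC>(
                eglGetProcAddress("glEGLImageTargetTexture2DOES"));

        GLuint texture = 0;
        glGenTextures(1, &texture);
        glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture);
        glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        // Sampling this texture requires a samplerExternalOES in the shader.
        glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES,
                                     static_cast<GLeglImageOES>(image));
        return texture;
    }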

packages/embedder_texture/aurora/camera_helper.cpp (106)

@@ -0,0 +1,106 @@
/**
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <embedder_texture/camera_helper.h>
#include <flutter/platform-methods.h>
static PFNEGLCREATEIMAGEKHRPROC eglCreateImageKHR;
static PFNEGLDESTROYIMAGEKHRPROC eglDestroyImageKHR;
void CameraHelper::EGLInit()
{
eglCreateImageKHR = reinterpret_cast<PFNEGLCREATEIMAGEKHRPROC>(
eglGetProcAddress("eglCreateImageKHR"));
eglDestroyImageKHR = reinterpret_cast<PFNEGLDESTROYIMAGEKHRPROC>(
eglGetProcAddress("eglDestroyImageKHR"));
}
EGLImageKHR CameraHelper::EGLCreateImage(
std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
{
auto eglDisplay = PlatformMethods::GetEGLDisplay();
auto eglContext = PlatformMethods::GetEGLContext();
const void *handle = buffer->handle;
GLint eglImgAttrs[] = {EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE, EGL_NONE};
return eglCreateImageKHR(eglDisplay,
eglContext,
EGL_NATIVE_BUFFER_ANDROID,
(EGLClientBuffer) handle,
eglImgAttrs);
}
void CameraHelper::EGLDestroyImage(EGLImageKHR eglImage, EGLDisplay eglDisplay)
{
eglDestroyImageKHR(eglDisplay, eglImage);
}
uint8_t *CameraHelper::YUVtoARGB(std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame> frame)
{
QSize size(frame->width, frame->height);
QImage *image = new QImage(size, QImage::Format_ARGB32);
planarYuv420ToArgb(frame->y,
frame->cb,
frame->cr,
frame->yStride,
frame->cStride,
frame->cStride,
frame->chromaStep,
reinterpret_cast<quint32 *>(image->bits()),
frame->width,
frame->height);
return static_cast<uint8_t *>(image->bits());
}
quint32 CameraHelper::yuvToArgb(qint32 y, qint32 rv, qint32 guv, qint32 bu, qint32 a = 255)
{
qint32 yy = (y - 16) * 298;
return (a << 24) | qBound(0, (yy + rv) >> 8, 255) << 16 | qBound(0, (yy - guv) >> 8, 255) << 8
| qBound(0, (yy + bu) >> 8, 255);
}
void CameraHelper::planarYuv420ToArgb(const uchar *y,
const uchar *u,
const uchar *v,
qint32 yStride,
qint32 uStride,
qint32 vStride,
qint32 uvPixelStride,
quint32 *rgb,
qint32 width,
qint32 height)
{
quint32 *rgb0 = rgb;
quint32 *rgb1 = rgb + width;
for (qint32 j = 0; j < height; j += 2) {
const uchar *lineY0 = y;
const uchar *lineY1 = y + yStride;
const uchar *lineU = u;
const uchar *lineV = v;
for (qint32 i = 0; i < width; i += 2) {
const qint32 uu = *lineU - 128;
const qint32 vv = *lineV - 128;
const qint32 rv = 409 * vv + 128;
const qint32 guv = 100 * uu + 208 * vv + 128;
const qint32 bu = 516 * uu + 128;
lineU += uvPixelStride;
lineV += uvPixelStride;
*rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
*rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
*rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
*rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
}
y += yStride << 1;
u += uStride;
v += vStride;
rgb0 += width;
rgb1 += width;
}
}

packages/embedder_texture/aurora/camera_pixel_buffer.cpp (124)

@@ -1,124 +0,0 @@
/**
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <embedder_texture/camera_pixel_buffer.h>
CameraPixelBuffer::CameraPixelBuffer(TextureRegistrar *plugin) : m_plugin(plugin)
{
Aurora::StreamCamera::CameraManager *manager = StreamCameraManager();
if (manager->getNumberOfCameras())
{
Aurora::StreamCamera::CameraInfo info;
if (manager->getCameraInfo(0, info)) // Camera ID = 0
{
std::vector<Aurora::StreamCamera::CameraCapability> caps;
if (manager->queryCapabilities(info.id, caps))
{
m_cap = caps.at(1);
m_camera = manager->openCamera(info.id);
m_camera->setListener(this);
}
}
}
};
int64_t CameraPixelBuffer::Register()
{
m_textureId = m_plugin->RegisterTexture(TextureType::Pixels, [this](size_t width, size_t height) {
if (this->m_buffer) {
return TextureVariant(FlutterPixelBuffer{ this->m_buffer->buffer } );
}
return TextureVariant(FlutterPixelBuffer{});
});
m_camera->startCapture(m_cap);
return m_textureId;
}
void CameraPixelBuffer::Unregister()
{
m_plugin->UnregisterTexture(m_textureId);
m_camera->stopCapture();
m_buffer = nullptr;
}
void CameraPixelBuffer::onCameraFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
{
auto frame = buffer->mapYCbCr();
QSize size(frame->width, frame->height);
QImage* image = new QImage(size, QImage::Format_ARGB32);
this->planarYuv420ToArgb(frame->y, frame->cb, frame->cr,
frame->yStride, frame->cStride, frame->cStride,
frame->chromaStep,
reinterpret_cast<quint32 *>(image->bits()),
frame->width, frame->height);
uint8_t *pixels = static_cast<uint8_t *>(image->bits());
this->m_buffer = new FlutterPixelBuffer { pixels };
m_plugin->MarkTextureAvailable(m_textureId);
}
void CameraPixelBuffer::onCameraError(const std::string &errorDescription)
{
std::cout << "onCameraError" << std::endl;
}
void CameraPixelBuffer::onCameraParameterChanged(Aurora::StreamCamera::CameraParameter parameter, const std::string &value)
{
std::cout << "onCameraParameterChanged" << std::endl;
}
quint32 CameraPixelBuffer::yuvToArgb(qint32 y, qint32 rv, qint32 guv, qint32 bu, qint32 a = 255)
{
qint32 yy = (y - 16) * 298;
return (a << 24)
| qBound(0, (yy + rv) >> 8, 255) << 16
| qBound(0, (yy - guv) >> 8, 255) << 8
| qBound(0, (yy + bu) >> 8, 255);
}
void CameraPixelBuffer::planarYuv420ToArgb(const uchar *y, const uchar *u, const uchar *v,
qint32 yStride, qint32 uStride, qint32 vStride,
qint32 uvPixelStride, quint32 *rgb, qint32 width, qint32 height)
{
quint32 *rgb0 = rgb;
quint32 *rgb1 = rgb + width;
for (qint32 j = 0; j < height; j += 2) {
const uchar *lineY0 = y;
const uchar *lineY1 = y + yStride;
const uchar *lineU = u;
const uchar *lineV = v;
for (qint32 i = 0; i < width; i += 2) {
const qint32 uu = *lineU - 128;
const qint32 vv = *lineV - 128;
const qint32 rv = 409 * vv + 128;
const qint32 guv = 100 * uu + 208 * vv + 128;
const qint32 bu = 516 * uu + 128;
lineU += uvPixelStride;
lineV += uvPixelStride;
*rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
*rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
*rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
*rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
}
y += yStride << 1;
u += uStride;
v += vStride;
rgb0 += width;
rgb1 += width;
}
}

packages/embedder_texture/aurora/camera_pixels_helper.cpp (77)

@@ -0,0 +1,77 @@
/**
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <embedder_texture/camera_pixels_helper.h>
uint8_t *CameraPixelsHelper::YUVtoARGB(std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame> frame)
{
QSize size(frame->width, frame->height);
QImage *image = new QImage(size, QImage::Format_ARGB32);
planarYuv420ToArgb(frame->y,
frame->cb,
frame->cr,
frame->yStride,
frame->cStride,
frame->cStride,
frame->chromaStep,
reinterpret_cast<quint32 *>(image->bits()),
frame->width,
frame->height);
return static_cast<uint8_t *>(image->bits());
}
quint32 CameraPixelsHelper::yuvToArgb(qint32 y, qint32 rv, qint32 guv, qint32 bu, qint32 a = 255)
{
qint32 yy = (y - 16) * 298;
return (a << 24)
| qBound(0, (yy + rv) >> 8, 255) << 16
| qBound(0, (yy - guv) >> 8, 255) << 8
| qBound(0, (yy + bu) >> 8, 255);
}
void CameraPixelsHelper::planarYuv420ToArgb(const uchar *y,
const uchar *u,
const uchar *v,
qint32 yStride,
qint32 uStride,
qint32 vStride,
qint32 uvPixelStride,
quint32 *rgb,
qint32 width,
qint32 height)
{
quint32 *rgb0 = rgb;
quint32 *rgb1 = rgb + width;
for (qint32 j = 0; j < height; j += 2) {
const uchar *lineY0 = y;
const uchar *lineY1 = y + yStride;
const uchar *lineU = u;
const uchar *lineV = v;
for (qint32 i = 0; i < width; i += 2) {
const qint32 uu = *lineU - 128;
const qint32 vv = *lineV - 128;
const qint32 rv = 409 * vv + 128;
const qint32 guv = 100 * uu + 208 * vv + 128;
const qint32 bu = 516 * uu + 128;
lineU += uvPixelStride;
lineV += uvPixelStride;
*rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
*rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
*rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
*rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
}
y += yStride << 1;
u += uStride;
v += vStride;
rgb0 += width;
rgb1 += width;
}
}
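The two functions above implement an integer-only, limited-range BT.601 YCbCr-to-ARGB conversion (coefficients 298/409/208/100/516 with a +128 rounding bias). A self-contained sketch that folds the per-pixel chroma terms into one function and checks two reference values, purely for illustration and not part of the commit:

    #include <algorithm>
    #include <cassert>
    #include <cstdint>

    // Same fixed-point math as planarYuv420ToArgb/yuvToArgb, for one pixel.
    static uint32_t yuvToArgb(int32_t y, int32_t cb, int32_t cr, int32_t a = 255)
    {
        const int32_t uu = cb - 128;
        const int32_t vv = cr - 128;
        const int32_t rv = 409 * vv + 128;
        const int32_t guv = 100 * uu + 208 * vv + 128;
        const int32_t bu = 516 * uu + 128;
        const int32_t yy = (y - 16) * 298;
        const auto clamp = [](int32_t c) { return std::clamp<int32_t>(c, 0, 255); };
        return (static_cast<uint32_t>(a) << 24) | (clamp((yy + rv) >> 8) << 16)
                                                | (clamp((yy - guv) >> 8) << 8)
                                                |  clamp((yy + bu) >> 8);
    }

    int main()
    {
        assert(yuvToArgb(16, 128, 128) == 0xFF000000u);  // video black -> opaque black
        assert(yuvToArgb(235, 128, 128) == 0xFFFFFEFFu); // video white -> near-white
        return 0;
    }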

packages/embedder_texture/aurora/embedder_texture_plugin.cpp (24)

@@ -4,47 +4,49 @@
*/
#include <embedder_texture/embedder_texture_plugin.h>
#include <flutter/method-channel.h>
#include <flutter/platform-methods.h>
void EmbedderTexturePlugin::RegisterWithRegistrar(PluginRegistrar &registrar)
{
TextureRegistrar *plugin = registrar.GetRegisterTexture();
m_cameraPixelBuffer = new CameraPixelBuffer(plugin);
m_camera = new Camera(plugin);
registrar.RegisterMethodChannel("embedder_texture",
MethodCodecType::Standard,
[this, plugin](const MethodCall &call) { this->onMethodCall(call, plugin); });
[this](const MethodCall &call) { this->onMethodCall(call); });
}
void EmbedderTexturePlugin::onMethodCall(const MethodCall &call, TextureRegistrar *plugin) {
void EmbedderTexturePlugin::onMethodCall(const MethodCall &call)
{
const auto &method = call.GetMethod();
if (method == "create") {
onCreate(call, plugin);
onCreate(call);
return;
}
if (method == "remove") {
onRemove(call, plugin);
onRemove(call);
return;
}
unimplemented(call);
}
void EmbedderTexturePlugin::onCreate(const MethodCall &call, TextureRegistrar *plugin)
void EmbedderTexturePlugin::onCreate(const MethodCall &call)
{
auto textureId = m_cameraPixelBuffer->Register();
auto cameraId = 0;
auto textureId = m_camera->Register(cameraId);
call.SendSuccessResponse(textureId);
}
void EmbedderTexturePlugin::onRemove(const MethodCall &call, TextureRegistrar *plugin)
void EmbedderTexturePlugin::onRemove(const MethodCall &call)
{
auto textureId = call.GetArgument<Encodable::Int>("textureId");
m_cameraPixelBuffer->Unregister();
m_camera->Unregister();
call.SendSuccessResponse(true);
}
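onCreate above hard-codes camera 0. If the Dart side were to pass a camera index in the call arguments, it could be read the same way onRemove reads textureId. A minimal sketch, assuming a "cameraId" argument that this commit does not define:

    void EmbedderTexturePlugin::onCreate(const MethodCall &call)
    {
        // "cameraId" is a hypothetical argument name, not part of this commit.
        auto cameraId = call.GetArgument<Encodable::Int>("cameraId");
        auto textureId = m_camera->Register(cameraId);
        call.SendSuccessResponse(textureId);
    }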

packages/embedder_texture/aurora/include/embedder_texture/camera_pixel_buffer.h → packages/embedder_texture/aurora/include/embedder_texture/camera.h (33)

@@ -2,38 +2,43 @@
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
* SPDX-License-Identifier: BSD-3-Clause
*/
#ifndef CAMERA_PIXEL_BUFFER_H
#define CAMERA_PIXEL_BUFFER_H
#ifndef CAMERA_BUFFER_H
#define CAMERA_BUFFER_H
#include <flutter/plugin-interface.h>
#include <streamcamera/streamcamera.h>
#include <QtCore>
#include <QImage>
#include <QtCore>
class CameraPixelBuffer : public Aurora::StreamCamera::CameraListener
class Camera : public Aurora::StreamCamera::CameraListener
{
public:
CameraPixelBuffer(TextureRegistrar *plugin);
Camera(TextureRegistrar *plugin);
void onCameraError(const std::string &errorDescription) override;
void onCameraParameterChanged(Aurora::StreamCamera::CameraParameter, const std::string &value) override;
void onCameraFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer) override;
void onCameraParameterChanged(Aurora::StreamCamera::CameraParameter,
const std::string &value) override;
int64_t Register();
int64_t Register(int cameraID);
void Unregister();
private:
quint32 yuvToArgb(qint32 y, qint32 rv, qint32 guv, qint32 bu, qint32 a);
void planarYuv420ToArgb(const uchar *y, const uchar *u, const uchar *v, qint32 yStride, qint32 uStride, qint32 vStride, qint32 uvPixelStride, quint32 *rgb, qint32 width, qint32 height);
void InitializeCamera(int cameraID);
void StartCapture();
void StopCapture();
private:
int64_t m_textureId;
TextureRegistrar *m_plugin;
FlutterPixelBuffer *m_buffer;
Aurora::StreamCamera::CameraCapability m_cap;
Aurora::StreamCamera::CameraManager *m_manager;
std::shared_ptr<Aurora::StreamCamera::Camera> m_camera;
int m_cameraId = -1;
int64_t m_textureId;
TextureVariant *m_buffer;
};
#endif /* CAMERA_PIXEL_BUFFER_H */
#endif /* CAMERA_BUFFER_H */

packages/embedder_texture/aurora/include/embedder_texture/camera_egl_helper.h (21)

@@ -0,0 +1,21 @@
/**
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
* SPDX-License-Identifier: BSD-3-Clause
*/
#ifndef CAMERA_EGL_HELPER_H
#define CAMERA_EGL_HELPER_H
#include <streamcamera/streamcamera.h>
#include <EGL/egl.h>
#include <EGL/eglext.h>
class CameraEGLHelper
{
public:
static void EGLInit();
static EGLImageKHR EGLCreateImage(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer);
static void EGLDestroyImage(EGLImageKHR image);
};
#endif /* CAMERA_EGL_HELPER_H */

packages/embedder_texture/aurora/include/embedder_texture/camera_pixels_helper.h (33)

@@ -0,0 +1,33 @@
/**
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
* SPDX-License-Identifier: BSD-3-Clause
*/
#ifndef CAMERA_PIXELS_HELPER_H
#define CAMERA_PIXELS_HELPER_H
#include <flutter/plugin-interface.h>
#include <streamcamera/streamcamera.h>
#include <QImage>
#include <QtCore>
class CameraPixelsHelper
{
public:
static uint8_t *YUVtoARGB(std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame> frame);
private:
static quint32 yuvToArgb(qint32 y, qint32 rv, qint32 guv, qint32 bu, qint32 a);
static void planarYuv420ToArgb(const uchar *y,
const uchar *u,
const uchar *v,
qint32 yStride,
qint32 uStride,
qint32 vStride,
qint32 uvPixelStride,
quint32 *rgb,
qint32 width,
qint32 height);
};
#endif /* CAMERA_PIXELS_HELPER_H */

packages/embedder_texture/aurora/include/embedder_texture/embedder_texture_plugin.h (10)

@@ -7,7 +7,7 @@
#include <flutter/plugin-interface.h>
#include <embedder_texture/camera_pixel_buffer.h>
#include <embedder_texture/camera.h>
#ifdef PLUGIN_IMPL
#define PLUGIN_EXPORT __attribute__((visibility("default")))
@@ -21,13 +21,13 @@ public:
void RegisterWithRegistrar(PluginRegistrar &registrar) override;
private:
void onMethodCall(const MethodCall &call, TextureRegistrar *plugin);
void onCreate(const MethodCall &call, TextureRegistrar *plugin);
void onRemove(const MethodCall &call, TextureRegistrar *plugin);
void onMethodCall(const MethodCall &call);
void onCreate(const MethodCall &call);
void onRemove(const MethodCall &call);
void unimplemented(const MethodCall &call);
private:
CameraPixelBuffer *m_cameraPixelBuffer;
Camera *m_camera;
};
#endif /* EMBEDDER_TEXTURE_PLUGIN_H */
