Vitaliy Zarubin
1 year ago
11 changed files with 412 additions and 155 deletions
@ -0,0 +1,93 @@
|
||||
/**
|
||||
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru> |
||||
* SPDX-License-Identifier: BSD-3-Clause |
||||
*/ |
||||
#include <embedder_texture/camera.h> |
||||
#include <embedder_texture/camera_egl_helper.h> |
||||
#include <embedder_texture/camera_pixels_helper.h> |
||||
|
||||
// Construct the camera wrapper: remember the texture registrar used to
// publish frames and acquire the platform stream-camera manager.
Camera::Camera(TextureRegistrar *plugin)
    : m_plugin(plugin)
    , m_manager(StreamCameraManager())
{
    // Resolve the EGL extension entry points early, before any frame
    // arrives; the EGL branch of onCameraFrame() depends on them.
    CameraEGLHelper::EGLInit();
}
||||
|
||||
void Camera::InitializeCamera(int cameraID) |
||||
{ |
||||
if (m_cameraId != cameraID && m_manager->getNumberOfCameras()) { |
||||
Aurora::StreamCamera::CameraInfo info; |
||||
|
||||
if (m_manager->getCameraInfo(cameraID, info)) { |
||||
m_cameraId = cameraID; |
||||
m_camera = m_manager->openCamera(info.id); |
||||
m_camera->setListener(this); |
||||
} |
||||
} |
||||
} |
||||
|
||||
void Camera::StartCapture() |
||||
{ |
||||
if (m_camera) { |
||||
Aurora::StreamCamera::CameraInfo info; |
||||
|
||||
if (m_camera->getInfo(info)) { |
||||
std::vector<Aurora::StreamCamera::CameraCapability> caps; |
||||
|
||||
if (m_manager->queryCapabilities(info.id, caps)) { |
||||
m_camera->startCapture(caps.at(1)); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
// Stop frame delivery and reset cached state so a subsequent
// InitializeCamera() call can reopen any camera.
void Camera::StopCapture()
{
    if (m_camera) {
        m_cameraId = -1;
        // NOTE(review): the TextureVariant allocated in onCameraFrame()
        // is only unreferenced here, never deleted — it leaks. Confirm
        // whether the renderer may still hold the pointer before freeing.
        m_buffer = nullptr;
        m_camera->stopCapture();
    }
}
||||
|
||||
int64_t Camera::Register(int cameraID) |
||||
{ |
||||
m_textureId = m_plugin->RegisterTexture( |
||||
[this](size_t width, size_t height) { return this->m_buffer; }); |
||||
|
||||
InitializeCamera(cameraID); |
||||
StartCapture(); |
||||
|
||||
return m_textureId; |
||||
} |
||||
|
||||
// Tear down in reverse order of Register(): stop the capture stream
// first so no further frames reference the texture being removed.
void Camera::Unregister()
{
    StopCapture();
    m_plugin->UnregisterTexture(m_textureId);
}
||||
|
||||
// Listener callback: wrap the incoming frame in a TextureVariant and
// tell the registrar a new texture is ready to be drawn.
void Camera::onCameraFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
{
    // Zero-copy GPU path when the platform hands us an EGL-capable handle.
    if (buffer->handleType == Aurora::StreamCamera::HandleType::EGL) {
        // @todo Not tested. The device needs to be completed.
        auto eglImage = CameraEGLHelper::EGLCreateImage(buffer);
        // NOTE(review): a new TextureVariant is allocated every frame and
        // the previous one is never deleted — this leaks per frame.
        // Clarify ownership (renderer vs this class) before freeing.
        this->m_buffer = new TextureVariant(FlutterEGLImage{eglImage});
    } else {
        // CPU fallback: map the YCbCr planes and convert to ARGB pixels.
        auto pixels = CameraPixelsHelper::YUVtoARGB(buffer->mapYCbCr());
        // NOTE(review): same per-frame leak as the EGL branch above.
        this->m_buffer = new TextureVariant(FlutterPixelBuffer{pixels});
    }

    m_plugin->MarkTextureAvailable(m_textureId);
}
||||
|
||||
// Listener callback for camera failures.
// Fix: the original discarded errorDescription and wrote a bare tag to
// stdout; forward the description to the error stream so failures are
// actually diagnosable.
void Camera::onCameraError(const std::string &errorDescription)
{
    std::cerr << "onCameraError: " << errorDescription << std::endl;
}
||||
|
||||
// Listener callback for camera parameter changes.
// Fix: the original discarded both arguments; log which parameter
// changed and its new value.
void Camera::onCameraParameterChanged(Aurora::StreamCamera::CameraParameter parameter,
                                      const std::string &value)
{
    // CameraParameter is streamed via its underlying integer value —
    // NOTE(review): assumes it is an enum; confirm against streamcamera.h.
    std::cout << "onCameraParameterChanged: " << static_cast<int>(parameter)
              << " = " << value << std::endl;
}
@ -0,0 +1,42 @@
|
||||
/**
|
||||
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru> |
||||
* SPDX-License-Identifier: BSD-3-Clause |
||||
*/ |
||||
#include <embedder_texture/camera_egl_helper.h> |
||||
#include <flutter/platform-methods.h> |
||||
|
||||
#include <GLES2/gl2.h> |
||||
#include <GLES2/gl2ext.h> |
||||
|
||||
// EGL_KHR_image extension entry points. Resolved at runtime by
// CameraEGLHelper::EGLInit(); they remain null until it has been called.
static PFNEGLCREATEIMAGEKHRPROC eglCreateImageKHR;
static PFNEGLDESTROYIMAGEKHRPROC eglDestroyImageKHR;
||||
|
||||
// Look up the EGL_KHR_image extension functions. Must run before
// EGLCreateImage()/EGLDestroyImage() are used.
void CameraEGLHelper::EGLInit()
{
    // NOTE(review): eglGetProcAddress may return null when the extension
    // is unavailable — later calls would then crash; consider validating.
    eglCreateImageKHR = reinterpret_cast<PFNEGLCREATEIMAGEKHRPROC>(
        eglGetProcAddress("eglCreateImageKHR"));
    eglDestroyImageKHR = reinterpret_cast<PFNEGLDESTROYIMAGEKHRPROC>(
        eglGetProcAddress("eglDestroyImageKHR"));
}
||||
|
||||
// Wrap the camera buffer's native handle in an EGLImage for zero-copy
// texturing.
// Fixes: the attribute array was declared as GLint although
// eglCreateImageKHR takes `const EGLint *`, and a C-style cast silently
// stripped const from the handle — made explicit with const_cast.
EGLImageKHR CameraEGLHelper::EGLCreateImage(
    std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
{
    auto display = PlatformMethods::GetEGLDisplay();
    auto context = PlatformMethods::GetEGLContext();

    // Keep the buffer contents after image creation.
    const EGLint eglImgAttrs[] = {EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE, EGL_NONE};

    // EGLClientBuffer is a non-const void*; the handle is opaque and not
    // modified by EGL, so the const_cast is safe here.
    const void *handle = buffer->handle;
    return eglCreateImageKHR(display,
                             context,
                             EGL_NATIVE_BUFFER_ANDROID,
                             const_cast<void *>(handle),
                             eglImgAttrs);
}
||||
|
||||
// Release an EGLImage previously created by EGLCreateImage(); only the
// display it was created on is needed.
void CameraEGLHelper::EGLDestroyImage(EGLImageKHR image)
{
    eglDestroyImageKHR(PlatformMethods::GetEGLDisplay(), image);
}
@ -0,0 +1,106 @@
|
||||
/**
|
||||
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru> |
||||
* SPDX-License-Identifier: BSD-3-Clause |
||||
*/ |
||||
#include <embedder_texture/camera_helper.h> |
||||
|
||||
// EGL_KHR_image extension entry points. Resolved at runtime by
// CameraHelper::EGLInit(); they remain null until it has been called.
static PFNEGLCREATEIMAGEKHRPROC eglCreateImageKHR;
static PFNEGLDESTROYIMAGEKHRPROC eglDestroyImageKHR;
||||
|
||||
// Look up the EGL_KHR_image extension functions. Must run before
// EGLCreateImage()/EGLDestroyImage() are used.
void CameraHelper::EGLInit()
{
    // NOTE(review): no null-check on the results — if the extension is
    // missing, later calls crash; consider validating here.
    eglCreateImageKHR = reinterpret_cast<PFNEGLCREATEIMAGEKHRPROC>(
        eglGetProcAddress("eglCreateImageKHR"));
    eglDestroyImageKHR = reinterpret_cast<PFNEGLDESTROYIMAGEKHRPROC>(
        eglGetProcAddress("eglDestroyImageKHR"));
}
||||
|
||||
// Wrap the frame's native handle in an EGLImage for zero-copy texturing.
// Fixes two undeclared identifiers that made the original uncompilable:
// the parameter is `frame` (the body read `buffer`), and the resolved
// extension pointer is `eglCreateImageKHR` (the body called
// `s_eglCreateImageKHR`). Also types the attribute list as EGLint.
EGLImageKHR CameraHelper::EGLCreateImage(
    std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame> frame)
{
    auto eglDisplay = PlatformMethods::GetEGLDisplay();
    auto eglContext = PlatformMethods::GetEGLContext();

    // NOTE(review): assumes YCbCrFrame exposes a native `handle` — confirm
    // against streamcamera.h; the sibling CameraEGLHelper builds its image
    // from GraphicBuffer::handle instead.
    const void *handle = frame->handle;

    // Keep the buffer contents after image creation.
    const EGLint eglImgAttrs[] = {EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE, EGL_NONE};
    return eglCreateImageKHR(eglDisplay,
                             eglContext,
                             EGL_NATIVE_BUFFER_ANDROID,
                             const_cast<void *>(handle),
                             eglImgAttrs);
}
||||
|
||||
// Release an EGLImage on the display it belongs to.
void CameraHelper::EGLDestroyImage(EGLImageKHR eglImage, EGLDisplay eglDisplay)
{
    eglDestroyImageKHR(eglDisplay, eglImage);
}
||||
|
||||
// Convert a mapped YCbCr (YUV420) camera frame to 32-bit ARGB.
// Returns a pointer to the pixel storage of a freshly allocated image.
// NOTE(review): the QImage is heap-allocated and never deleted — the
// returned pointer aliases its storage, so every call leaks one image
// and the caller cannot free it safely via the raw pointer. Clarify
// ownership before relying on this in production.
uint8_t *CameraHelper::YUVtoARGB(std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame> frame)
{
    QSize size(frame->width, frame->height);
    QImage *image = new QImage(size, QImage::Format_ARGB32);

    // Cb and Cr share a single stride (cStride); chromaStep is the
    // in-row distance between successive chroma samples.
    planarYuv420ToArgb(frame->y,
                       frame->cb,
                       frame->cr,
                       frame->yStride,
                       frame->cStride,
                       frame->cStride,
                       frame->chromaStep,
                       reinterpret_cast<quint32 *>(image->bits()),
                       frame->width,
                       frame->height);

    return static_cast<uint8_t *>(image->bits());
}
||||
|
||||
// Combine one luma sample with precomputed chroma contributions into a
// single packed ARGB pixel, clamping each channel to [0, 255].
quint32 CameraHelper::yuvToArgb(qint32 y, qint32 rv, qint32 guv, qint32 bu, qint32 a = 255)
{
    const qint32 luma = (y - 16) * 298;

    const quint32 red = qBound(0, (luma + rv) >> 8, 255);
    const quint32 green = qBound(0, (luma - guv) >> 8, 255);
    const quint32 blue = qBound(0, (luma + bu) >> 8, 255);

    return (a << 24) | (red << 16) | (green << 8) | blue;
}
||||
|
||||
// YUV420 -> ARGB32 conversion. The image is processed two rows at a
// time because each chroma sample covers a 2x2 block of luma samples.
// `uvPixelStride` is the in-row distance between successive chroma
// samples (chromaStep from the frame).
void CameraHelper::planarYuv420ToArgb(const uchar *y,
                                      const uchar *u,
                                      const uchar *v,
                                      qint32 yStride,
                                      qint32 uStride,
                                      qint32 vStride,
                                      qint32 uvPixelStride,
                                      quint32 *rgb,
                                      qint32 width,
                                      qint32 height)
{
    // Two output cursors: the current row and the row directly below it.
    quint32 *rgb0 = rgb;
    quint32 *rgb1 = rgb + width;

    for (qint32 j = 0; j < height; j += 2) {
        const uchar *lineY0 = y;
        const uchar *lineY1 = y + yStride;
        const uchar *lineU = u;
        const uchar *lineV = v;

        for (qint32 i = 0; i < width; i += 2) {
            // One chroma pair drives a 2x2 block of luma samples.
            // BT.601-style fixed-point coefficients, +128 for rounding.
            const qint32 uu = *lineU - 128;
            const qint32 vv = *lineV - 128;
            const qint32 rv = 409 * vv + 128;
            const qint32 guv = 100 * uu + 208 * vv + 128;
            const qint32 bu = 516 * uu + 128;

            lineU += uvPixelStride;
            lineV += uvPixelStride;
            *rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
            *rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
            *rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
            *rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
        }

        // Advance luma two rows, chroma one row; each output cursor skips
        // the row the other cursor already wrote.
        y += yStride << 1;
        u += uStride;
        v += vStride;
        rgb0 += width;
        rgb1 += width;
    }
}
@ -1,124 +0,0 @@
|
||||
/**
|
||||
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru> |
||||
* SPDX-License-Identifier: BSD-3-Clause |
||||
*/ |
||||
#include <embedder_texture/camera_pixel_buffer.h> |
||||
|
||||
// Discover camera 0 at construction time and attach this object as its
// frame listener; capture itself only starts later, in Register().
CameraPixelBuffer::CameraPixelBuffer(TextureRegistrar *plugin) : m_plugin(plugin)
{
    Aurora::StreamCamera::CameraManager *manager = StreamCameraManager();

    if (!manager->getNumberOfCameras()) {
        return;
    }

    Aurora::StreamCamera::CameraInfo info;
    if (!manager->getCameraInfo(0, info)) { // Camera ID = 0
        return;
    }

    std::vector<Aurora::StreamCamera::CameraCapability> caps;
    if (!manager->queryCapabilities(info.id, caps)) {
        return;
    }

    m_cap = caps.at(1);
    m_camera = manager->openCamera(info.id);
    m_camera->setListener(this);
}
||||
|
||||
int64_t CameraPixelBuffer::Register() |
||||
{ |
||||
m_textureId = m_plugin->RegisterTexture(TextureType::Pixels, [this](size_t width, size_t height) { |
||||
if (this->m_buffer) { |
||||
return TextureVariant(FlutterPixelBuffer{ this->m_buffer->buffer } ); |
||||
} |
||||
return TextureVariant(FlutterPixelBuffer{}); |
||||
}); |
||||
|
||||
m_camera->startCapture(m_cap); |
||||
|
||||
return m_textureId; |
||||
} |
||||
|
||||
// Detach the texture from the registrar and stop the capture stream.
void CameraPixelBuffer::Unregister()
{
    m_plugin->UnregisterTexture(m_textureId);
    m_camera->stopCapture();
    // NOTE(review): the FlutterPixelBuffer allocated in onCameraFrame()
    // is only unreferenced here, never deleted — it leaks.
    m_buffer = nullptr;
}
||||
|
||||
// Frame listener: convert the incoming YCbCr frame to ARGB pixels and
// publish them through the texture registrar.
void CameraPixelBuffer::onCameraFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
{
    auto frame = buffer->mapYCbCr();

    QSize size(frame->width, frame->height);
    // NOTE(review): both this QImage and the FlutterPixelBuffer below are
    // allocated every frame and never deleted — they leak per frame.
    QImage* image = new QImage(size, QImage::Format_ARGB32);

    // Cb and Cr share one stride (cStride); chromaStep is the in-row
    // distance between successive chroma samples.
    this->planarYuv420ToArgb(frame->y, frame->cb, frame->cr,
                frame->yStride, frame->cStride, frame->cStride,
                frame->chromaStep,
                reinterpret_cast<quint32 *>(image->bits()),
                frame->width, frame->height);

    uint8_t *pixels = static_cast<uint8_t *>(image->bits());

    this->m_buffer = new FlutterPixelBuffer { pixels };

    m_plugin->MarkTextureAvailable(m_textureId);
}
||||
|
||||
// Listener callback for camera failures.
// Fix: the original discarded errorDescription and wrote a bare tag to
// stdout; forward the description to the error stream instead.
void CameraPixelBuffer::onCameraError(const std::string &errorDescription)
{
    std::cerr << "onCameraError: " << errorDescription << std::endl;
}
||||
|
||||
// Listener callback for camera parameter changes.
// Fix: the original discarded both arguments; log which parameter
// changed and its new value.
void CameraPixelBuffer::onCameraParameterChanged(Aurora::StreamCamera::CameraParameter parameter, const std::string &value)
{
    // NOTE(review): assumes CameraParameter is an enum; confirm against
    // streamcamera.h.
    std::cout << "onCameraParameterChanged: " << static_cast<int>(parameter)
              << " = " << value << std::endl;
}
||||
|
||||
// Combine one luma sample with precomputed chroma contributions into a
// single packed ARGB pixel, clamping each channel to [0, 255].
quint32 CameraPixelBuffer::yuvToArgb(qint32 y, qint32 rv, qint32 guv, qint32 bu, qint32 a = 255)
{
    const qint32 luma = (y - 16) * 298;

    const quint32 red = qBound(0, (luma + rv) >> 8, 255);
    const quint32 green = qBound(0, (luma - guv) >> 8, 255);
    const quint32 blue = qBound(0, (luma + bu) >> 8, 255);

    return (a << 24) | (red << 16) | (green << 8) | blue;
}
||||
|
||||
// YUV420 -> ARGB32 conversion. The image is processed two rows at a
// time because each chroma sample covers a 2x2 block of luma samples.
// `uvPixelStride` is the in-row distance between successive chroma
// samples (chromaStep from the frame).
void CameraPixelBuffer::planarYuv420ToArgb(const uchar *y, const uchar *u, const uchar *v,
                                           qint32 yStride, qint32 uStride, qint32 vStride,
                                           qint32 uvPixelStride, quint32 *rgb, qint32 width, qint32 height)
{
    // Two output cursors: the current row and the row directly below it.
    quint32 *rgb0 = rgb;
    quint32 *rgb1 = rgb + width;

    for (qint32 j = 0; j < height; j += 2) {
        const uchar *lineY0 = y;
        const uchar *lineY1 = y + yStride;
        const uchar *lineU = u;
        const uchar *lineV = v;

        for (qint32 i = 0; i < width; i += 2) {
            // One chroma pair drives a 2x2 block of luma samples.
            // BT.601-style fixed-point coefficients, +128 for rounding.
            const qint32 uu = *lineU - 128;
            const qint32 vv = *lineV - 128;
            const qint32 rv = 409 * vv + 128;
            const qint32 guv = 100 * uu + 208 * vv + 128;
            const qint32 bu = 516 * uu + 128;

            lineU += uvPixelStride;
            lineV += uvPixelStride;
            *rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
            *rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
            *rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
            *rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
        }

        // Advance luma two rows, chroma one row; each output cursor skips
        // the row the other cursor already wrote.
        y += yStride << 1;
        u += uStride;
        v += vStride;
        rgb0 += width;
        rgb1 += width;
    }
}
@ -0,0 +1,77 @@
|
||||
/**
|
||||
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru> |
||||
* SPDX-License-Identifier: BSD-3-Clause |
||||
*/ |
||||
#include <embedder_texture/camera_pixels_helper.h> |
||||
|
||||
// Convert a mapped YCbCr (YUV420) camera frame to 32-bit ARGB.
// Returns a pointer to the pixel storage of a freshly allocated image.
// NOTE(review): the QImage is heap-allocated and never deleted — the
// returned pointer aliases its storage, so every call leaks one image
// and the caller cannot free it safely via the raw pointer. Clarify
// ownership before relying on this in production.
uint8_t *CameraPixelsHelper::YUVtoARGB(std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame> frame)
{
    QSize size(frame->width, frame->height);
    QImage *image = new QImage(size, QImage::Format_ARGB32);

    // Cb and Cr share a single stride (cStride); chromaStep is the
    // in-row distance between successive chroma samples.
    planarYuv420ToArgb(frame->y,
                       frame->cb,
                       frame->cr,
                       frame->yStride,
                       frame->cStride,
                       frame->cStride,
                       frame->chromaStep,
                       reinterpret_cast<quint32 *>(image->bits()),
                       frame->width,
                       frame->height);

    return static_cast<uint8_t *>(image->bits());
}
||||
|
||||
// Combine one luma sample with precomputed chroma contributions into a
// single packed ARGB pixel, clamping each channel to [0, 255]. The
// default alpha (fully opaque) is supplied here, at the definition.
quint32 CameraPixelsHelper::yuvToArgb(qint32 y, qint32 rv, qint32 guv, qint32 bu, qint32 a = 255)
{
    const qint32 luma = (y - 16) * 298;

    const quint32 red = qBound(0, (luma + rv) >> 8, 255);
    const quint32 green = qBound(0, (luma - guv) >> 8, 255);
    const quint32 blue = qBound(0, (luma + bu) >> 8, 255);

    return (a << 24) | (red << 16) | (green << 8) | blue;
}
||||
|
||||
// YUV420 -> ARGB32 conversion. The image is processed two rows at a
// time because each chroma sample covers a 2x2 block of luma samples.
// `uvPixelStride` is the in-row distance between successive chroma
// samples (chromaStep from the frame).
void CameraPixelsHelper::planarYuv420ToArgb(const uchar *y,
                                            const uchar *u,
                                            const uchar *v,
                                            qint32 yStride,
                                            qint32 uStride,
                                            qint32 vStride,
                                            qint32 uvPixelStride,
                                            quint32 *rgb,
                                            qint32 width,
                                            qint32 height)
{
    // Two output cursors: the current row and the row directly below it.
    quint32 *rgb0 = rgb;
    quint32 *rgb1 = rgb + width;

    for (qint32 j = 0; j < height; j += 2) {
        const uchar *lineY0 = y;
        const uchar *lineY1 = y + yStride;
        const uchar *lineU = u;
        const uchar *lineV = v;

        for (qint32 i = 0; i < width; i += 2) {
            // One chroma pair drives a 2x2 block of luma samples.
            // BT.601-style fixed-point coefficients, +128 for rounding.
            const qint32 uu = *lineU - 128;
            const qint32 vv = *lineV - 128;
            const qint32 rv = 409 * vv + 128;
            const qint32 guv = 100 * uu + 208 * vv + 128;
            const qint32 bu = 516 * uu + 128;

            lineU += uvPixelStride;
            lineV += uvPixelStride;
            *rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
            *rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
            *rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
            *rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
        }

        // Advance luma two rows, chroma one row; each output cursor skips
        // the row the other cursor already wrote.
        y += yStride << 1;
        u += uStride;
        v += vStride;
        rgb0 += width;
        rgb1 += width;
    }
}
@ -0,0 +1,21 @@
|
||||
/**
|
||||
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru> |
||||
* SPDX-License-Identifier: BSD-3-Clause |
||||
*/ |
||||
#ifndef CAMERA_EGL_HELPER_H |
||||
#define CAMERA_EGL_HELPER_H |
||||
|
||||
#include <streamcamera/streamcamera.h> |
||||
|
||||
#include <EGL/egl.h> |
||||
#include <EGL/eglext.h> |
||||
|
||||
/// Thin wrapper around the EGL_KHR_image extension used to expose
/// camera GraphicBuffers to the GPU as EGLImages.
class CameraEGLHelper
{
public:
    /// Resolve the extension entry points; call once before the others.
    static void EGLInit();
    /// Create an EGLImage backed by the buffer's native handle.
    static EGLImageKHR EGLCreateImage(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer);
    /// Destroy an image previously returned by EGLCreateImage().
    static void EGLDestroyImage(EGLImageKHR image);
};
||||
|
||||
#endif /* CAMERA_EGL_HELPER_H */ |
@ -0,0 +1,33 @@
|
||||
/**
|
||||
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru> |
||||
* SPDX-License-Identifier: BSD-3-Clause |
||||
*/ |
||||
#ifndef CAMERA_PIXELS_HELPER_H |
||||
#define CAMERA_PIXELS_HELPER_H |
||||
|
||||
#include <flutter/plugin-interface.h> |
||||
#include <streamcamera/streamcamera.h> |
||||
|
||||
#include <QImage> |
||||
#include <QtCore> |
||||
|
||||
/// CPU fallback converter: turns YCbCr (YUV420) camera frames into
/// 32-bit ARGB pixel data for the Flutter pixel-buffer texture path.
class CameraPixelsHelper
{
public:
    /// Convert a mapped frame to ARGB32 and return the pixel data.
    /// NOTE(review): the implementation heap-allocates a QImage it never
    /// frees and returns a pointer into its storage — every call leaks.
    static uint8_t *YUVtoARGB(std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame> frame);

private:
    /// Pack one luma sample plus precomputed chroma terms into ARGB.
    static quint32 yuvToArgb(qint32 y, qint32 rv, qint32 guv, qint32 bu, qint32 a);
    /// Two-rows-at-a-time YUV420 -> ARGB32 conversion.
    static void planarYuv420ToArgb(const uchar *y,
                                   const uchar *u,
                                   const uchar *v,
                                   qint32 yStride,
                                   qint32 uStride,
                                   qint32 vStride,
                                   qint32 uvPixelStride,
                                   quint32 *rgb,
                                   qint32 width,
                                   qint32 height);
};
||||
|
||||
#endif /* CAMERA_PIXELS_HELPER_H */ |
Loading…
Reference in new issue