Vitaliy Zarubin
1 year ago
19 changed files with 748 additions and 613 deletions
@@ -0,0 +1,57 @@
/**
 * SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef TEXTURE_CAMERA_BUFFER_H
#define TEXTURE_CAMERA_BUFFER_H

#include <flutter/plugin-interface.h>
#include <streamcamera/streamcamera.h>

#include <QImage>
#include <QtCore>

typedef std::function<void()> CameraErrorHandler;

class TextureCamera : public Aurora::StreamCamera::CameraListener
{
public:
    TextureCamera(TextureRegistrar *plugin, const CameraErrorHandler &onError);

    void onCameraError(const std::string &errorDescription) override;
    void onCameraFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer) override;
    void onCameraParameterChanged(Aurora::StreamCamera::CameraParameter,
                                  const std::string &value) override;

    std::vector<Encodable> GetAvailableCameras();
    std::map<Encodable, Encodable> Register(std::string cameraName);
    std::map<Encodable, Encodable> Unregister();
    std::map<Encodable, Encodable> StartCapture(size_t width, size_t height);
    void StopCapture();
    std::map<Encodable, Encodable> GetState();
    std::string GetImageBase64();

private:
    bool CreateCamera(std::string cameraName);
    void SendError(std::string error);

private:
    TextureRegistrar *m_plugin;

    CameraErrorHandler m_onError;
    std::string m_error;

    Aurora::StreamCamera::CameraManager *m_manager;
    std::shared_ptr<Aurora::StreamCamera::Camera> m_camera;

    int64_t m_textureId = 0;
    size_t m_captureWidth = 0;
    size_t m_captureHeight = 0;
    size_t m_viewWidth = 0;
    size_t m_viewHeight = 0;

    TextureVariant *m_variant;
    QImage *m_image;
};

#endif /* TEXTURE_CAMERA_BUFFER_H */
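For orientation, a minimal usage sketch of this interface. It is not part of the commit, it assumes the Aurora Flutter embedder headers are available and that `registrar` is the TextureRegistrar handed to the plugin, and the camera id "main" is a made-up example value:

#include <camera_aurora/texture_camera.h>

#include <iostream>

// Hypothetical wiring, for illustration only.
void ExampleSetup(TextureRegistrar *registrar)
{
    static TextureCamera camera(registrar, [] {
        // The handler takes no arguments; the message itself is exposed
        // through GetState() under the "error" key.
        std::cout << "camera reported an error" << std::endl;
    });

    if (!camera.GetAvailableCameras().empty()) {
        camera.Register("main");        // an id as reported by GetAvailableCameras()
        camera.StartCapture(1280, 720); // requested view size in pixels
    }
}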
@@ -0,0 +1,21 @@
/**
 * SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef TEXTURE_CAMERA_EGL_HELPER_H
#define TEXTURE_CAMERA_EGL_HELPER_H

#include <streamcamera/streamcamera.h>

#include <EGL/egl.h>
#include <EGL/eglext.h>

class TextureCameraEGLHelper
{
public:
    static void EGLInit();
    static EGLImageKHR EGLCreateImage(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer);
    static void EGLDestroyImage(EGLImageKHR image);
};

#endif /* TEXTURE_CAMERA_EGL_HELPER_H */
@@ -0,0 +1,33 @@
/**
 * SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef TEXTURE_CAMERA_PIXELS_HELPER_H
#define TEXTURE_CAMERA_PIXELS_HELPER_H

#include <flutter/plugin-interface.h>
#include <streamcamera/streamcamera.h>

#include <QImage>
#include <QtCore>

class TextureCameraPixelsHelper
{
public:
    static QImage *YUVtoARGB(std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame> frame);

private:
    static quint32 yuvToArgb(qint32 y, qint32 rv, qint32 guv, qint32 bu, qint32 a);
    static void planarYuv420ToArgb(const uchar *y,
                                   const uchar *u,
                                   const uchar *v,
                                   qint32 yStride,
                                   qint32 uStride,
                                   qint32 vStride,
                                   qint32 uvPixelStride,
                                   quint32 *rgb,
                                   qint32 width,
                                   qint32 height);
};

#endif /* TEXTURE_CAMERA_PIXELS_HELPER_H */
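The stride parameters of planarYuv420ToArgb describe how a 4:2:0 frame sits in memory: yStride is the byte pitch of the luma plane, uStride/vStride the pitch of each half-height chroma plane, and uvPixelStride the distance between consecutive chroma samples within a row (1 for fully planar I420, 2 for semi-planar NV12/NV21, where Cb and Cr are interleaved). A self-contained sketch of the size arithmetic, with made-up dimensions; real drivers often pad strides wider:

#include <cstdio>

int main()
{
    const int width = 1280, height = 720;
    const int yStride = width;     // luma: one byte per pixel per row
    const int cStride = width / 2; // chroma: subsampled 2x horizontally

    const int ySize = yStride * height;
    const int cSize = cStride * (height / 2); // ...and 2x vertically

    std::printf("Y plane: %d bytes, each chroma plane: %d bytes\n", ySize, cSize);
    std::printf("total: %d bytes (1.5 bytes per pixel)\n", ySize + 2 * cSize);
    return 0;
}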
@@ -0,0 +1,218 @@
/**
 * SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
 * SPDX-License-Identifier: BSD-3-Clause
 */
#include <camera_aurora/texture_camera.h>
#include <camera_aurora/texture_camera_egl_helper.h>
#include <camera_aurora/texture_camera_pixels_helper.h>

#include <flutter/platform-data.h>
#include <flutter/platform-methods.h>

#include <QtCore>
#include <QBuffer>

#include <iostream>

TextureCamera::TextureCamera(TextureRegistrar *plugin, const CameraErrorHandler &onError)
    : m_plugin(plugin)
    , m_onError(onError)
    , m_manager(StreamCameraManager())
    , m_variant(nullptr)
    , m_image(nullptr)
{
    TextureCameraEGLHelper::EGLInit();
}

std::vector<Encodable> TextureCamera::GetAvailableCameras()
{
    std::vector<Encodable> cameras;
    auto count = m_manager->getNumberOfCameras();

    for (int index = 0; index < count; index++) {
        Aurora::StreamCamera::CameraInfo info;
        if (m_manager->getCameraInfo(index, info)) {
            cameras.push_back(std::map<Encodable, Encodable>{
                {"id", info.id},
                {"name", info.name},
                {"provider", info.provider},
                {"mountAngle", info.mountAngle},
            });
        }
    }

    return cameras;
}

std::string TextureCamera::GetImageBase64()
{
    // Encode the most recent frame, rotated by the sensor mount angle,
    // as a base64 JPEG.
    if (m_image && m_camera) {
        Aurora::StreamCamera::CameraInfo info;
        if (m_camera->getInfo(info)) {
            QBuffer qbuffer;
            qbuffer.open(QIODevice::WriteOnly);
            QImage rotatedImg = m_image->transformed(QMatrix().rotate(info.mountAngle));
            rotatedImg.save(&qbuffer, "JPEG");
            return qbuffer.data().toBase64().toStdString();
        }
    }
    return "";
}

std::map<Encodable, Encodable> TextureCamera::GetState()
{
    Aurora::StreamCamera::CameraInfo info;

    if (m_camera && m_camera->getInfo(info)) {
        auto orientation = static_cast<int>(PlatformMethods::GetOrientation());

        return std::map<Encodable, Encodable>{
            {"id", info.id},
            {"textureId", m_textureId},
            {"width", m_captureWidth},
            {"height", m_captureHeight},
            {"rotationCamera", info.mountAngle},
            {"rotationDisplay", orientation},
            {"error", m_error},
        };
    }

    return std::map<Encodable, Encodable>{
        {"error", m_error}
    };
}

bool TextureCamera::CreateCamera(std::string cameraName)
{
    if (auto count = m_manager->getNumberOfCameras()) {
        for (int index = 0; index < count; index++) {
            Aurora::StreamCamera::CameraInfo info;
            if (m_manager->getCameraInfo(index, info)) {
                if (info.id == cameraName) {
                    m_camera = m_manager->openCamera(info.id);
                    if (m_camera) {
                        m_camera->setListener(this);
                        return true;
                    } else {
                        Unregister();
                        SendError("Stream camera error: failed to open camera");
                        return false;
                    }
                }
            }
        }
    }
    return false;
}

void TextureCamera::SendError(std::string error)
{
    m_error = error;
    m_onError();
}

std::map<Encodable, Encodable> TextureCamera::StartCapture(size_t width, size_t height)
{
    m_viewWidth = width;
    m_viewHeight = height;

    if (m_camera) {
        if (m_camera->captureStarted()) {
            m_camera->stopCapture();
        }
        Aurora::StreamCamera::CameraInfo info;
        if (m_camera->getInfo(info)) {
            std::vector<Aurora::StreamCamera::CameraCapability> caps;

            if (m_manager->queryCapabilities(info.id, caps)) {
                for (unsigned int i = 0; i < caps.size(); i++) {
                    // Take the first capability at least as large as the
                    // requested view size, or fall back to the last entry.
                    if (width + height <= caps[i].width + caps[i].height
                        || i == caps.size() - 1) {
                        m_captureWidth = caps[i].width;
                        m_captureHeight = caps[i].height;

                        if (!m_camera->startCapture(caps[i])) {
                            Unregister();
                            SendError("Stream camera error: failed to start capture");
                        }

                        break;
                    }
                }
            }
        }
    }

    return GetState();
}

void TextureCamera::StopCapture()
{
    if (m_camera && m_camera->captureStarted()) {
        m_camera->stopCapture();
    }
}

std::map<Encodable, Encodable> TextureCamera::Register(std::string cameraName)
{
    // The registered callback hands the most recent frame to the engine.
    m_textureId = m_plugin->RegisterTexture(
        [this]([[maybe_unused]] size_t width, [[maybe_unused]] size_t height) {
            return m_variant;
        });

    if (CreateCamera(cameraName) && m_viewWidth != 0 && m_viewHeight != 0) {
        StartCapture(m_viewWidth, m_viewHeight);
    }

    return GetState();
}

std::map<Encodable, Encodable> TextureCamera::Unregister()
{
    StopCapture();

    // Unregister the texture before its id is reset.
    m_plugin->UnregisterTexture(m_textureId);

    m_error = "";
    m_textureId = 0;
    m_captureWidth = 0;
    m_captureHeight = 0;
    m_variant = nullptr;
    m_camera = nullptr;

    return GetState();
}

void TextureCamera::onCameraFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
{
    if (buffer->handleType == Aurora::StreamCamera::HandleType::EGL && false) {
        // @todo The zero-copy EGL path is disabled ("&& false"): not tested yet.
        auto eglImage = TextureCameraEGLHelper::EGLCreateImage(buffer);
        m_variant = new TextureVariant(FlutterEGLImage{
            eglImage,
            buffer->width,
            buffer->height,
        });
    } else {
        m_image = TextureCameraPixelsHelper::YUVtoARGB(buffer->mapYCbCr());
        auto pixels = static_cast<uint8_t *>(m_image->bits());
        m_variant = new TextureVariant(FlutterPixelBuffer{
            pixels,
            buffer->width,
            buffer->height,
        });
    }

    m_plugin->MarkTextureAvailable(m_textureId);
}

void TextureCamera::onCameraError(const std::string &errorDescription)
{
    Unregister();
    SendError(errorDescription);
}

void TextureCamera::onCameraParameterChanged([[maybe_unused]] Aurora::StreamCamera::CameraParameter parameter,
                                             const std::string &value)
{
    std::cout << "onCameraParameterChanged: " << value << std::endl;
}
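A note on how StartCapture above picks a resolution: it scans the driver's capability list and keeps the first mode whose width + height is at least the requested sum, falling back to the last entry when nothing is large enough. A self-contained sketch of that selection with stand-in types and made-up resolutions (the real list ordering comes from the Aurora driver and is not documented here):

#include <cstdio>
#include <vector>

// Stand-in for Aurora::StreamCamera::CameraCapability; illustrative only.
struct Capability { int width, height; };

// Mirrors the selection loop in StartCapture(): first capability at least
// as large (by width + height) as the request, otherwise the last entry.
Capability pick(const std::vector<Capability> &caps, int width, int height)
{
    for (size_t i = 0; i < caps.size(); i++) {
        if (width + height <= caps[i].width + caps[i].height || i == caps.size() - 1) {
            return caps[i];
        }
    }
    return {0, 0}; // only reached when caps is empty
}

int main()
{
    // Ascending order is assumed here for the fallback to mean "largest".
    std::vector<Capability> caps = {{640, 480}, {1280, 720}, {1920, 1080}};
    Capability c = pick(caps, 800, 600);
    std::printf("picked %dx%d\n", c.width, c.height); // prints: picked 1280x720
    return 0;
}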
@@ -0,0 +1,42 @@
/**
 * SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
 * SPDX-License-Identifier: BSD-3-Clause
 */
#include <camera_aurora/texture_camera_egl_helper.h>
#include <flutter/platform-methods.h>

#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>

static PFNEGLCREATEIMAGEKHRPROC eglCreateImageKHR;
static PFNEGLDESTROYIMAGEKHRPROC eglDestroyImageKHR;

void TextureCameraEGLHelper::EGLInit()
{
    eglCreateImageKHR = reinterpret_cast<PFNEGLCREATEIMAGEKHRPROC>(
        eglGetProcAddress("eglCreateImageKHR"));
    eglDestroyImageKHR = reinterpret_cast<PFNEGLDESTROYIMAGEKHRPROC>(
        eglGetProcAddress("eglDestroyImageKHR"));
}

EGLImageKHR TextureCameraEGLHelper::EGLCreateImage(
    std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
{
    auto display = PlatformMethods::GetEGLDisplay();
    auto context = PlatformMethods::GetEGLContext();

    const void *handle = buffer->handle;
    GLint eglImgAttrs[] = {EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE, EGL_NONE};
    return eglCreateImageKHR(display,
                             context,
                             EGL_NATIVE_BUFFER_ANDROID,
                             (EGLClientBuffer) handle,
                             eglImgAttrs);
}

void TextureCameraEGLHelper::EGLDestroyImage(EGLImageKHR image)
{
    auto display = PlatformMethods::GetEGLDisplay();

    eglDestroyImageKHR(display, image);
}
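One caveat with the helper above: eglGetProcAddress may return null when the EGL_KHR_image_base extension is not exposed by the driver, and the stored pointers would then be called blindly. A hedged sketch of a guard, assuming the same function-pointer globals; this is illustrative, not part of the commit:

// Illustrative guard: fail fast if the KHR image entry points are missing
// instead of calling through a null pointer.
EGLImageKHR SafeCreateImage(EGLDisplay display,
                            EGLContext context,
                            EGLClientBuffer handle)
{
    if (!eglCreateImageKHR) {
        return EGL_NO_IMAGE_KHR; // extension not available on this driver
    }
    GLint attrs[] = {EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE, EGL_NONE};
    return eglCreateImageKHR(display, context, EGL_NATIVE_BUFFER_ANDROID,
                             handle, attrs);
}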
@@ -0,0 +1,77 @@
/**
 * SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
 * SPDX-License-Identifier: BSD-3-Clause
 */
#include <camera_aurora/texture_camera_pixels_helper.h>

QImage *TextureCameraPixelsHelper::YUVtoARGB(std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame> frame)
{
    QSize size(frame->width, frame->height);
    QImage *image = new QImage(size, QImage::Format_RGBA8888);

    planarYuv420ToArgb(frame->y,
                       frame->cr,
                       frame->cb,
                       frame->yStride,
                       frame->cStride,
                       frame->cStride,
                       frame->chromaStep,
                       reinterpret_cast<quint32 *>(image->bits()),
                       frame->width,
                       frame->height);

    return image;
}

// Fixed-point BT.601 limited-range conversion; the coefficients are the
// usual floating-point factors scaled by 256.
quint32 TextureCameraPixelsHelper::yuvToArgb(qint32 y, qint32 rv, qint32 guv, qint32 bu, qint32 a = 255)
{
    qint32 yy = (y - 16) * 298;

    return (a << 24)
           | qBound(0, (yy + rv) >> 8, 255) << 16
           | qBound(0, (yy - guv) >> 8, 255) << 8
           | qBound(0, (yy + bu) >> 8, 255);
}

void TextureCameraPixelsHelper::planarYuv420ToArgb(const uchar *y,
                                                   const uchar *u,
                                                   const uchar *v,
                                                   qint32 yStride,
                                                   qint32 uStride,
                                                   qint32 vStride,
                                                   qint32 uvPixelStride,
                                                   quint32 *rgb,
                                                   qint32 width,
                                                   qint32 height)
{
    quint32 *rgb0 = rgb;
    quint32 *rgb1 = rgb + width;

    // Two rows per iteration: in YUV 4:2:0 each 2x2 block of luma samples
    // shares a single pair of chroma samples.
    for (qint32 j = 0; j < height; j += 2) {
        const uchar *lineY0 = y;
        const uchar *lineY1 = y + yStride;
        const uchar *lineU = u;
        const uchar *lineV = v;

        for (qint32 i = 0; i < width; i += 2) {
            const qint32 uu = *lineU - 128;
            const qint32 vv = *lineV - 128;
            const qint32 rv = 409 * vv + 128;
            const qint32 guv = 100 * uu + 208 * vv + 128;
            const qint32 bu = 516 * uu + 128;

            lineU += uvPixelStride;
            lineV += uvPixelStride;
            *rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
            *rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
            *rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
            *rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
        }

        y += yStride << 1;
        u += uStride;
        v += vStride;
        rgb0 += width;
        rgb1 += width;
    }
}
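The integer coefficients in yuvToArgb are the standard fixed-point form of the BT.601 limited-range matrix (for example R = 1.164 * (Y - 16) + 1.596 * (V - 128), with everything scaled by 256 and +128 added for rounding before the >> 8). A small self-contained check of one sample, not part of the commit:

#include <algorithm>
#include <cstdio>

// Reproduces the yuvToArgb() arithmetic for a single sample.
int main()
{
    const int y = 150, u = 100, v = 200; // arbitrary test sample
    const int uu = u - 128, vv = v - 128;

    const int yy = (y - 16) * 298;             // 1.164 * 256 ~ 298
    const int rv = 409 * vv + 128;             // 1.596 * 256 ~ 409
    const int guv = 100 * uu + 208 * vv + 128; // 0.391 * 256 ~ 100, 0.813 * 256 ~ 208
    const int bu = 516 * uu + 128;             // 2.018 * 256 ~ 516

    const int r = std::clamp((yy + rv) >> 8, 0, 255);
    const int g = std::clamp((yy - guv) >> 8, 0, 255);
    const int b = std::clamp((yy + bu) >> 8, 0, 255);

    std::printf("R=%d G=%d B=%d\n", r, g, b); // prints: R=255 G=107 B=100
    return 0;
}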
@@ -1,155 +1,38 @@
 // SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
 // SPDX-License-Identifier: BSD-3-Clause
-import 'dart:async';
-import 'dart:convert';
-
-import 'package:camera_platform_interface/camera_platform_interface.dart';
-import 'package:flutter/material.dart';
-import 'package:flutter/services.dart';
-
-import 'camera_aurora_method_channel.dart';
-import 'camera_aurora_platform_interface.dart';
-
-class CameraAurora extends CameraPlatform {
-  /// Registers this class as the default instance of [CameraPlatform].
-  static void registerWith() {
-    CameraPlatform.instance = CameraAurora();
-  }
-
-  // The stream for vending frames to platform interface clients.
-  StreamController<CameraImageData>? _frameStreamController;
-
-  /// Completes with a list of available cameras.
-  ///
-  /// This method returns an empty list when no cameras are available.
-  @override
-  Future<List<CameraDescription>> availableCameras() =>
-      CameraAuroraPlatform.instance.availableCameras();
-
-  /// Creates an uninitialized camera instance and returns the cameraId.
-  @override
-  Future<int> createCamera(
-    CameraDescription cameraDescription,
-    ResolutionPreset? resolutionPreset, {
-    bool enableAudio = false,
-  }) {
-    EventChannel(CameraAuroraEvents.cameraAuroraStreamedFrame.name)
-        .receiveBroadcastStream()
-        .listen((event) {
-      debugPrint(event);
-    });
-    return CameraAuroraPlatform.instance.createCamera(cameraDescription.name);
-  }
-
-  /// Initializes the camera on the device.
-  ///
-  /// [imageFormatGroup] is used to specify the image formatting used.
-  /// On Android this defaults to ImageFormat.YUV_420_888 and applies only to the imageStream.
-  /// On iOS this defaults to kCVPixelFormatType_32BGRA.
-  /// On Web this parameter is currently not supported.
-  @override
-  Future<void> initializeCamera(
-    int cameraId, {
-    ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
-  }) async {
-    // init
-  }
-
-  /// Releases the resources of this camera.
-  @override
-  Future<void> dispose(int cameraId) {
-    return CameraAuroraPlatform.instance.dispose(cameraId);
-  }
-
-  /// Captures an image and returns the file where it was saved.
-  @override
-  Future<XFile> takePicture(int cameraId) =>
-      CameraAuroraPlatform.instance.takePicture(cameraId);
-
-  /// Starts a video recording.
-  ///
-  /// The length of the recording can be limited by specifying the [maxVideoDuration].
-  /// By default no maximum duration is specified,
-  /// meaning the recording will continue until manually stopped.
-  /// With [maxVideoDuration] set the video is returned in a [VideoRecordedEvent]
-  /// through the [onVideoRecordedEvent] stream when the set duration is reached.
-  ///
-  /// This method is deprecated in favour of [startVideoCapturing].
-  @override
-  Future<void> startVideoRecording(int cameraId,
-          {Duration? maxVideoDuration}) =>
-      CameraAuroraPlatform.instance.startVideoRecording(cameraId);
-
-  /// Stops the video recording and returns the file where it was saved.
-  @override
-  Future<XFile> stopVideoRecording(int cameraId) =>
-      CameraAuroraPlatform.instance.stopVideoRecording(cameraId);
-
-  /// Pause video recording.
-  @override
-  Future<void> pauseVideoRecording(int cameraId) =>
-      CameraAuroraPlatform.instance.pauseVideoRecording(cameraId);
-
-  /// Resume video recording after pausing.
-  @override
-  Future<void> resumeVideoRecording(int cameraId) =>
-      CameraAuroraPlatform.instance.resumeVideoRecording(cameraId);
-
-  /// The ui orientation changed.
-  ///
-  /// Implementations for this:
-  /// - Should support all 4 orientations.
-  @override
-  Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() async* {
-    yield const DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
-  }
-
-  /// The camera has been initialized.
-  @override
-  Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) async* {
-    yield CameraInitializedEvent(
-      cameraId,
-      // previewWidth
-      400,
-      // previewHeight
-      400,
-      // exposureMode
-      ExposureMode.auto,
-      // exposurePointSupported
-      true,
-      // focusMode
-      FocusMode.auto,
-      // focusPointSupported
-      true,
-    );
-  }
-
-  @override
-  Stream<CameraImageData> onStreamedFrameAvailable(
-    int cameraId, {
-    CameraImageStreamOptions? options,
-  }) {
-    _frameStreamController = StreamController<CameraImageData>(
-      onListen: () =>
-          CameraAuroraPlatform.instance.streamedFrame(cameraId).listen((data) {
-        _frameStreamController!.add(data);
-      }),
-      onPause: () => {},
-      onResume: () => {},
-      onCancel: () => {},
-    );
-    return _frameStreamController!.stream;
-  }
-
-  /// Returns a widget showing a live camera preview.
-  @override
-  Widget buildPreview(int cameraId) {
-    return Center(
-      child: Text(
-        'Camera: $cameraId',
-        style:
-            const TextStyle(fontWeight: FontWeight.bold, color: Colors.white),
-      ),
-    );
-  }
-}
+
+enum OrientationEvent {
+  undefined,
+  portrait,
+  landscape,
+  portraitFlipped,
+  landscapeFlipped,
+}
+
+class CameraState {
+  CameraState.fromJson(Map<dynamic, dynamic> json)
+      : id = json['id'] ?? "",
+        textureId = json['textureId'] ?? -1,
+        width = (json['width'] ?? 0).toDouble(),
+        height = (json['height'] ?? 0).toDouble(),
+        rotationCamera = json['rotationCamera'] ?? 0,
+        rotationDisplay = json['rotationDisplay'] ?? 0,
+        error = json['error'] ?? '';
+
+  final String id;
+  final int textureId;
+  final double width;
+  final double height;
+  final int rotationCamera;
+  final int rotationDisplay;
+  final String error;
+
+  bool isNotEmpty() => textureId != -1;
+
+  bool hasError() => error.isNotEmpty;
+
+  @override
+  String toString() {
+    return '{id: $id, textureId: $textureId, width: $width, height: $height, rotationCamera: $rotationCamera, rotationDisplay: $rotationDisplay, error: $error}';
+  }
+}
@@ -1,155 +1,103 @@
// SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
// SPDX-License-Identifier: BSD-3-Clause
import 'package:camera_aurora/camera_aurora_platform_interface.dart';
import 'package:flutter/material.dart';

import 'camera_data.dart';

class CameraViewfinder extends StatefulWidget {
  const CameraViewfinder({
    super.key,
    required this.width,
    required this.height,
  });

  final double width;
  final double height;

  @override
  State<CameraViewfinder> createState() => _CameraViewfinderState();
}

class _CameraViewfinderState extends State<CameraViewfinder> {
  CameraState _cameraState = CameraState.fromJson({});

  @override
  initState() {
    super.initState();
    CameraAuroraPlatform.instance.startCapture(widget.width, widget.height);
    CameraAuroraPlatform.instance.onChangeState().listen((event) {
      if (mounted) {
        setState(() {
          debugPrint(_cameraState.toString());
          _cameraState = event;
        });
      }
    });
  }

  @override
  void dispose() {
    super.dispose();
    CameraAuroraPlatform.instance.stopCapture();
  }

  @override
  Widget build(BuildContext context) {
    if (_cameraState.hasError()) {
      return Center(
        child: Text(
          'Error: ${_cameraState.error}',
          style:
              const TextStyle(fontWeight: FontWeight.bold, color: Colors.white),
        ),
      );
    } else if (_cameraState.isNotEmpty()) {
      int turn = 0;

      switch (_cameraState.rotationDisplay) {
        case 0:
          turn = _cameraState.id.contains('front') ? -1 : 1;
          break;
        case 90:
          turn = 0;
          break;
        case 180:
          turn = _cameraState.id.contains('front') ? 1 : -1;
          break;
        default: // 270
          turn = 2;
      }

      double height = 10;
      double width = 10;

      // widget.width = widget.height
      // _cameraState.height = _cameraState.width

      if (_cameraState.height != 0 && _cameraState.width != 0) {
        if (_cameraState.rotationDisplay == 90 ||
            _cameraState.rotationDisplay == 270) {
          width = widget.width * _cameraState.height / _cameraState.width;
          height = widget.height * _cameraState.width / _cameraState.height;
        } else {
          width = _cameraState.height * widget.height / _cameraState.width;
          height = _cameraState.width * widget.width / _cameraState.height;
        }
      }

      return RotatedBox(
        quarterTurns: turn,
        child: SizedBox(
          width: height, // height
          height: width, // width
          child: Opacity(
            opacity: _cameraState.height == 0 ? 0 : 1,
            child: Texture(textureId: _cameraState.textureId),
          ),
        ),
      );
    }
    return const SizedBox.shrink();
  }
}
@@ -1,43 +0,0 @@
import 'dart:typed_data';

import 'package:camera_platform_interface/camera_platform_interface.dart';

CameraImageData cameraImageFromPlatformData(Map<dynamic, dynamic> data) {
  return CameraImageData(
      format: _cameraImageFormatFromPlatformData(data['format']),
      height: data['height'] as int,
      width: data['width'] as int,
      lensAperture: data['lensAperture'] as double?,
      sensorExposureTime: data['sensorExposureTime'] as int?,
      sensorSensitivity: data['sensorSensitivity'] as double?,
      planes: List<CameraImagePlane>.unmodifiable(
          (data['planes'] as List<dynamic>).map<CameraImagePlane>(
              (dynamic planeData) => _cameraImagePlaneFromPlatformData(
                  planeData as Map<dynamic, dynamic>)))));
}

CameraImageFormat _cameraImageFormatFromPlatformData(dynamic data) {
  return CameraImageFormat(_imageFormatGroupFromPlatformData(data), raw: data);
}

ImageFormatGroup _imageFormatGroupFromPlatformData(dynamic data) {
  switch (data) {
    case 35: // android.graphics.ImageFormat.YUV_420_888
      return ImageFormatGroup.yuv420;
    case 256: // android.graphics.ImageFormat.JPEG
      return ImageFormatGroup.jpeg;
    case 17: // android.graphics.ImageFormat.NV21
      return ImageFormatGroup.nv21;
  }

  return ImageFormatGroup.unknown;
}

CameraImagePlane _cameraImagePlaneFromPlatformData(Map<dynamic, dynamic> data) {
  return CameraImagePlane(
      bytes: data['bytes'] as Uint8List,
      bytesPerPixel: data['bytesPerPixel'] as int?,
      bytesPerRow: data['bytesPerRow'] as int,
      height: data['height'] as int?,
      width: data['width'] as int?);
}