
[camera] Add pixel buffer

camera · Vitaliy Zarubin · 1 year ago
commit f5c0dcfe3f
  1. example/aurora/rpm/ru.auroraos.flutter_example_packages.spec (2)
  2. example/lib/packages/camera/widgets/camera_body.dart (19)
  3. example/lib/packages/camera/widgets/camera_control_panel.dart (11)
  4. packages/camera/camera_aurora/aurora/CMakeLists.txt (8)
  5. packages/camera/camera_aurora/aurora/camera_aurora_plugin.cpp (162)
  6. packages/camera/camera_aurora/aurora/include/camera_aurora/camera_aurora_plugin.h (20)
  7. packages/camera/camera_aurora/aurora/include/camera_aurora/texture_camera.h (57)
  8. packages/camera/camera_aurora/aurora/include/camera_aurora/texture_camera_egl_helper.h (21)
  9. packages/camera/camera_aurora/aurora/include/camera_aurora/texture_camera_pixels_helper.h (33)
  10. packages/camera/camera_aurora/aurora/texture_camera.cpp (218)
  11. packages/camera/camera_aurora/aurora/texture_camera_egl_helper.cpp (42)
  12. packages/camera/camera_aurora/aurora/texture_camera_pixels_helper.cpp (77)
  13. packages/camera/camera_aurora/lib/camera_aurora.dart (112)
  14. packages/camera/camera_aurora/lib/camera_aurora_method_channel.dart (113)
  15. packages/camera/camera_aurora/lib/camera_aurora_platform_interface.dart (31)
  16. packages/camera/camera_aurora/lib/camera_data.dart (171)
  17. packages/camera/camera_aurora/lib/camera_viewfinder.dart (220)
  18. packages/camera/camera_aurora/lib/type_conversion.dart (43)
  19. packages/camera/camera_aurora/pubspec.yaml (1)

example/aurora/rpm/ru.auroraos.flutter_example_packages.spec (2)

@@ -12,6 +12,8 @@ BuildRequires: cmake
BuildRequires: pkgconfig(sqlite3)
BuildRequires: pkgconfig(flutter-embedder)
BuildRequires: pkgconfig(sensord-qt5)
BuildRequires: pkgconfig(glesv2)
BuildRequires: pkgconfig(streamcamera)
%description
%{summary}.

example/lib/packages/camera/widgets/camera_body.dart (19)

@@ -26,6 +26,12 @@ class CameraBody extends AppStatefulWidget {
}
class _CameraBodyState extends AppState<CameraBody> {
@override
void dispose() {
widget.controller?.dispose();
super.dispose();
}
@override
Widget buildWide(
BuildContext context,
@@ -65,7 +71,7 @@ class _CameraBodyState extends AppState<CameraBody> {
width: double.infinity,
height: double.infinity,
alignment: Alignment.center,
child: CameraPreview(widget.controller!),
child: widget.controller!.buildPreview(),
),
// Show dot when recording is active
@@ -89,13 +95,10 @@ class _CameraBodyState extends AppState<CameraBody> {
width: double.infinity,
height: double.infinity,
alignment: Alignment.center,
child: RotationTransition(
turns: const AlwaysStoppedAnimation(90 / 360),
child: Image.file(
widget.photo!,
fit: BoxFit.fill,
filterQuality: FilterQuality.high,
),
child: Image.file(
widget.photo!,
fit: BoxFit.fill,
filterQuality: FilterQuality.high,
),
),
],

example/lib/packages/camera/widgets/camera_control_panel.dart (11)

@@ -60,10 +60,15 @@ class _CameraControlPanelState extends AppState<CameraControlPanel> {
child: IconButton(
icon: Icon(
isRecordingVideo ? Icons.stop_circle : Icons.videocam,
color: AppColors.primary
.withOpacity(isPhoto || widget.disable ? 0.5 : 1),
color: AppColors.primary.withOpacity(isPhoto ||
widget.disable ||
true /* @todo disable video record */
? 0.5
: 1),
),
onPressed: isPhoto || widget.disable
onPressed: isPhoto ||
widget.disable ||
true // @todo disable video record
? null
: () {
if (isRecordingVideo) {

packages/camera/camera_aurora/aurora/CMakeLists.txt (8)

@@ -16,15 +16,21 @@ set(CMAKE_CXX_FLAGS_RELEASE "-O3")
find_package(PkgConfig REQUIRED)
find_package(Qt5 COMPONENTS Core Multimedia REQUIRED)
pkg_check_modules(FlutterEmbedder REQUIRED IMPORTED_TARGET flutter-embedder)
pkg_check_modules(GLES REQUIRED IMPORTED_TARGET glesv2)
pkg_check_modules(SC REQUIRED IMPORTED_TARGET streamcamera)
add_library(${PLUGIN_NAME} SHARED
texture_camera_pixels_helper.cpp
texture_camera_egl_helper.cpp
texture_camera.cpp
camera_aurora_plugin.cpp
)
set_target_properties(${PLUGIN_NAME} PROPERTIES CXX_VISIBILITY_PRESET hidden AUTOMOC ON)
target_link_libraries(${PLUGIN_NAME} PRIVATE PkgConfig::FlutterEmbedder)
target_link_libraries(${PLUGIN_NAME} PRIVATE PkgConfig::FlutterEmbedder PkgConfig::GLES PkgConfig::SC)
target_link_libraries(${PLUGIN_NAME} PUBLIC Qt5::Core Qt5::Multimedia)
target_include_directories(${PLUGIN_NAME} PRIVATE ${FLUTTER_DIR})

packages/camera/camera_aurora/aurora/camera_aurora_plugin.cpp (162)

@@ -4,6 +4,8 @@
*/
#include <camera_aurora/camera_aurora_plugin.h>
#include <flutter/method-channel.h>
#include <flutter/platform-data.h>
#include <flutter/platform-events.h>
#include <flutter/platform-methods.h>
#include <QtCore>
@@ -22,7 +24,8 @@ namespace CameraAuroraMethods
constexpr auto AvailableCameras = "availableCameras";
constexpr auto CreateCamera = "createCamera";
constexpr auto Dispose = "dispose";
constexpr auto InitializeCamera = "initializeCamera";
constexpr auto StartCapture = "startCapture";
constexpr auto StopCapture = "stopCapture";
constexpr auto TakePicture = "takePicture";
constexpr auto StartVideoRecording = "startVideoRecording";
constexpr auto StopVideoRecording = "stopVideoRecording";
@@ -32,13 +35,26 @@ namespace CameraAuroraMethods
namespace CameraAuroraEvents
{
constexpr auto ReadyForCapture = "cameraAuroraReadyForCapture";
constexpr auto ImageSaved = "cameraAuroraImageSaved";
constexpr auto StreamedFrame = "cameraAuroraStreamedFrame";
constexpr auto StateChanged = "cameraAuroraStateChanged";
}
CameraAuroraPlugin::CameraAuroraPlugin()
{
PlatformEvents::SubscribeOrientationChanged([this]([[maybe_unused]] DisplayRotation orientation) {
if (this->m_isEnableStateChanged) {
auto state = this->m_textureCamera->GetState();
EventChannel(CameraAuroraEvents::StateChanged, MethodCodecType::Standard).SendEvent(state);
}
});
}
void CameraAuroraPlugin::RegisterWithRegistrar(PluginRegistrar &registrar)
{
m_textureCamera = new TextureCamera(registrar.GetRegisterTexture(), [this]() {
auto state = this->m_textureCamera->GetState();
EventChannel(CameraAuroraEvents::StateChanged, MethodCodecType::Standard).SendEvent(state);
});
RegisterMethods(registrar);
RegisterEvents(registrar);
}
@@ -54,22 +70,24 @@ void CameraAuroraPlugin::RegisterMethods(PluginRegistrar &registrar)
onAvailableCameras(call);
return;
}
if (method == CameraAuroraMethods::CreateCamera)
{
onCreateCamera(call);
return;
}
if (method == CameraAuroraMethods::Dispose)
{
onDispose(call);
return;
}
if (method == CameraAuroraMethods::InitializeCamera)
if (method == CameraAuroraMethods::StartCapture)
{
onStartCapture(call);
return;
}
if (method == CameraAuroraMethods::StopCapture)
{
onInitializeCamera(call);
onStopCapture(call);
return;
}
@@ -78,25 +96,22 @@ void CameraAuroraPlugin::RegisterMethods(PluginRegistrar &registrar)
onTakePicture(call);
return;
}
if (method == CameraAuroraMethods::StartVideoRecording)
{
onStartVideoRecording(call);
return;
}
if (method == CameraAuroraMethods::StopVideoRecording)
{
onStopVideoRecording(call);
return;
}
if (method == CameraAuroraMethods::PauseVideoRecording)
{
onPauseVideoRecording(call);
return;
}
if (method == CameraAuroraMethods::ResumeVideoRecording)
{
onResumeVideoRecording(call);
@@ -115,156 +130,85 @@ void CameraAuroraPlugin::RegisterMethods(PluginRegistrar &registrar)
void CameraAuroraPlugin::RegisterEvents(PluginRegistrar &registrar)
{
registrar.RegisterEventChannel(
CameraAuroraEvents::ReadyForCapture, MethodCodecType::Standard,
[this](const Encodable &)
{ return EventResponse(); },
[this](const Encodable &)
{ return EventResponse(); });
registrar.RegisterEventChannel(
CameraAuroraEvents::ImageSaved, MethodCodecType::Standard,
[this](const Encodable &)
{ return EventResponse(); },
[this](const Encodable &)
{ return EventResponse(); });
registrar.RegisterEventChannel(
CameraAuroraEvents::StreamedFrame, MethodCodecType::Standard,
CameraAuroraEvents::StateChanged, MethodCodecType::Standard,
[this](const Encodable &)
{ return EventResponse(); },
{ this->m_isEnableStateChanged = true; return EventResponse(); },
[this](const Encodable &)
{ return EventResponse(); });
{ this->m_isEnableStateChanged = true; return EventResponse(); });
}
/**
* Methods
* Camera
*/
void CameraAuroraPlugin::onAvailableCameras(const MethodCall &call)
{
std::vector<Encodable> list;
const QList<QCameraInfo> cameras = QCameraInfo::availableCameras();
for (const QCameraInfo &cameraInfo : cameras)
{
list.push_back(std::map<Encodable, Encodable>{
{"deviceName", cameraInfo.deviceName().toStdString()},
{"position", static_cast<int>(cameraInfo.position())},
{"orientation", cameraInfo.orientation()},
});
}
call.SendSuccessResponse(list);
call.SendSuccessResponse(m_textureCamera->GetAvailableCameras());
}
void CameraAuroraPlugin::onCreateCamera(const MethodCall &call)
{
QCameraInfo cameraInfo;
auto cameraName = call.GetArgument<Encodable::String>("cameraName");
const auto cameraName = call.GetArgument<Encodable::String>("cameraName");
auto state = m_textureCamera->Register(cameraName);
qDebug() << "onCreateCamera";
for (const QCameraInfo &item : QCameraInfo::availableCameras())
{
if (item.deviceName().toStdString() == cameraName)
{
cameraInfo = item;
break;
}
}
EventChannel(CameraAuroraEvents::StateChanged, MethodCodecType::Standard).SendEvent(state);
m_camera.reset(new QCamera(cameraInfo));
m_camera->setCaptureMode(QCamera::CaptureStillImage);
m_camera->start();
call.SendSuccessResponse(state);
}
m_imageCapture.reset(new QCameraImageCapture(m_camera.data()));
void CameraAuroraPlugin::onDispose(const MethodCall &call)
{
auto state = m_textureCamera->Unregister();
connect(m_imageCapture.data(), &QCameraImageCapture::readyForCaptureChanged, this, &CameraAuroraPlugin::readyForCapture);
connect(m_imageCapture.data(), &QCameraImageCapture::imageSaved, this, &CameraAuroraPlugin::imageSaved);
call.SendSuccessResponse(stoi(cameraName));
EventChannel(CameraAuroraEvents::StateChanged, MethodCodecType::Standard).SendEvent(state);
qDebug() << "onCreateCamera";
unimplemented(call);
}
void CameraAuroraPlugin::onDispose(const MethodCall &call)
void CameraAuroraPlugin::onStartCapture(const MethodCall &call)
{
const auto cameraId = call.GetArgument<Encodable::Int>("cameraId");
auto width = call.GetArgument<Encodable::Int>("width");
auto height = call.GetArgument<Encodable::Int>("height");
if (m_camera) {
qDebug() << "onDispose";
}
auto state = m_textureCamera->StartCapture(width, height);
EventChannel(CameraAuroraEvents::StateChanged, MethodCodecType::Standard).SendEvent(state);
unimplemented(call);
}
void CameraAuroraPlugin::onInitializeCamera(const MethodCall &call)
void CameraAuroraPlugin::onStopCapture(const MethodCall &call)
{
qDebug() << "onInitializeCamera";
m_textureCamera->StopCapture();
unimplemented(call);
}
void CameraAuroraPlugin::onTakePicture(const MethodCall &call)
{
if (m_imageCapture->isReadyForCapture()) {
m_imageCapture->capture();
call.SendSuccessResponse(true);
} else {
call.SendSuccessResponse(false);
}
call.SendSuccessResponse(m_textureCamera->GetImageBase64());
}
void CameraAuroraPlugin::onStartVideoRecording(const MethodCall &call)
{
qDebug() << "onStartVideoRecording";
unimplemented(call);
}
void CameraAuroraPlugin::onStopVideoRecording(const MethodCall &call)
{
qDebug() << "onStopVideoRecording";
unimplemented(call);
}
void CameraAuroraPlugin::onPauseVideoRecording(const MethodCall &call)
{
qDebug() << "onPauseVideoRecording";
unimplemented(call);
}
void CameraAuroraPlugin::onResumeVideoRecording(const MethodCall &call)
{
qDebug() << "onResumeVideoRecording";
unimplemented(call);
}
/**
* Slots
*/
void CameraAuroraPlugin::readyForCapture(bool ready)
{
qDebug() << "readyForCapture";
EventChannel(
CameraAuroraEvents::ReadyForCapture,
MethodCodecType::Standard)
.SendEvent(ready);
}
void CameraAuroraPlugin::imageSaved(int id, const QString &fileName)
{
qDebug() << "imageSaved";
EventChannel(
CameraAuroraEvents::ImageSaved,
MethodCodecType::Standard)
.SendEvent(std::vector<Encodable>{
id,
fileName.toStdString()});
}
void CameraAuroraPlugin::unimplemented(const MethodCall &call)
{
call.SendSuccessResponse(nullptr);
}
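A note on the event plumbing above: the commit replaces the old ReadyForCapture/ImageSaved/StreamedFrame channels with a single StateChanged channel, and the onListen/onCancel handlers flip m_isEnableStateChanged so state events are only emitted once the Dart side has actually subscribed. A minimal standalone sketch of that gating pattern (StateChannel and its types are hypothetical, not the embedder's EventChannel API):

```cpp
#include <functional>
#include <map>
#include <string>

// Sketch: drop state events until a subscriber exists, as the plugin does
// with m_isEnableStateChanged above.
class StateChannel {
public:
    using State = std::map<std::string, std::string>;
    explicit StateChannel(std::function<void(const State &)> send)
        : m_send(std::move(send)) {}

    void OnListen() { m_enabled = true; } // wired to the channel's onListen

    void Notify(const State &state)       // called on any camera state change
    {
        if (m_enabled)                    // no subscriber yet -> drop event
            m_send(state);
    }

private:
    std::function<void(const State &)> m_send;
    bool m_enabled = false;
};
```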

packages/camera/camera_aurora/aurora/include/camera_aurora/camera_aurora_plugin.h (20)

@@ -8,11 +8,7 @@
#include <flutter/plugin-interface.h>
#include <camera_aurora/globals.h>
#include <QtCore>
#include <QCamera>
#include <QCameraInfo>
#include <QMediaRecorder>
#include <QCameraImageCapture>
#include <camera_aurora/texture_camera.h>
class PLUGIN_EXPORT CameraAuroraPlugin final
: public QObject,
@@ -21,12 +17,9 @@ class PLUGIN_EXPORT CameraAuroraPlugin final
Q_OBJECT
public:
CameraAuroraPlugin();
void RegisterWithRegistrar(PluginRegistrar &registrar) override;
public slots:
void readyForCapture(bool ready);
void imageSaved(int id, const QString &fileName);
private:
void RegisterMethods(PluginRegistrar &registrar);
void RegisterEvents(PluginRegistrar &registrar);
@@ -34,8 +27,11 @@
void onAvailableCameras(const MethodCall &call);
void onCreateCamera(const MethodCall &call);
void onDispose(const MethodCall &call);
void onInitializeCamera(const MethodCall &call);
void onStartCapture(const MethodCall &call);
void onStopCapture(const MethodCall &call);
void onTakePicture(const MethodCall &call);
void onStartVideoRecording(const MethodCall &call);
void onStopVideoRecording(const MethodCall &call);
void onPauseVideoRecording(const MethodCall &call);
@@ -44,8 +40,8 @@ private:
void unimplemented(const MethodCall &call);
private:
QScopedPointer<QCamera> m_camera;
QScopedPointer<QCameraImageCapture> m_imageCapture;
TextureCamera *m_textureCamera;
bool m_isEnableStateChanged = false;
};
#endif /* FLUTTER_PLUGIN_CAMERA_AURORA_PLUGIN_H */

packages/camera/camera_aurora/aurora/include/camera_aurora/texture_camera.h (57)

@@ -0,0 +1,57 @@
/**
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
* SPDX-License-Identifier: BSD-3-Clause
*/
#ifndef TEXTURE_CAMERA_BUFFER_H
#define TEXTURE_CAMERA_BUFFER_H
#include <flutter/plugin-interface.h>
#include <streamcamera/streamcamera.h>
#include <QImage>
#include <QtCore>
typedef std::function<void()> CameraErrorHandler;
class TextureCamera : public Aurora::StreamCamera::CameraListener
{
public:
TextureCamera(TextureRegistrar *plugin, const CameraErrorHandler &onError);
void onCameraError(const std::string &errorDescription) override;
void onCameraFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer) override;
void onCameraParameterChanged(Aurora::StreamCamera::CameraParameter,
const std::string &value) override;
std::vector<Encodable> GetAvailableCameras();
std::map<Encodable, Encodable> Register(std::string cameraName);
std::map<Encodable, Encodable> Unregister();
std::map<Encodable, Encodable> StartCapture(size_t width, size_t height);
void StopCapture();
std::map<Encodable, Encodable> GetState();
std::string GetImageBase64();
private:
bool CreateCamera(std::string cameraName);
void SendError(std::string error);
private:
TextureRegistrar *m_plugin;
CameraErrorHandler m_onError;
std::string m_error;
Aurora::StreamCamera::CameraManager *m_manager;
std::shared_ptr<Aurora::StreamCamera::Camera> m_camera;
int64_t m_textureId = 0;
size_t m_captureWidth = 0;
size_t m_captureHeight = 0;
size_t m_viewWidth = 0;
size_t m_viewHeight = 0;
TextureVariant *m_variant;
QImage *m_image;
};
#endif /* TEXTURE_CAMERA_BUFFER_H */

packages/camera/camera_aurora/aurora/include/camera_aurora/texture_camera_egl_helper.h (21)

@@ -0,0 +1,21 @@
/**
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
* SPDX-License-Identifier: BSD-3-Clause
*/
#ifndef TEXTURE_CAMERA_EGL_HELPER_H
#define TEXTURE_CAMERA_EGL_HELPER_H
#include <streamcamera/streamcamera.h>
#include <EGL/egl.h>
#include <EGL/eglext.h>
class TextureCameraEGLHelper
{
public:
static void EGLInit();
static EGLImageKHR EGLCreateImage(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer);
static void EGLDestroyImage(EGLImageKHR image);
};
#endif /* TEXTURE_CAMERA_EGL_HELPER_H */

packages/camera/camera_aurora/aurora/include/camera_aurora/texture_camera_pixels_helper.h (33)

@@ -0,0 +1,33 @@
/**
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
* SPDX-License-Identifier: BSD-3-Clause
*/
#ifndef TEXTURE_CAMERA_PIXELS_HELPER_H
#define TEXTURE_CAMERA_PIXELS_HELPER_H
#include <flutter/plugin-interface.h>
#include <streamcamera/streamcamera.h>
#include <QImage>
#include <QtCore>
class TextureCameraPixelsHelper
{
public:
static QImage *YUVtoARGB(std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame> frame);
private:
static quint32 yuvToArgb(qint32 y, qint32 rv, qint32 guv, qint32 bu, qint32 a);
static void planarYuv420ToArgb(const uchar *y,
const uchar *u,
const uchar *v,
qint32 yStride,
qint32 uStride,
qint32 vStride,
qint32 uvPixelStride,
quint32 *rgb,
qint32 width,
qint32 height);
};
#endif /* TEXTURE_CAMERA_PIXELS_HELPER_H */

packages/camera/camera_aurora/aurora/texture_camera.cpp (218)

@@ -0,0 +1,218 @@
/**
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <camera_aurora/texture_camera.h>
#include <camera_aurora/texture_camera_egl_helper.h>
#include <camera_aurora/texture_camera_pixels_helper.h>
#include <flutter/platform-data.h>
#include <flutter/platform-methods.h>
#include <QtCore>
#include <QBuffer>
TextureCamera::TextureCamera(TextureRegistrar *plugin, const CameraErrorHandler &onError)
: m_plugin(plugin)
, m_onError(onError)
, m_variant(nullptr)
, m_image(nullptr)
, m_manager(StreamCameraManager())
{
TextureCameraEGLHelper::EGLInit();
}
std::vector<Encodable> TextureCamera::GetAvailableCameras()
{
std::vector<Encodable> cameras;
auto count = m_manager->getNumberOfCameras();
for (int index = 0; index < count; index++) {
Aurora::StreamCamera::CameraInfo info;
if (m_manager->getCameraInfo(index, info)) {
cameras.push_back(std::map<Encodable, Encodable>{
{"id", info.id},
{"name", info.name},
{"provider", info.provider},
{"mountAngle", info.mountAngle},
});
}
}
return cameras;
}
std::string TextureCamera::GetImageBase64()
{
if (m_image && m_camera) {
Aurora::StreamCamera::CameraInfo info;
if (m_camera->getInfo(info)) {
QBuffer qbuffer;
qbuffer.open(QIODevice::WriteOnly);
QImage rotatedImg = m_image->transformed(QMatrix().rotate(info.mountAngle));
rotatedImg.save(&qbuffer, "JPEG");
return qbuffer.data().toBase64().toStdString();
}
}
return "";
}
std::map<Encodable, Encodable> TextureCamera::GetState()
{
Aurora::StreamCamera::CameraInfo info;
if (m_camera && m_camera->getInfo(info)) {
auto orientation = static_cast<int>(PlatformMethods::GetOrientation());
return std::map<Encodable, Encodable>{
{"id", info.id},
{"textureId", m_textureId},
{"width", m_captureWidth},
{"height", m_captureHeight},
{"rotationCamera", info.mountAngle},
{"rotationDisplay", orientation},
{"error", m_error},
};
}
return std::map<Encodable, Encodable>{
{"error", m_error}
};
}
bool TextureCamera::CreateCamera(std::string cameraName)
{
if (auto count = m_manager->getNumberOfCameras()) {
for (int index = 0; index < count; index++) {
Aurora::StreamCamera::CameraInfo info;
if (m_manager->getCameraInfo(index, info)) {
if (info.id == cameraName) {
m_camera = m_manager->openCamera(info.id);
if (m_camera) {
m_camera->setListener(this);
return true;
} else {
Unregister();
SendError("Stream camera error open camera");
return false;
}
}
}
}
}
return false;
}
void TextureCamera::SendError(std::string error)
{
m_error = error;
m_onError();
}
std::map<Encodable, Encodable> TextureCamera::StartCapture(size_t width, size_t height)
{
m_viewWidth = width;
m_viewHeight = height;
if (m_camera) {
if (m_camera->captureStarted()) {
m_camera->stopCapture();
}
Aurora::StreamCamera::CameraInfo info;
if (m_camera->getInfo(info)) {
std::vector<Aurora::StreamCamera::CameraCapability> caps;
if (m_manager->queryCapabilities(info.id, caps)) {
for (unsigned int i = 0; i < caps.size(); i++) {
if (width + height <= caps[i].width + caps[i].height || i == caps.size() - 1) {
m_captureWidth = caps[i].width;
m_captureHeight = caps[i].height;
if (!m_camera->startCapture(caps[i])) {
Unregister();
SendError("Stream camera error start capture");
}
break;
}
}
}
}
}
return GetState();
}
void TextureCamera::StopCapture()
{
if (m_camera && m_camera->captureStarted()) {
m_camera->stopCapture();
}
}
std::map<Encodable, Encodable> TextureCamera::Register(std::string cameraName)
{
m_textureId = m_plugin->RegisterTexture(
[this]([[maybe_unused]] size_t width, [[maybe_unused]] size_t height) {
return m_variant;
});
if (CreateCamera(cameraName) && m_viewWidth != 0 && m_viewHeight != 0) {
StartCapture(m_viewWidth, m_viewHeight);
}
return GetState();
}
std::map<Encodable, Encodable> TextureCamera::Unregister()
{
StopCapture();
m_error = "";
m_plugin->UnregisterTexture(m_textureId);
m_textureId = 0;
m_captureWidth = 0;
m_captureHeight = 0;
m_variant = nullptr;
m_camera = nullptr;
return GetState();
}
void TextureCamera::onCameraFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
{
if (buffer->handleType == Aurora::StreamCamera::HandleType::EGL && false) {
// @todo not tested
auto eglImage = TextureCameraEGLHelper::EGLCreateImage(buffer);
m_variant = new TextureVariant(FlutterEGLImage{
eglImage,
buffer->width,
buffer->height,
});
} else {
m_image = TextureCameraPixelsHelper::YUVtoARGB(buffer->mapYCbCr());
auto pixels = static_cast<uint8_t *>(m_image->bits());
m_variant = new TextureVariant(FlutterPixelBuffer{
pixels,
buffer->width,
buffer->height,
});
}
m_plugin->MarkTextureAvailable(m_textureId);
}
void TextureCamera::onCameraError(const std::string &errorDescription)
{
Unregister();
SendError(errorDescription);
}
void TextureCamera::onCameraParameterChanged([[maybe_unused]] Aurora::StreamCamera::CameraParameter parameter,
const std::string &value)
{
std::cout << "onCameraParameterChanged: " << value << std::endl;
}
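StartCapture above walks the camera's capability list and takes the first mode at least as large as the requested view size, falling back to the last entry. A standalone sketch of that selection rule (the Capability struct here is a hypothetical stand-in for Aurora::StreamCamera::CameraCapability):

```cpp
#include <cstddef>
#include <vector>

struct Capability { size_t width = 0, height = 0; };

// First capability whose combined size covers the request; otherwise the
// last entry, mirroring the loop in StartCapture above.
static Capability SelectCapability(const std::vector<Capability> &caps,
                                   size_t width, size_t height)
{
    for (size_t i = 0; i < caps.size(); ++i) {
        if (width + height <= caps[i].width + caps[i].height
            || i == caps.size() - 1) {
            return caps[i];
        }
    }
    return {}; // empty capability list
}
```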

packages/camera/camera_aurora/aurora/texture_camera_egl_helper.cpp (42)

@@ -0,0 +1,42 @@
/**
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <camera_aurora/texture_camera_egl_helper.h>
#include <flutter/platform-methods.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
static PFNEGLCREATEIMAGEKHRPROC eglCreateImageKHR;
static PFNEGLDESTROYIMAGEKHRPROC eglDestroyImageKHR;
void TextureCameraEGLHelper::EGLInit()
{
eglCreateImageKHR = reinterpret_cast<PFNEGLCREATEIMAGEKHRPROC>(
eglGetProcAddress("eglCreateImageKHR"));
eglDestroyImageKHR = reinterpret_cast<PFNEGLDESTROYIMAGEKHRPROC>(
eglGetProcAddress("eglDestroyImageKHR"));
}
EGLImageKHR TextureCameraEGLHelper::EGLCreateImage(
std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
{
auto display = PlatformMethods::GetEGLDisplay();
auto context = PlatformMethods::GetEGLContext();
const void *handle = buffer->handle;
GLint eglImgAttrs[] = {EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE, EGL_NONE};
return eglCreateImageKHR(display,
context,
EGL_NATIVE_BUFFER_ANDROID,
(EGLClientBuffer) handle,
eglImgAttrs);
}
void TextureCameraEGLHelper::EGLDestroyImage(EGLImageKHR image)
{
auto display = PlatformMethods::GetEGLDisplay();
eglDestroyImageKHR(display, image);
}
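eglCreateImageKHR and eglDestroyImageKHR are extension entry points, which is why they are resolved through eglGetProcAddress above. One caveat: eglGetProcAddress can return a non-null pointer even when the extension is not supported, so a guard like the following hypothetical helper (standard EGL calls only) is a reasonable addition before trusting those pointers:

```cpp
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <cstring>

// Returns true if the display advertises the named EGL extension,
// e.g. "EGL_KHR_image_base".
static bool HasEglExtension(EGLDisplay display, const char *name)
{
    const char *exts = eglQueryString(display, EGL_EXTENSIONS);
    return exts != nullptr && std::strstr(exts, name) != nullptr;
}
```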

packages/camera/camera_aurora/aurora/texture_camera_pixels_helper.cpp (77)

@@ -0,0 +1,77 @@
/**
* SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <camera_aurora/texture_camera_pixels_helper.h>
QImage *TextureCameraPixelsHelper::YUVtoARGB(std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame> frame)
{
QSize size(frame->width, frame->height);
QImage *image = new QImage(size, QImage::Format_RGBA8888);
planarYuv420ToArgb(frame->y,
frame->cr,
frame->cb,
frame->yStride,
frame->cStride,
frame->cStride,
frame->chromaStep,
reinterpret_cast<quint32 *>(image->bits()),
frame->width,
frame->height);
return image;
}
quint32 TextureCameraPixelsHelper::yuvToArgb(qint32 y, qint32 rv, qint32 guv, qint32 bu, qint32 a = 255)
{
qint32 yy = (y - 16) * 298;
return (a << 24)
| qBound(0, (yy + rv) >> 8, 255) << 16
| qBound(0, (yy - guv) >> 8, 255) << 8
| qBound(0, (yy + bu) >> 8, 255);
}
void TextureCameraPixelsHelper::planarYuv420ToArgb(const uchar *y,
const uchar *u,
const uchar *v,
qint32 yStride,
qint32 uStride,
qint32 vStride,
qint32 uvPixelStride,
quint32 *rgb,
qint32 width,
qint32 height)
{
quint32 *rgb0 = rgb;
quint32 *rgb1 = rgb + width;
for (qint32 j = 0; j < height; j += 2) {
const uchar *lineY0 = y;
const uchar *lineY1 = y + yStride;
const uchar *lineU = u;
const uchar *lineV = v;
for (qint32 i = 0; i < width; i += 2) {
const qint32 uu = *lineU - 128;
const qint32 vv = *lineV - 128;
const qint32 rv = 409 * vv + 128;
const qint32 guv = 100 * uu + 208 * vv + 128;
const qint32 bu = 516 * uu + 128;
lineU += uvPixelStride;
lineV += uvPixelStride;
*rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
*rgb0++ = yuvToArgb(*lineY0++, rv, guv, bu);
*rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
*rgb1++ = yuvToArgb(*lineY1++, rv, guv, bu);
}
y += yStride << 1;
u += uStride;
v += vStride;
rgb0 += width;
rgb1 += width;
}
}
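yuvToArgb above is the usual fixed-point, limited-range BT.601 conversion: the float coefficients 1.164, 1.596, 0.391/0.813, and 2.018 are scaled by 256 into 298, 409, 100/208, and 516, with a +128 term for rounding before the >> 8. A self-contained one-pixel check of the same math (standalone restatement, not the plugin's code):

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdio>

// Same fixed-point BT.601 math as yuvToArgb above, written out standalone.
static uint32_t YuvPixelToArgb(int32_t y, int32_t u, int32_t v)
{
    const int32_t yy  = (y - 16) * 298;                          // 1.164*(Y-16)
    const int32_t rv  = 409 * (v - 128) + 128;                   // red from Cr
    const int32_t guv = 100 * (u - 128) + 208 * (v - 128) + 128; // green term
    const int32_t bu  = 516 * (u - 128) + 128;                   // blue from Cb

    const auto c = [](int32_t x) {
        return static_cast<uint32_t>(std::clamp<int32_t>(x, 0, 255));
    };
    return 0xFF000000u | c((yy + rv) >> 8) << 16 | c((yy - guv) >> 8) << 8
           | c((yy + bu) >> 8);
}

int main()
{
    // Pure red in studio-swing BT.601 is roughly Y=81, Cb=90, Cr=240;
    // the fixed-point path should land on 0xFFFF0000.
    std::printf("0x%08X\n", static_cast<unsigned>(YuvPixelToArgb(81, 90, 240)));
    return 0;
}
```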

packages/camera/camera_aurora/lib/camera_aurora.dart (112)

@@ -1,13 +1,12 @@
// SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
// SPDX-License-Identifier: BSD-3-Clause
import 'dart:async';
import 'dart:convert';
import 'package:camera_aurora/camera_viewfinder.dart';
import 'package:camera_platform_interface/camera_platform_interface.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'camera_aurora_method_channel.dart';
import 'camera_aurora_platform_interface.dart';
class CameraAurora extends CameraPlatform {
@@ -16,140 +15,83 @@ class CameraAurora extends CameraPlatform {
CameraPlatform.instance = CameraAurora();
}
// The stream for vending frames to platform interface clients.
StreamController<CameraImageData>? _frameStreamController;
/// Completes with a list of available cameras.
///
/// This method returns an empty list when no cameras are available.
@override
Future<List<CameraDescription>> availableCameras() =>
CameraAuroraPlatform.instance.availableCameras();
/// Creates an uninitialized camera instance and returns the cameraId.
@override
Future<void> initializeCamera(
int cameraId, {
ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
}) async {}
@override
Future<int> createCamera(
CameraDescription cameraDescription,
ResolutionPreset? resolutionPreset, {
bool enableAudio = false,
}) {
EventChannel(CameraAuroraEvents.cameraAuroraStreamedFrame.name)
.receiveBroadcastStream()
.listen((event) {
debugPrint(event);
});
return CameraAuroraPlatform.instance.createCamera(cameraDescription.name);
}
/// Initializes the camera on the device.
///
/// [imageFormatGroup] is used to specify the image formatting used.
/// On Android this defaults to ImageFormat.YUV_420_888 and applies only to the imageStream.
/// On iOS this defaults to kCVPixelFormatType_32BGRA.
/// On Web this parameter is currently not supported.
@override
Future<void> initializeCamera(
int cameraId, {
ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
}) async {
// init
return (await CameraAuroraPlatform.instance
.createCamera(cameraDescription.name))
.textureId;
}
/// Releases the resources of this camera.
@override
Future<void> dispose(int cameraId) {
return CameraAuroraPlatform.instance.dispose(cameraId);
}
Future<void> dispose(int cameraId) => CameraAuroraPlatform.instance.dispose();
/// Captures an image and returns the file where it was saved.
@override
Future<XFile> takePicture(int cameraId) =>
CameraAuroraPlatform.instance.takePicture(cameraId);
/// Starts a video recording.
///
/// The length of the recording can be limited by specifying the [maxVideoDuration].
/// By default no maximum duration is specified,
/// meaning the recording will continue until manually stopped.
/// With [maxVideoDuration] set the video is returned in a [VideoRecordedEvent]
/// through the [onVideoRecordedEvent] stream when the set duration is reached.
///
/// This method is deprecated in favour of [startVideoCapturing].
@override
Future<void> startVideoRecording(int cameraId,
{Duration? maxVideoDuration}) =>
CameraAuroraPlatform.instance.startVideoRecording(cameraId);
/// Stops the video recording and returns the file where it was saved.
@override
Future<XFile> stopVideoRecording(int cameraId) =>
CameraAuroraPlatform.instance.stopVideoRecording(cameraId);
/// Pause video recording.
@override
Future<void> pauseVideoRecording(int cameraId) =>
CameraAuroraPlatform.instance.pauseVideoRecording(cameraId);
/// Resume video recording after pausing.
@override
Future<void> resumeVideoRecording(int cameraId) =>
CameraAuroraPlatform.instance.resumeVideoRecording(cameraId);
/// The ui orientation changed.
///
/// Implementations for this:
/// - Should support all 4 orientations.
@override
Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() async* {
yield const DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
}
/// The camera has been initialized.
@override
Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) async* {
yield CameraInitializedEvent(
cameraId,
// previewWidth
400,
// previewHeight
400,
// exposureMode
0,
0,
ExposureMode.auto,
// exposurePointSupported
true,
// focusMode
FocusMode.auto,
// focusPointSupported
true,
);
}
@override
Stream<CameraImageData> onStreamedFrameAvailable(
int cameraId, {
CameraImageStreamOptions? options,
}) {
_frameStreamController = StreamController<CameraImageData>(
onListen: () =>
CameraAuroraPlatform.instance.streamedFrame(cameraId).listen((data) {
_frameStreamController!.add(data);
}),
onPause: () => {},
onResume: () => {},
onCancel: () => {},
);
return _frameStreamController!.stream;
Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() async* {
yield const DeviceOrientationChangedEvent(DeviceOrientation.landscapeLeft);
}
/// Returns a widget showing a live camera preview.
@override
Widget buildPreview(int cameraId) {
return Center(
child: Text(
'Camera: $cameraId',
style:
const TextStyle(fontWeight: FontWeight.bold, color: Colors.white),
),
);
if (cameraId != 0) {
return LayoutBuilder(builder: (
BuildContext context,
BoxConstraints constraints,
) {
return CameraViewfinder(
width: constraints.maxWidth,
height: constraints.maxHeight,
);
});
}
return const SizedBox.shrink();
}
}

packages/camera/camera_aurora/lib/camera_aurora_method_channel.dart (113)

@@ -7,12 +7,14 @@ import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
import 'camera_aurora_platform_interface.dart';
import 'camera_data.dart';
enum CameraAuroraMethods {
availableCameras,
createCamera,
dispose,
initializeCamera,
startCapture,
stopCapture,
takePicture,
startVideoRecording,
stopVideoRecording,
@@ -21,9 +23,7 @@ }
}
enum CameraAuroraEvents {
cameraAuroraReadyForCapture,
cameraAuroraImageSaved,
cameraAuroraStreamedFrame,
cameraAuroraStateChanged,
}
/// An implementation of [CameraAuroraPlatform] that uses method channels.
@@ -32,6 +32,16 @@ class MethodChannelCameraAurora extends CameraAuroraPlatform {
@visibleForTesting
final methodsChannel = const MethodChannel('camera_aurora');
@override
Stream<CameraState> onChangeState() async* {
await for (final data
in EventChannel(CameraAuroraEvents.cameraAuroraStateChanged.name)
.receiveBroadcastStream()) {
yield CameraState.fromJson(data);
}
}
/// Camera
@override
Future<List<CameraDescription>> availableCameras() async {
final List<CameraDescription> result = [];
@@ -40,19 +50,20 @@ class MethodChannelCameraAurora extends CameraAuroraPlatform {
CameraAuroraMethods.availableCameras.name) ??
[];
for (int i = 0; i < cameras.length; i++) {
final camera = cameras[i] as Map<dynamic, dynamic>;
final pos = camera['position'];
final lensDirection = pos == 1
? CameraLensDirection.back
: (pos == 2
? CameraLensDirection.front
: CameraLensDirection.external);
for (final camera in cameras) {
final data = camera['id'].split(':');
var direction = CameraLensDirection.external;
if (data[1].toString().contains('rear')) {
direction = CameraLensDirection.back;
} else if (data[1].toString().contains('front')) {
direction = CameraLensDirection.front;
}
result.add(CameraDescription(
name: camera['deviceName'],
lensDirection: lensDirection,
sensorOrientation: camera['orientation'],
name: camera['id'],
lensDirection: direction,
sensorOrientation: camera['mountAngle'],
));
}
@@ -60,52 +71,52 @@ class MethodChannelCameraAurora extends CameraAuroraPlatform {
}
@override
Future<int> createCamera(String cameraName) async {
return await methodsChannel
.invokeMethod<Object?>(CameraAuroraMethods.createCamera.name, {
Future<CameraState> createCamera(String cameraName) async {
final data = await methodsChannel
.invokeMethod<Map<dynamic, dynamic>?>('createCamera', {
'cameraName': cameraName,
}) as int;
});
return CameraState.fromJson(data ?? {});
}
@override
Future<void> dispose(int cameraId) {
return methodsChannel
.invokeMethod<Object?>(CameraAuroraMethods.dispose.name, {
'cameraId': cameraId,
Future<void> startCapture(double width, double height) async {
await methodsChannel
.invokeMethod<Object?>(CameraAuroraMethods.startCapture.name, {
'width': width.round(),
'height': height.round(),
});
}
@override
Future<void> initializeCamera(
int cameraId, {
ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
}) async {
Future<void> stopCapture() async {
await methodsChannel
.invokeMethod<Object?>(CameraAuroraMethods.initializeCamera.name, {
'cameraId': cameraId,
}) as int;
.invokeMethod<Object?>(CameraAuroraMethods.stopCapture.name, {});
}
@override
Future<void> dispose() async {
await methodsChannel
.invokeMethod<Object?>(CameraAuroraMethods.dispose.name);
}
/// Take photo
@override
Future<XFile> takePicture(int cameraId) async {
final result = await methodsChannel.invokeMethod<Object?>(
final image = await methodsChannel.invokeMethod<String?>(
CameraAuroraMethods.takePicture.name,
{'cameraId': cameraId},
);
if (result == true) {
await for (final data
in EventChannel(CameraAuroraEvents.cameraAuroraImageSaved.name)
.receiveBroadcastStream()) {
final response =
(data as List<Object?>).map((e) => e.toString()).toList();
return XFile(response[1], mimeType: 'image/jpeg');
}
}
throw "Error take picture";
final bytes = base64Decode(image!);
return XFile.fromData(
bytes,
name: 'temp.jpg',
mimeType: 'image/jpeg',
length: bytes.length,
);
}
// Record Video
@override
Future<void> startVideoRecording(int cameraId,
{Duration? maxVideoDuration}) async {
@@ -148,20 +159,4 @@ class MethodChannelCameraAurora extends CameraAuroraPlatform {
'cameraId': cameraId,
});
}
@override
Stream<CameraImageData> streamedFrame(int cameraId) async* {
await for (final data
in EventChannel(CameraAuroraEvents.cameraAuroraStreamedFrame.name)
.receiveBroadcastStream()) {
debugPrint(data);
yield const CameraImageData(
format: CameraImageFormat(ImageFormatGroup.yuv420, raw: 0),
planes: [],
height: 100,
width: 100,
);
}
}
}

packages/camera/camera_aurora/lib/camera_aurora_platform_interface.dart (31)

@@ -4,6 +4,7 @@ import 'package:camera_platform_interface/camera_platform_interface.dart';
import 'package:plugin_platform_interface/plugin_platform_interface.dart';
import 'camera_aurora_method_channel.dart';
import 'camera_data.dart';
abstract class CameraAuroraPlatform extends PlatformInterface {
/// Constructs a CameraAuroraPlatform.
@@ -26,29 +27,37 @@ abstract class CameraAuroraPlatform extends PlatformInterface {
_instance = instance;
}
Stream<CameraState> onChangeState() {
throw UnimplementedError('onChangeState() has not been implemented.');
}
/// Camera
Future<List<CameraDescription>> availableCameras() {
throw UnimplementedError('availableCameras() has not been implemented.');
}
Future<int> createCamera(String cameraName) {
throw UnimplementedError('createCamera() has not been implemented.');
Future<void> startCapture(double width, double height) {
throw UnimplementedError('startCapture() has not been implemented.');
}
Future<void> dispose(int cameraId) {
throw UnimplementedError('dispose() has not been implemented.');
Future<void> stopCapture() {
throw UnimplementedError('stopCapture() has not been implemented.');
}
Future<CameraState> createCamera(String cameraName) {
throw UnimplementedError('createCamera() has not been implemented.');
}
Future<void> initializeCamera(
int cameraId, {
ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
}) {
throw UnimplementedError('initializeCamera() has not been implemented.');
Future<void> dispose() {
throw UnimplementedError('dispose() has not been implemented.');
}
// make photo
Future<XFile> takePicture(int cameraId) {
throw UnimplementedError('takePicture() has not been implemented.');
}
// record video
Future<void> startVideoRecording(int cameraId) {
throw UnimplementedError('startVideoRecording() has not been implemented.');
}
@@ -65,8 +74,4 @@ abstract class CameraAuroraPlatform extends PlatformInterface {
throw UnimplementedError(
'resumeVideoRecording() has not been implemented.');
}
Stream<CameraImageData> streamedFrame(int cameraId) {
throw UnimplementedError('streamedFrame() has not been implemented.');
}
}

packages/camera/camera_aurora/lib/camera_data.dart (171)

@@ -1,155 +1,38 @@
// SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
// SPDX-License-Identifier: BSD-3-Clause
import 'dart:async';
import 'dart:convert';
import 'package:camera_platform_interface/camera_platform_interface.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'camera_aurora_method_channel.dart';
import 'camera_aurora_platform_interface.dart';
class CameraAurora extends CameraPlatform {
/// Registers this class as the default instance of [CameraPlatform].
static void registerWith() {
CameraPlatform.instance = CameraAurora();
}
// The stream for vending frames to platform interface clients.
StreamController<CameraImageData>? _frameStreamController;
/// Completes with a list of available cameras.
///
/// This method returns an empty list when no cameras are available.
@override
Future<List<CameraDescription>> availableCameras() =>
CameraAuroraPlatform.instance.availableCameras();
/// Creates an uninitialized camera instance and returns the cameraId.
@override
Future<int> createCamera(
CameraDescription cameraDescription,
ResolutionPreset? resolutionPreset, {
bool enableAudio = false,
}) {
EventChannel(CameraAuroraEvents.cameraAuroraStreamedFrame.name)
.receiveBroadcastStream()
.listen((event) {
debugPrint(event);
});
return CameraAuroraPlatform.instance.createCamera(cameraDescription.name);
}
/// Initializes the camera on the device.
///
/// [imageFormatGroup] is used to specify the image formatting used.
/// On Android this defaults to ImageFormat.YUV_420_888 and applies only to the imageStream.
/// On iOS this defaults to kCVPixelFormatType_32BGRA.
/// On Web this parameter is currently not supported.
@override
Future<void> initializeCamera(
int cameraId, {
ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
}) async {
// init
}
/// Releases the resources of this camera.
@override
Future<void> dispose(int cameraId) {
return CameraAuroraPlatform.instance.dispose(cameraId);
}
/// Captures an image and returns the file where it was saved.
@override
Future<XFile> takePicture(int cameraId) =>
CameraAuroraPlatform.instance.takePicture(cameraId);
/// Starts a video recording.
///
/// The length of the recording can be limited by specifying the [maxVideoDuration].
/// By default no maximum duration is specified,
/// meaning the recording will continue until manually stopped.
/// With [maxVideoDuration] set the video is returned in a [VideoRecordedEvent]
/// through the [onVideoRecordedEvent] stream when the set duration is reached.
///
/// This method is deprecated in favour of [startVideoCapturing].
@override
Future<void> startVideoRecording(int cameraId,
{Duration? maxVideoDuration}) =>
CameraAuroraPlatform.instance.startVideoRecording(cameraId);
enum OrientationEvent {
undefined,
portrait,
landscape,
portraitFlipped,
landscapeFlipped,
}
/// Stops the video recording and returns the file where it was saved.
@override
Future<XFile> stopVideoRecording(int cameraId) =>
CameraAuroraPlatform.instance.stopVideoRecording(cameraId);
class CameraState {
CameraState.fromJson(Map<dynamic, dynamic> json)
: id = json['id'] ?? "",
textureId = json['textureId'] ?? -1,
width = (json['width'] ?? 0).toDouble(),
height = (json['height'] ?? 0).toDouble(),
rotationCamera = json['rotationCamera'] ?? 0,
rotationDisplay = json['rotationDisplay'] ?? 0,
error = json['error'] ?? '';
/// Pause video recording.
@override
Future<void> pauseVideoRecording(int cameraId) =>
CameraAuroraPlatform.instance.pauseVideoRecording(cameraId);
final String id;
final int textureId;
final double width;
final double height;
final int rotationCamera;
final int rotationDisplay;
final String error;
/// Resume video recording after pausing.
@override
Future<void> resumeVideoRecording(int cameraId) =>
CameraAuroraPlatform.instance.resumeVideoRecording(cameraId);
bool isNotEmpty() => textureId != -1;
/// The ui orientation changed.
///
/// Implementations for this:
/// - Should support all 4 orientations.
@override
Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() async* {
yield const DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
}
/// The camera has been initialized.
@override
Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) async* {
yield CameraInitializedEvent(
cameraId,
// previewWidth
400,
// previewHeight
400,
// exposureMode
ExposureMode.auto,
// exposurePointSupported
true,
// focusMode
FocusMode.auto,
// focusPointSupported
true,
);
}
@override
Stream<CameraImageData> onStreamedFrameAvailable(
int cameraId, {
CameraImageStreamOptions? options,
}) {
_frameStreamController = StreamController<CameraImageData>(
onListen: () =>
CameraAuroraPlatform.instance.streamedFrame(cameraId).listen((data) {
_frameStreamController!.add(data);
}),
onPause: () => {},
onResume: () => {},
onCancel: () => {},
);
return _frameStreamController!.stream;
}
bool hasError() => error.isNotEmpty;
/// Returns a widget showing a live camera preview.
@override
Widget buildPreview(int cameraId) {
return Center(
child: Text(
'Camera: $cameraId',
style:
const TextStyle(fontWeight: FontWeight.bold, color: Colors.white),
),
);
String toString() {
return '{id: $id, textureId: $textureId, width: $width, height: $height, rotationCamera: $rotationCamera, rotationDisplay: $rotationDisplay, error: $error}';
}
}

packages/camera/camera_aurora/lib/camera_viewfinder.dart (220)

@@ -1,155 +1,103 @@
// SPDX-FileCopyrightText: Copyright 2023 Open Mobile Platform LLC <community@omp.ru>
// SPDX-License-Identifier: BSD-3-Clause
import 'dart:async';
import 'dart:convert';
import 'package:camera_platform_interface/camera_platform_interface.dart';
import 'package:camera_aurora/camera_aurora_platform_interface.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'camera_aurora_method_channel.dart';
import 'camera_aurora_platform_interface.dart';
class CameraAurora extends CameraPlatform {
/// Registers this class as the default instance of [CameraPlatform].
static void registerWith() {
CameraPlatform.instance = CameraAurora();
}
// The stream for vending frames to platform interface clients.
StreamController<CameraImageData>? _frameStreamController;
/// Completes with a list of available cameras.
///
/// This method returns an empty list when no cameras are available.
@override
Future<List<CameraDescription>> availableCameras() =>
CameraAuroraPlatform.instance.availableCameras();
/// Creates an uninitialized camera instance and returns the cameraId.
@override
Future<int> createCamera(
CameraDescription cameraDescription,
ResolutionPreset? resolutionPreset, {
bool enableAudio = false,
}) {
EventChannel(CameraAuroraEvents.cameraAuroraStreamedFrame.name)
.receiveBroadcastStream()
.listen((event) {
debugPrint(event);
});
return CameraAuroraPlatform.instance.createCamera(cameraDescription.name);
}
/// Initializes the camera on the device.
///
/// [imageFormatGroup] is used to specify the image formatting used.
/// On Android this defaults to ImageFormat.YUV_420_888 and applies only to the imageStream.
/// On iOS this defaults to kCVPixelFormatType_32BGRA.
/// On Web this parameter is currently not supported.
@override
Future<void> initializeCamera(
int cameraId, {
ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
}) async {
// init
}
/// Releases the resources of this camera.
@override
Future<void> dispose(int cameraId) {
return CameraAuroraPlatform.instance.dispose(cameraId);
}
import 'camera_data.dart';
/// Captures an image and returns the file where it was saved.
@override
Future<XFile> takePicture(int cameraId) =>
CameraAuroraPlatform.instance.takePicture(cameraId);
class CameraViewfinder extends StatefulWidget {
const CameraViewfinder({
super.key,
required this.width,
required this.height,
});
/// Starts a video recording.
///
/// The length of the recording can be limited by specifying the [maxVideoDuration].
/// By default no maximum duration is specified,
/// meaning the recording will continue until manually stopped.
/// With [maxVideoDuration] set the video is returned in a [VideoRecordedEvent]
/// through the [onVideoRecordedEvent] stream when the set duration is reached.
///
/// This method is deprecated in favour of [startVideoCapturing].
@override
Future<void> startVideoRecording(int cameraId,
{Duration? maxVideoDuration}) =>
CameraAuroraPlatform.instance.startVideoRecording(cameraId);
/// Stops the video recording and returns the file where it was saved.
@override
Future<XFile> stopVideoRecording(int cameraId) =>
CameraAuroraPlatform.instance.stopVideoRecording(cameraId);
final double width;
final double height;
/// Pause video recording.
@override
Future<void> pauseVideoRecording(int cameraId) =>
CameraAuroraPlatform.instance.pauseVideoRecording(cameraId);
/// Resume video recording after pausing.
@override
Future<void> resumeVideoRecording(int cameraId) =>
CameraAuroraPlatform.instance.resumeVideoRecording(cameraId);
State<CameraViewfinder> createState() => _CameraViewfinderState();
}
/// The ui orientation changed.
///
/// Implementations for this:
/// - Should support all 4 orientations.
@override
Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() async* {
yield const DeviceOrientationChangedEvent(DeviceOrientation.portraitUp);
}
class _CameraViewfinderState extends State<CameraViewfinder> {
CameraState _cameraState = CameraState.fromJson({});
/// The camera has been initialized.
@override
Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) async* {
yield CameraInitializedEvent(
cameraId,
// previewWidth
400,
// previewHeight
400,
// exposureMode
ExposureMode.auto,
// exposurePointSupported
true,
// focusMode
FocusMode.auto,
// focusPointSupported
true,
);
initState() {
super.initState();
CameraAuroraPlatform.instance.startCapture(widget.width, widget.height);
CameraAuroraPlatform.instance.onChangeState().listen((event) {
if (mounted) {
setState(() {
debugPrint(_cameraState.toString());
_cameraState = event;
});
}
});
}
@override
Stream<CameraImageData> onStreamedFrameAvailable(
int cameraId, {
CameraImageStreamOptions? options,
}) {
_frameStreamController = StreamController<CameraImageData>(
onListen: () =>
CameraAuroraPlatform.instance.streamedFrame(cameraId).listen((data) {
_frameStreamController!.add(data);
}),
onPause: () => {},
onResume: () => {},
onCancel: () => {},
);
return _frameStreamController!.stream;
void dispose() {
super.dispose();
CameraAuroraPlatform.instance.stopCapture();
}
/// Returns a widget showing a live camera preview.
@override
Widget buildPreview(int cameraId) {
return Center(
child: Text(
'Camera: $cameraId',
style:
const TextStyle(fontWeight: FontWeight.bold, color: Colors.white),
),
);
Widget build(BuildContext context) {
if (_cameraState.hasError()) {
return Center(
child: Text(
'Error: ${_cameraState.error}',
style:
const TextStyle(fontWeight: FontWeight.bold, color: Colors.white),
),
);
} else if (_cameraState.isNotEmpty()) {
int turn = 0;
switch (_cameraState.rotationDisplay) {
case 0:
turn = _cameraState.id.contains('front') ? -1 : 1;
break;
case 90:
turn = 0;
break;
case 180:
turn = _cameraState.id.contains('front') ? 1 : -1;
break;
default: // 270
turn = 2;
}
double height = 10;
double width = 10;
// widget.width = widget.height
// _cameraState.height = _cameraState.width
if (_cameraState.height != 0 && _cameraState.width != 0) {
if (_cameraState.rotationDisplay == 90 ||
_cameraState.rotationDisplay == 270) {
width = widget.width * _cameraState.height / _cameraState.width;
height = widget.height * _cameraState.width / _cameraState.height;
} else {
width = _cameraState.height * widget.height / _cameraState.width;
height = _cameraState.width * widget.width / _cameraState.height;
}
}
return RotatedBox(
quarterTurns: turn,
child: SizedBox(
width: height, // height
height: width, // width
child: Opacity(
opacity: _cameraState.height == 0 ? 0 : 1,
child: Texture(textureId: _cameraState.textureId),
),
),
);
}
return const SizedBox.shrink();
}
}
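The build method above rotates the Texture by quarter turns derived from rotationDisplay and swaps width and height when the display is rotated sideways. As a generic reference for what that sizing has to achieve (this is the plain aspect-fit rule with hypothetical names, not the plugin's exact arithmetic):

```cpp
#include <algorithm>
#include <cstdio>
#include <utility>

struct Size { double w, h; };

// Fit a camera frame into the widget's box: swap the frame's sides for
// 90/270-degree display rotations, then scale preserving aspect ratio.
static Size FitPreview(Size box, Size frame, int rotationDisplay)
{
    if (rotationDisplay == 90 || rotationDisplay == 270)
        std::swap(frame.w, frame.h);
    const double scale = std::min(box.w / frame.w, box.h / frame.h);
    return {frame.w * scale, frame.h * scale};
}

int main()
{
    // A 1280x720 landscape frame on a 360x640 portrait box, display at 90°.
    const Size s = FitPreview({360, 640}, {1280, 720}, 90);
    std::printf("%.0fx%.0f\n", s.w, s.h); // prints "360x640"
    return 0;
}
```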

packages/camera/camera_aurora/lib/type_conversion.dart (43)

@@ -1,43 +0,0 @@
import 'dart:typed_data';
import 'package:camera_platform_interface/camera_platform_interface.dart';
CameraImageData cameraImageFromPlatformData(Map<dynamic, dynamic> data) {
return CameraImageData(
format: _cameraImageFormatFromPlatformData(data['format']),
height: data['height'] as int,
width: data['width'] as int,
lensAperture: data['lensAperture'] as double?,
sensorExposureTime: data['sensorExposureTime'] as int?,
sensorSensitivity: data['sensorSensitivity'] as double?,
planes: List<CameraImagePlane>.unmodifiable(
(data['planes'] as List<dynamic>).map<CameraImagePlane>(
(dynamic planeData) => _cameraImagePlaneFromPlatformData(
planeData as Map<dynamic, dynamic>))));
}
CameraImageFormat _cameraImageFormatFromPlatformData(dynamic data) {
return CameraImageFormat(_imageFormatGroupFromPlatformData(data), raw: data);
}
ImageFormatGroup _imageFormatGroupFromPlatformData(dynamic data) {
switch (data) {
case 35: // android.graphics.ImageFormat.YUV_420_888
return ImageFormatGroup.yuv420;
case 256: // android.graphics.ImageFormat.JPEG
return ImageFormatGroup.jpeg;
case 17: // android.graphics.ImageFormat.NV21
return ImageFormatGroup.nv21;
}
return ImageFormatGroup.unknown;
}
CameraImagePlane _cameraImagePlaneFromPlatformData(Map<dynamic, dynamic> data) {
return CameraImagePlane(
bytes: data['bytes'] as Uint8List,
bytesPerPixel: data['bytesPerPixel'] as int?,
bytesPerRow: data['bytesPerRow'] as int,
height: data['height'] as int?,
width: data['width'] as int?);
}

packages/camera/camera_aurora/pubspec.yaml (1)

@@ -15,6 +15,7 @@ dependencies:
plugin_platform_interface: ^2.0.2
camera_platform_interface: ^2.6.0
image: ^4.1.3
async: ^2.9.0
dev_dependencies:
flutter_test:
