@@ -22,8 +22,49 @@ TextureCamera::TextureCamera(TextureRegistrar *plugin, const CameraErrorHandler
    : m_plugin(plugin)
    , m_onError(onError)
    , m_manager(StreamCameraManager())
    , m_camera(nullptr)
{}

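// GetImageBase64() rotates the last mapped YCbCr frame to the sensor mount angle,
// converts it to 32-bit pixels with libyuv::I420ToARGB, encodes it as JPEG through
// QImage/QBuffer and returns the bytes base64-encoded; it returns an empty string
// when no frame or camera is available.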
std::string TextureCamera::GetImageBase64()
{
    if (m_frame && m_camera) {
        Aurora::StreamCamera::CameraInfo info;
        if (m_camera->getInfo(info)) {
            if (m_frame->chromaStep == 1) {
                auto result = YUVI420Rotate(
                    m_frame->y,
                    m_frame->cr,
                    m_frame->cb,
                    m_frame->width,
                    m_frame->height,
                    info.mountAngle
                );

                QBuffer qbuffer;
                qbuffer.open(QIODevice::WriteOnly);
                QSize size(result.width, result.height);
                QImage image(size, QImage::Format_RGBA8888);

                libyuv::I420ToARGB(
                    result.y, result.strideY,
                    result.u, result.strideU,
                    result.v, result.strideV,
                    reinterpret_cast<uint8_t *>(image.bits()),
                    result.width * 4,
                    result.width,
                    result.height
                );

                image.save(&qbuffer, "JPEG");

                return qbuffer.data().toBase64().toStdString();
            }
        }
    }
    return "";
}

std::vector<Encodable> TextureCamera::GetAvailableCameras()
{
    std::vector<Encodable> cameras;
@@ -52,12 +93,12 @@ std::map<Encodable, Encodable> TextureCamera::GetState()

    auto orientation = static_cast<int>(PlatformMethods::GetOrientation());

    return std::map<Encodable, Encodable>{
    return std::map<Encodable, Encodable> {
        {"id", info.id},
        {"textureId", m_textureId},
        {"width", m_captureWidth},
        {"height", m_captureHeight},
        {"rotationCamera", info.mountAngle},
        {"mountAngle", info.mountAngle},
        {"rotationDisplay", orientation},
        {"error", m_error},
    };
@@ -97,32 +138,18 @@ void TextureCamera::SendError(std::string error)
    m_onError();
}

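// StartCapture() records the requested view size, restarts any running capture,
// picks the last reported CameraCapability, recomputes the capture size via
// ResizeFrame, and unregisters plus reports an error if startCapture() fails.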
std::map<Encodable, Encodable> TextureCamera::StartCapture()
std::map<Encodable, Encodable> TextureCamera::StartCapture(size_t width, size_t height)
{
    m_viewWidth = width;
    m_viewHeight = height;

    if (m_camera) {
        if (m_camera->captureStarted()) {
            m_camera->stopCapture();
        }
        Aurora::StreamCamera::CameraInfo info;
        if (m_camera->getInfo(info)) {
            std::vector<Aurora::StreamCamera::CameraCapability> caps;

            if (m_manager->queryCapabilities(info.id, caps)) {

                auto cap = caps.back();

                // for(int i = caps.size()-1; i >= 0; i--) {
                //     if (displayWidth + displayHeight >= caps[i].width + caps[i].height) {
                //         cap = caps[i];
                //         break;
                //     }
                // }

                std::cout << "Version swscale_version: " << swscale_version() << std::endl;

                m_captureWidth = cap.width;
                m_captureHeight = cap.height;

                ResizeFrame(width, height, info, cap);
                if (!m_camera->startCapture(cap)) {
                    Unregister();
                    SendError("Stream camera error start capture");
@@ -137,6 +164,7 @@ std::map<Encodable, Encodable> TextureCamera::StartCapture()

void TextureCamera::StopCapture()
{
    if (m_camera && m_camera->captureStarted()) {
        m_frame = nullptr;
        m_camera->stopCapture();
    }
}
@@ -145,7 +173,7 @@ std::map<Encodable, Encodable> TextureCamera::Register(std::string cameraName)
{
    m_textureId = m_plugin->RegisterTexture(
        [this]([[maybe_unused]] size_t width, [[maybe_unused]] size_t height) -> std::optional<TextureVariant> {
            if (m_bits) {
            if (m_bits && m_captureWidth != 0 && m_captureHeight != 0) {
                return std::make_optional(TextureVariant(FlutterPixelBuffer{
                    m_bits,
                    m_captureWidth,
@@ -155,82 +183,129 @@ std::map<Encodable, Encodable> TextureCamera::Register(std::string cameraName)
            return std::nullopt;
        });

    if (CreateCamera(cameraName)) {
        StartCapture();
    if (CreateCamera(cameraName) && m_viewWidth != 0) {
        StartCapture(m_viewWidth, m_viewHeight);
    }

    return GetState();
}

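// Unregister() stops capture, releases the Flutter texture and resets the cached
// state (error, counter, texture id, capture size, pixel buffer, camera handle)
// before returning the resulting state to the caller.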
std::map<Encodable, Encodable> TextureCamera::Unregister()
std::map<Encodable, Encodable> TextureCamera::Unregister()
{
    StopCapture();

    m_plugin->UnregisterTexture(m_textureId);

    m_error = "";
    m_counter = 0;
    m_textureId = 0;
    m_captureWidth = 0;
    m_captureHeight = 0;
    m_bits = nullptr;
    m_camera = nullptr;

    m_plugin->UnregisterTexture(m_textureId);
    return GetState();
}

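// ResizeFrame(width, height) re-queries the capabilities of the current camera and
// delegates to the four-argument ResizeFrame overload, but only while a capture is
// running and both requested dimensions differ from the current capture size.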
std::map<Encodable, Encodable> TextureCamera::ResizeFrame(size_t width, size_t height)
{
    if (m_camera && m_camera->captureStarted() && !(width == m_captureWidth || height == m_captureHeight)) {
        Aurora::StreamCamera::CameraInfo info;
        if (m_camera->getInfo(info)) {
            std::vector<Aurora::StreamCamera::CameraCapability> caps;
            if (m_manager->queryCapabilities(info.id, caps)) {
                auto cap = caps.back();
                ResizeFrame(width, height, info, cap);
            }
        }
    }
    return GetState();
}

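// The ResizeFrame(width, height, info, cap) overload below derives the capture
// buffer size: the capability dimensions are swapped for 90/270-degree mounts, the
// view size is padded (at least 500, otherwise +100), the height is pinned to that
// bound with the width scaled as (cw * dh) / ch, and if that width exceeds dw the
// width is pinned instead and the height rescaled as (ch * dw) / cw.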
void TextureCamera::onCameraFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
void TextureCamera::ResizeFrame(size_t width, size_t height, Aurora::StreamCamera::CameraInfo info, Aurora::StreamCamera::CameraCapability cap)
{
    auto cw = cap.width;
    auto ch = cap.height;

    auto dw = width < 500 ? 500 : width + 100;
    auto dh = height < 500 ? 500 : height + 100;

    if (info.mountAngle == 270 || info.mountAngle == 90) {
        cw = cap.height;
        ch = cap.width;
    }

    m_bits = nullptr;

    m_captureHeight = dh;
    m_captureWidth = (cw * dh) / ch;

    if (m_captureWidth > dw) {
        m_captureWidth = dw;
        m_captureHeight = (ch * dw) / cw;
    }
}

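// GetFrame() below increments a frame counter, skips every third frame (and any
// frame delivered after the camera handle is gone), and otherwise caches the
// mapped YCbCr frame in m_frame for GetImageBase64() before returning it.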
std::optional<std::shared_ptr<const Aurora::StreamCamera::YCbCrFrame>> TextureCamera::GetFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
{
    m_counter += 1;
    if (m_counter < 0) m_counter = 0;
    if (m_counter %3 == 0) return;

    auto frame = buffer->mapYCbCr();

    unsigned int displayWidth = PlatformMethods::GetDisplayWidth();
    unsigned int displayHeight = PlatformMethods::GetDisplayHeight();

    auto bits = std::shared_ptr<uint8_t>((uint8_t *) malloc(frame->width * 4 * frame->height), free);

    auto yPtr = std::shared_ptr<uint8_t>((uint8_t *) malloc(frame->width * 4 * frame->height), free);
    auto uPtr = std::shared_ptr<uint8_t>((uint8_t *) malloc(frame->width * 4 * frame->height), free);
    auto vPtr = std::shared_ptr<uint8_t>((uint8_t *) malloc(frame->width * 4 * frame->height), free);

    libyuv::I420Scale(
        frame->y,
        frame->yStride,
        frame->cr,
        frame->cStride,
        frame->cb,
        frame->cStride,
        frame->width,
        frame->height,
        yPtr.get(),
        frame->yStride,
        uPtr.get(),
        frame->cStride,
        vPtr.get(),
        frame->cStride,
        frame->width,
        frame->height,
        libyuv::kFilterBilinear
    );

    libyuv::Android420ToARGB(
        yPtr.get(),
        frame->yStride,
        uPtr.get(),
        frame->cStride,
        vPtr.get(),
        frame->cStride,
        frame->chromaStep,
        bits.get(),
        frame->width * 4,
        frame->width,
        frame->height
    );
    if (m_counter < 0) {
        m_counter = 0;
    }

    if (m_counter %3 == 0 || !m_camera) {
        return std::nullopt;
    }

    m_frame = buffer->mapYCbCr();

    return m_frame;
}

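// onCameraFrame() below scales the cached I420 frame to the current capture size
// with YUVI420Scale, converts it to 32-bit pixels into a freshly allocated buffer,
// publishes that buffer through m_bits and marks the Flutter texture available.
// The repeated m_camera/captureStarted() checks return early if capture stops
// while the frame is still being processed.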
void TextureCamera::onCameraFrame(std::shared_ptr<Aurora::StreamCamera::GraphicBuffer> buffer)
{
    if (auto optional = GetFrame(buffer)) {

        auto frame = optional.value();

        if (!m_camera || !m_camera->captureStarted()) {
            return;
        }

        auto result = YUVI420Scale(
            frame->y,
            frame->cr,
            frame->cb,
            frame->width,
            frame->height,
            m_captureWidth,
            m_captureHeight
        );

        if (!m_camera || !m_camera->captureStarted()) {
            return;
        }

        auto bits = std::shared_ptr<uint8_t>((uint8_t *) malloc(result.width * result.height * 4), free);

    m_bits = bits;
    m_plugin->MarkTextureAvailable(m_textureId);
        libyuv::I420ToARGB(
            result.y, result.strideY,
            result.u, result.strideU,
            result.v, result.strideV,
            bits.get(),
            result.width * 4,
            result.width,
            result.height
        );

        if (!m_camera || !m_camera->captureStarted()) {
            return;
        }

        m_bits = bits;
        m_plugin->MarkTextureAvailable(m_textureId);
    }
}

void TextureCamera::onCameraError(const std::string &errorDescription)
@@ -244,3 +319,108 @@ void TextureCamera::onCameraParameterChanged([[maybe_unused]] Aurora::StreamCame
{
    std::cout << "onCameraParameterChanged: " << value << std::endl;
}

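// YUVI420Scale() below rescales an I420 frame with libyuv::I420Scale into a single
// heap allocation holding the Y plane followed by the U and V planes, and returns
// the plane pointers, strides and the owning shared_ptr as a ResultYUV.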
ResultYUV TextureCamera::YUVI420Scale(
    const uint8_t* srcY,
    const uint8_t* srcU,
    const uint8_t* srcV,
    int srcWidth,
    int srcHeight,
    int outWidth,
    int outHeight
)
{
    auto bufSize = (((outWidth * outHeight) + ((outWidth + 1) / 2) * ((outHeight + 1) / 2))) * 2;
    auto buf = std::shared_ptr<uint8_t>((uint8_t *) malloc(bufSize), free);

    auto y = buf.get();
    auto u = buf.get() + outWidth * outHeight;
    auto v = buf.get() + outWidth * outHeight + (outWidth * outHeight + 3) / 4;

    auto srcStrideY = srcWidth;
    auto srcStrideU = (srcWidth + 1) / 2;
    auto srcStrideV = srcStrideU;

    auto outStrideY = outWidth;
    auto outStrideU = (outWidth + 1) / 2;
    auto outStrideV = outStrideU;

    libyuv::I420Scale(
        srcY, srcStrideY,
        srcU, srcStrideU,
        srcV, srcStrideV,
        srcWidth,
        srcHeight,
        y, outStrideY,
        u, outStrideU,
        v, outStrideV,
        outWidth,
        outHeight,
        libyuv::kFilterBilinear
    );

    return ResultYUV{
        y, outStrideY,
        u, outStrideU,
        v, outStrideV,
        outWidth,
        outHeight,
        buf
    };
}

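// YUVI420Rotate() below rotates an I420 frame by 0, 90, 180 or 270 degrees via
// libyuv::I420Rotate; the libyuv::RotationMode enumerators carry those same numeric
// values, and the output width/height are swapped for 90- and 270-degree rotations.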
ResultYUV TextureCamera::YUVI420Rotate(
    const uint8_t* srcY,
    const uint8_t* srcU,
    const uint8_t* srcV,
    int srcWidth,
    int srcHeight,
    int degree // 0, 90, 180, 270
)
{
    int outWidth = srcWidth;
    int outHeight = srcHeight;

    if (degree == 90 || degree == 270) {
        outWidth = srcHeight;
        outHeight = srcWidth;
    }

    enum libyuv::RotationMode mode = (enum libyuv::RotationMode) degree;

    auto bufSize = (((outWidth * outHeight) + ((outWidth + 1) / 2) * ((outHeight + 1) / 2))) * 2;
    auto buf = std::shared_ptr<uint8_t>((uint8_t *) malloc(bufSize), free);

    auto y = buf.get();
    auto u = buf.get() + outWidth * outHeight;
    auto v = buf.get() + outWidth * outHeight + (outWidth * outHeight + 3) / 4;

    auto srcStrideY = srcWidth;
    auto srcStrideU = (srcWidth + 1) / 2;
    auto srcStrideV = srcStrideU;

    auto outStrideY = outWidth;
    auto outStrideU = (outWidth + 1) / 2;
    auto outStrideV = outStrideU;

    libyuv::I420Rotate(
        srcY, srcStrideY,
        srcU, srcStrideU,
        srcV, srcStrideV,
        y, outStrideY,
        u, outStrideU,
        v, outStrideV,
        srcWidth,
        srcHeight,
        mode
    );

    return ResultYUV{
        y, outStrideY,
        u, outStrideU,
        v, outStrideV,
        outWidth,
        outHeight,
        buf
    };
}