OrangePi3588Media/plugins/alarm/actions/snapshot_action.cpp
2026-04-18 20:58:31 +08:00

336 lines
11 KiB
C++

#include "snapshot_action.h"
#include <chrono>
#include <cstdint>
#include <cstring>
#include <ctime>
#include <iomanip>
#include <mutex>
#include <sstream>
#include "utils/dma_alloc.h"
#include "utils/logger.h"
#if defined(RK3588_ENABLE_FFMPEG)
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
#include <libavutil/pixfmt.h>
}
#define HAS_FFMPEG 1
#else
#define HAS_FFMPEG 0
#endif
namespace rk3588 {
namespace {
// Thread-safe conversion of `t` to local calendar time in `out`.
// Returns false if the conversion fails.
bool SafeLocalTime(std::time_t t, std::tm& out) {
#if defined(_WIN32)
  return localtime_s(&out, &t) == 0;
#elif defined(__unix__) || defined(__APPLE__)
  return localtime_r(&t, &out) != nullptr;
#else
  // No reentrant variant available: serialize access to the shared
  // static buffer that std::localtime returns.
  static std::mutex mu;
  std::lock_guard<std::mutex> lock(mu);
  if (std::tm* res = std::localtime(&t)) {
    out = *res;
    return true;
  }
  return false;
#endif
}
// Saturate an int into the unsigned 8-bit range [0, 255].
inline uint8_t ClipU8(int v) {
  return static_cast<uint8_t>(v < 0 ? 0 : (v > 255 ? 255 : v));
}
// Resolve the base pointer of plane `idx`: prefer the plane's explicit
// data pointer, otherwise fall back to `data + offset` within the
// frame's packed buffer. Returns nullptr when the plane is unavailable.
inline const uint8_t* PlanePtr(const Frame& f, int idx) {
  if (idx < 0 || idx >= f.plane_count) return nullptr;
  const auto& plane = f.planes[idx];
  if (plane.data) return plane.data;
  if (f.data && plane.offset >= 0) {
    return f.data + static_cast<size_t>(plane.offset);
  }
  return nullptr;
}
// Pick the row stride for plane `idx`: per-plane stride when positive,
// then the frame-wide stride, then the caller-supplied fallback.
inline int PlaneStride(const Frame& f, int idx, int fallback) {
  const bool valid_idx = (idx >= 0 && idx < f.plane_count);
  if (valid_idx && f.planes[idx].stride > 0) {
    return f.planes[idx].stride;
  }
  return (f.stride > 0) ? f.stride : fallback;
}
#if HAS_FFMPEG
// Copies/converts the pixels of `src` into the pre-allocated planes of
// `dst`, which must be YUV420P or YUVJ420P with valid data pointers and
// strides.
//
// Supported source formats:
//   - PixelFormat::YUV420: row-by-row memcpy of Y, U, V planes.
//   - PixelFormat::NV12:   Y memcpy plus de-interleave of the packed UV
//     plane; if no UV plane is described, assumes it follows the Y
//     plane in the packed buffer.
//   - PixelFormat::RGB/BGR: integer BT.601-style full-range conversion
//     with 2x2 chroma averaging.
//
// Returns false for unsupported formats, invalid dimensions, or
// missing plane data.
bool FillYuv420pFromFrame(const Frame& src, AVFrame* dst) {
  if (!dst) return false;
  if (dst->format != AV_PIX_FMT_YUV420P && dst->format != AV_PIX_FMT_YUVJ420P) return false;
  if (src.width <= 0 || src.height <= 0) return false;
  const int w = src.width;
  const int h = src.height;
  uint8_t* y = dst->data[0];
  uint8_t* u = dst->data[1];
  uint8_t* v = dst->data[2];
  const int y_stride = dst->linesize[0];
  const int u_stride = dst->linesize[1];
  const int v_stride = dst->linesize[2];
  if (!y || !u || !v || y_stride <= 0 || u_stride <= 0 || v_stride <= 0) return false;
  if (src.format == PixelFormat::YUV420) {
    const uint8_t* sy = PlanePtr(src, 0);
    const uint8_t* su = PlanePtr(src, 1);
    const uint8_t* sv = PlanePtr(src, 2);
    if (!sy || !su || !sv) return false;
    const int sy_stride = PlaneStride(src, 0, w);
    const int su_stride = PlaneStride(src, 1, w / 2);
    const int sv_stride = PlaneStride(src, 2, w / 2);
    for (int row = 0; row < h; ++row) {
      std::memcpy(y + row * y_stride, sy + row * sy_stride, static_cast<size_t>(w));
    }
    const int uv_h = h / 2;
    const int uv_w = w / 2;
    for (int row = 0; row < uv_h; ++row) {
      std::memcpy(u + row * u_stride, su + row * su_stride, static_cast<size_t>(uv_w));
      std::memcpy(v + row * v_stride, sv + row * sv_stride, static_cast<size_t>(uv_w));
    }
    return true;
  }
  if (src.format == PixelFormat::NV12) {
    const uint8_t* sy = PlanePtr(src, 0);
    const uint8_t* suv = PlanePtr(src, 1);
    if (!sy) return false;
    const int sy_stride = PlaneStride(src, 0, w);
    const int suv_stride = PlaneStride(src, 1, w);
    if (!suv) {
      // Fallback: packed NV12 layout — UV plane directly after Y plane.
      if (!src.data) return false;
      suv = src.data + static_cast<size_t>(sy_stride) * static_cast<size_t>(h);
    }
    for (int row = 0; row < h; ++row) {
      std::memcpy(y + row * y_stride, sy + row * sy_stride, static_cast<size_t>(w));
    }
    const int uv_h = h / 2;
    const int uv_w = w / 2;
    for (int row = 0; row < uv_h; ++row) {
      const uint8_t* src_uv = suv + row * suv_stride;
      uint8_t* dst_u = u + row * u_stride;
      uint8_t* dst_v = v + row * v_stride;
      // NV12 stores chroma interleaved as U0 V0 U1 V1 ...
      for (int col = 0; col < uv_w; ++col) {
        dst_u[col] = src_uv[col * 2 + 0];
        dst_v[col] = src_uv[col * 2 + 1];
      }
    }
    return true;
  }
  if (src.format == PixelFormat::RGB || src.format == PixelFormat::BGR) {
    const bool is_bgr = (src.format == PixelFormat::BGR);
    const uint8_t* s = PlanePtr(src, 0);
    if (!s) s = src.data;
    if (!s) return false;
    const int s_stride = PlaneStride(src, 0, w * 3);
    const int uv_w = w / 2;
    const int uv_h = h / 2;
    for (int row = 0; row < h; row += 2) {
      const uint8_t* row0 = s + row * s_stride;
      // If there is no second row, alias it to row0 so the 2x2 chroma
      // average simply counts row0 twice.
      const uint8_t* row1 = (row + 1 < h) ? (s + (row + 1) * s_stride) : row0;
      uint8_t* y0 = y + row * y_stride;
      uint8_t* y1 = (row + 1 < h) ? (y + (row + 1) * y_stride) : y0;
      const int uv_row = row / 2;
      uint8_t* uu = (uv_row < uv_h) ? (u + uv_row * u_stride) : nullptr;
      uint8_t* vv = (uv_row < uv_h) ? (v + uv_row * v_stride) : nullptr;
      for (int col = 0; col < w; col += 2) {
        int u_sum = 0;
        int v_sum = 0;
        int samples = 0;
        // Converts one pixel to Y (written immediately) and accumulates
        // the 8.8 fixed-point U/V contributions for the 2x2 average.
        auto sample = [&](const uint8_t* p, uint8_t* ydst) {
          const int b = is_bgr ? p[0] : p[2];
          const int g = p[1];
          const int r = is_bgr ? p[2] : p[0];
          const int yy = (77 * r + 150 * g + 29 * b + 128) >> 8;
          *ydst = ClipU8(yy);
          u_sum += (-43 * r - 84 * g + 127 * b);
          v_sum += (127 * r - 106 * g - 21 * b);
          samples += 1;
        };
        const uint8_t* p00 = row0 + col * 3;
        const uint8_t* p01 = (col + 1 < w) ? (row0 + (col + 1) * 3) : p00;
        const uint8_t* p10 = row1 + col * 3;
        const uint8_t* p11 = (col + 1 < w) ? (row1 + (col + 1) * 3) : p10;
        sample(p00, &y0[col]);
        if (col + 1 < w) sample(p01, &y0[col + 1]);
        // Row1/y1 alias row0/y0 at the bottom edge (see above), so the
        // last-row case needs no separate branch: the Y rewrite is
        // idempotent and the chroma sum counts row0 twice, as intended.
        sample(p10, &y1[col]);
        if (col + 1 < w) sample(p11, &y1[col + 1]);
        const int denom = samples > 0 ? samples : 1;
        // Undo the 8.8 fixed-point scale and re-center chroma at 128,
        // with +128 for rounding before the >> 8.
        const int u_val = ((u_sum / denom) + 128 * 256 + 128) >> 8;
        const int v_val = ((v_sum / denom) + 128 * 256 + 128) >> 8;
        const int uv_col = col / 2;
        if (uu && vv && uv_col >= 0 && uv_col < uv_w) {
          uu[uv_col] = ClipU8(u_val);
          vv[uv_col] = ClipU8(v_val);
        }
      }
    }
    return true;
  }
  return false;
}
#endif
} // namespace
// Reads snapshot configuration (format, quality, date-prefix) and
// constructs the uploader. When the config has no "upload" section,
// a default (local-storage) uploader is created instead.
// Returns false if the uploader could not be created.
bool SnapshotAction::Init(const SimpleJson& config) {
  format_ = config.ValueOr<std::string>("format", "jpg");
  quality_ = config.ValueOr<int>("quality", 85);
  use_date_prefix_ = config.ValueOr<bool>("use_date_prefix", true);
  if (const SimpleJson* upload_cfg = config.Find("upload")) {
    uploader_ = CreateUploader(*upload_cfg);
  } else {
    // Default to local storage
    SimpleJson default_cfg;
    uploader_ = CreateUploader(default_cfg);
  }
  // Validate both branches: previously the default path skipped this
  // check, letting a null uploader_ slip through to Execute().
  if (!uploader_) {
    LogError("[SnapshotAction] failed to create uploader");
    return false;
  }
  LogInfo("[SnapshotAction] initialized, format=" + format_ + " quality=" + std::to_string(quality_));
  return true;
}
// Encodes `frame` as a JPEG and uploads it under a time-based key,
// storing the resulting URL in event.snapshot_url on success.
// Logs a warning and returns early on any failure.
void SnapshotAction::Execute(AlarmEvent& event, std::shared_ptr<Frame> frame) {
  const std::string id = event.event_id.empty() ? "-" : event.event_id;
  if (!frame || !frame->data) {
    LogWarn("[SnapshotAction] no frame data event_id=" + id);
    return;
  }
  // Guard against Init() having failed or never been called;
  // uploader_ is dereferenced unconditionally below.
  if (!uploader_) {
    LogWarn("[SnapshotAction] uploader not initialized event_id=" + id);
    return;
  }
  // Bracket CPU reads of DMA-backed frame memory with sync calls.
  if (frame->DmaFd() >= 0) frame->SyncStart();
  auto jpeg_data = EncodeJpeg(frame);
  if (frame->DmaFd() >= 0) frame->SyncEnd();
  if (jpeg_data.empty()) {
    LogWarn("[SnapshotAction] failed to encode JPEG event_id=" + id);
    return;
  }
  std::string key = GenerateKey(event);
  auto result = uploader_->Upload(key, jpeg_data.data(), jpeg_data.size(), "image/jpeg");
  if (result.success) {
    event.snapshot_url = result.url;
    LogInfo("[SnapshotAction] uploaded: " + result.url + " event_id=" + id);
  } else {
    LogWarn("[SnapshotAction] upload failed event_id=" + id + " error=" + result.error);
  }
}
// Builds the upload key for a snapshot:
//   "[YYYYMMDD/]<node_id>_<HHMMSS>_<frame_id>.<format>"
// using the current local time; the date prefix is optional.
std::string SnapshotAction::GenerateKey(const AlarmEvent& event) {
  const std::time_t now_t =
      std::chrono::system_clock::to_time_t(std::chrono::system_clock::now());
  std::tm tm{};
  // Best effort: on failure the zero-initialized tm still yields a
  // deterministic (if meaningless) timestamp.
  (void)SafeLocalTime(now_t, tm);
  std::ostringstream oss;
  if (use_date_prefix_) {
    oss << std::put_time(&tm, "%Y%m%d") << '/';
  }
  oss << event.node_id << '_' << std::put_time(&tm, "%H%M%S") << '_'
      << event.frame_id << '.' << format_;
  return oss.str();
}
// Encodes `frame` into an in-memory JPEG using FFmpeg's MJPEG encoder.
// Returns an empty vector on any failure (no encoder, allocation
// failure, unsupported source pixel format, or encode error).
std::vector<uint8_t> SnapshotAction::EncodeJpeg(const std::shared_ptr<Frame>& frame) {
  std::vector<uint8_t> output;
#if HAS_FFMPEG
  const AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_MJPEG);
  if (!codec) {
    LogError("[SnapshotAction] MJPEG encoder not found");
    return output;
  }
  AVCodecContext* ctx = avcodec_alloc_context3(codec);
  if (!ctx) return output;
  ctx->width = frame->width;
  ctx->height = frame->height;
  ctx->time_base = AVRational{1, 25};
  // Some embedded FFmpeg builds expose MJPEG encoder that only accepts YUVJ*.
  ctx->pix_fmt = AV_PIX_FMT_YUVJ420P;
  ctx->color_range = AVCOL_RANGE_JPEG;
  // Map quality_ (0-100, higher is better) onto the MJPEG qscale range
  // (1-31, lower is better); clamp so out-of-range configs stay valid.
  int q = 31 - (quality_ * 30 / 100);
  if (q < 1) q = 1;
  if (q > 31) q = 31;
  ctx->qmin = q;
  ctx->qmax = q;
  if (avcodec_open2(ctx, codec, nullptr) < 0) {
    avcodec_free_context(&ctx);
    return output;
  }
  AVFrame* av_frame = av_frame_alloc();
  if (!av_frame) {  // was dereferenced without a null check before
    avcodec_free_context(&ctx);
    return output;
  }
  av_frame->width = frame->width;
  av_frame->height = frame->height;
  av_frame->format = AV_PIX_FMT_YUVJ420P;
  av_frame->color_range = AVCOL_RANGE_JPEG;
  if (av_frame_get_buffer(av_frame, 32) < 0) {
    av_frame_free(&av_frame);
    avcodec_free_context(&ctx);
    return output;
  }
  if (!FillYuv420pFromFrame(*frame, av_frame)) {
    av_frame_free(&av_frame);
    avcodec_free_context(&ctx);
    return output;
  }
  av_frame->pts = 0;
  AVPacket* pkt = av_packet_alloc();
  // Guard pkt: avcodec_receive_packet must not be called with nullptr.
  if (pkt && avcodec_send_frame(ctx, av_frame) == 0) {
    if (avcodec_receive_packet(ctx, pkt) == 0) {
      output.assign(pkt->data, pkt->data + pkt->size);
    }
  }
  av_packet_free(&pkt);  // safe on nullptr
  av_frame_free(&av_frame);
  avcodec_free_context(&ctx);
#else
  // Stub: FFmpeg support was compiled out; nothing to encode with.
  LogError("[SnapshotAction] FFmpeg not enabled, cannot encode JPEG");
#endif
  return output;
}
} // namespace rk3588