Initial commit of TensorRT Pipeline project, including core components for video stream inference. Added configuration files, build scripts, and basic structure for input, output, and processing modules. Implemented logging and error handling mechanisms. Included example usage and testing framework setup.

This commit is contained in:
sladro 2024-12-24 16:26:41 +08:00
parent e13cb3659c
commit 90a7b5acc0
23 changed files with 1519 additions and 1181 deletions

74
.gitignore vendored
View File

@ -1,38 +1,38 @@
# Build directories
build/
bin/
lib/
# IDE files
.vscode/
.idea/
*.swp
*.swo
# Compiled files
*.o
*.so
*.a
*.dll
*.dylib
# CMake files
CMakeCache.txt
CMakeFiles/
cmake_install.cmake
compile_commands.json
# TensorRT engine files
*.engine
*.plan
# Python cache
__pycache__/
*.py[cod]
# Logs
*.log
# System files
.DS_Store
# Build directories
build/
bin/
lib/
# IDE files
.vscode/
.idea/
*.swp
*.swo
# Compiled files
*.o
*.so
*.a
*.dll
*.dylib
# CMake files
CMakeCache.txt
CMakeFiles/
cmake_install.cmake
compile_commands.json
# TensorRT engine files
*.engine
*.plan
# Python cache
__pycache__/
*.py[cod]
# Logs
*.log
# System files
.DS_Store
Thumbs.db

View File

@ -117,12 +117,15 @@ inference:
engine_path: "/app/models/yolov8n.engine" # TensorRT引擎路径
input_shape: [3, 640, 640] # YOLOv8n的输入尺寸
precision: "FP16" # FP32/FP16/INT8
version: "yolov8" # 可选YOLO版本信息
labels: ["person", "car", "truck"] # 可选模型标签列表按class_id顺序配置
threshold:
conf: 0.5
nms: 0.45
gpu_id: 0
render:
enable: true # 控制是否启用渲染功能
window:
name: "Detection Results"
width: 1280

View File

@ -0,0 +1 @@
1|[ERROR] Error parsing log config: yaml-cpp: error at line 60, column 1: bad conversion

View File

@ -0,0 +1,3 @@
1|log:
2| level: info
3| save_path:

View File

@ -0,0 +1,5 @@
1|TEST_F(PipelineTest, Initialization) {
2| Pipeline pipeline(config_path_, true); // 使用测试模式
3| ASSERT_TRUE(pipeline.init());
4| EXPECT_TRUE(pipeline.isRunning());
5|}

View File

@ -42,6 +42,66 @@ rtsp:
- 默认值30.0
- 说明:目标帧率,用于控制读取速度,设置为 0 则不限制帧率
### YOLO 模型配置
YOLO 模型相关配置位于配置文件的 `inference.model` 部分:
```yaml
inference:
model:
onnx_path: "/app/models/yolov8n.onnx" # ONNX模型路径
engine_path: "/app/models/yolov8n.engine" # TensorRT引擎路径
input_shape: [3, 640, 640] # YOLOv8n的输入尺寸
precision: "FP16" # FP32/FP16/INT8
version: "yolov8" # 可选YOLO版本信息
labels: ["person", "car", "truck"] # 可选:模型标签列表
threshold:
conf: 0.5 # 置信度阈值
nms: 0.45 # NMS阈值
gpu_id: 0 # GPU设备ID
```
#### 参数说明
- `onnx_path`: ONNX模型文件路径必须指定
- `engine_path`: TensorRT引擎文件路径必须指定
- `input_shape`:
- 类型:整数数组 [channels, height, width]
- 默认值:[3, 640, 640]
- 说明:模型输入尺寸,必须与模型定义匹配
- `precision`:
- 类型:字符串
- 默认值:"FP16"
- 可选值:["FP32", "FP16", "INT8"]
- 说明:模型精度模式,影响推理速度和精度
- `version`:
- 类型:字符串
- 默认值:空
- 可选值:任意字符串
- 说明YOLO模型版本信息用于记录和管理
- `labels`:
- 类型:字符串数组
- 默认值:空
- 说明模型支持的标签列表按class_id顺序排列
- `threshold.conf`:
- 类型:浮点数
- 默认值0.5
- 范围:[0.0, 1.0]
- 说明:检测置信度阈值,低于此值的检测框会被过滤
- `threshold.nms`:
- 类型:浮点数
- 默认值0.45
- 范围:[0.0, 1.0]
- 说明:非极大值抑制阈值,用于过滤重叠框
- `gpu_id`:
- 类型:整数
- 默认值0
- 说明使用的GPU设备ID
## 类说明
### RtspReader

View File

@ -34,10 +34,14 @@ struct ModelConfig {
float nms{0.45f}; // NMS阈值
} threshold;
int gpu_id{0}; // GPU设备ID
std::vector<std::string> labels; // 可选:模型标签列表
std::string version; // 可选YOLO版本信息
};
// 渲染配置
struct RenderConfig {
bool enable{true}; // 控制是否启用渲染功能,默认启用
// 窗口配置
struct {
std::string name{"Detection Results"};

View File

@ -11,6 +11,9 @@ namespace {
renderer::RendererConfig convertToRendererConfig(const RenderConfig& config) {
renderer::RendererConfig renderer_config;
// 转换渲染启用状态
renderer_config.enable = config.enable;
// 转换窗口配置
renderer_config.window_name = config.window.name;
renderer_config.window_width = config.window.width;

View File

@ -1,467 +1,503 @@
#include "yaml_config_parser.hpp"
#include "../common/logger.hpp"
#include <filesystem>
#include <iostream>
#include <opencv2/core.hpp>
namespace pipeline {
/// Loads the YAML file and dispatches each top-level section to its
/// dedicated parser. Returns false (after logging the reason) on the first
/// section failure, on any yaml-cpp error (missing file, bad syntax), or on
/// any other std::exception.
bool YamlConfigParser::parse(const std::string& config_file) {
    try {
        Logger::info("Parsing config file: " + config_file);
        yaml_config_ = YAML::LoadFile(config_file);
        // Parse each configuration section in turn; abort on first failure.
        if (!parseInputConfig(yaml_config_["input"])) {
            Logger::error("Failed to parse input config");
            return false;
        }
        if (!parseModelConfig(yaml_config_["inference"])) {
            Logger::error("Failed to parse model config");
            return false;
        }
        if (!parseRenderConfig(yaml_config_["render"])) {
            Logger::error("Failed to parse render config");
            return false;
        }
        if (!parseOutputConfig(yaml_config_["output"])) {
            Logger::error("Failed to parse output config");
            return false;
        }
        if (!parseLogConfig(yaml_config_["log"])) {
            Logger::error("Failed to parse log config");
            return false;
        }
        Logger::info("Successfully parsed all configurations");
        return true;
    } catch (const YAML::Exception& e) {
        Logger::error("YAML parsing error: " + std::string(e.what()));
        return false;
    } catch (const std::exception& e) {
        Logger::error("Error in parse: " + std::string(e.what()));
        return false;
    }
}
/// Cross-checks the fully parsed configuration for semantic validity.
/// Returns false (printing the reason to stderr) on the first violation.
/// NOTE(review): errors go to std::cerr here while the parse*() helpers use
/// Logger — consider unifying on Logger.
bool YamlConfigParser::validate() {
    // Basic check: at least one input source must be configured.
    if (config_.input.sources.empty()) {
        std::cerr << "Error: No input sources configured" << std::endl;
        return false;
    }
    // Validate each input source.
    for (const auto& source : config_.input.sources) {
        if (source.type.empty() || source.name.empty()) {
            std::cerr << "Error: Input source missing type or name" << std::endl;
            return false;
        }
        // RTSP sources additionally require a URL.
        if (source.type == "rtsp" && source.url.empty()) {
            std::cerr << "Error: RTSP source missing URL" << std::endl;
            return false;
        }
        if (source.buffer_size <= 0) {
            std::cerr << "Error: Invalid buffer size for source: " << source.name << std::endl;
            return false;
        }
    }
    // Validate inference configuration.
    if (config_.inference.engine_path.empty()) {
        std::cerr << "Error: Model engine path not specified" << std::endl;
        return false;
    }
    if (config_.inference.input_shape.empty()) {
        std::cerr << "Error: Model input shape not specified" << std::endl;
        return false;
    }
    // Both thresholds are probabilities and must lie in [0, 1].
    if (config_.inference.threshold.conf < 0.0f || config_.inference.threshold.conf > 1.0f) {
        std::cerr << "Error: Invalid confidence threshold (must be between 0.0 and 1.0)" << std::endl;
        return false;
    }
    if (config_.inference.threshold.nms < 0.0f || config_.inference.threshold.nms > 1.0f) {
        std::cerr << "Error: Invalid NMS threshold (must be between 0.0 and 1.0)" << std::endl;
        return false;
    }
    // Validate render configuration: window geometry and name.
    if (config_.render.window.width <= 0 || config_.render.window.height <= 0) {
        std::cerr << "Error: Invalid window dimensions" << std::endl;
        return false;
    }
    if (config_.render.window.name.empty()) {
        std::cerr << "Error: Window name not specified" << std::endl;
        return false;
    }
    // Validate the default drawing style.
    if (config_.render.default_style.transparency < 0.0f ||
        config_.render.default_style.transparency > 1.0f) {
        std::cerr << "Error: Invalid default style transparency (must be between 0.0 and 1.0)" << std::endl;
        return false;
    }
    if (config_.render.default_style.box_thickness <= 0 ||
        config_.render.default_style.font_thickness <= 0) {
        std::cerr << "Error: Invalid default style thickness values" << std::endl;
        return false;
    }
    if (config_.render.default_style.font_scale <= 0.0) {
        std::cerr << "Error: Invalid default style font scale" << std::endl;
        return false;
    }
    // Validate every per-class style override.
    for (const auto& [class_name, style] : config_.render.class_styles) {
        if (class_name.empty()) {
            std::cerr << "Error: Empty class name in style configuration" << std::endl;
            return false;
        }
        if (style.transparency < 0.0f || style.transparency > 1.0f) {
            std::cerr << "Error: Invalid transparency for class " << class_name << std::endl;
            return false;
        }
        if (style.box_thickness <= 0 || style.font_thickness <= 0) {
            std::cerr << "Error: Invalid thickness values for class " << class_name << std::endl;
            return false;
        }
        if (style.font_scale <= 0.0) {
            std::cerr << "Error: Invalid font scale for class " << class_name << std::endl;
            return false;
        }
    }
    // Validate performance-metrics overlay settings.
    if (config_.render.metrics.update_interval_ms <= 0) {
        std::cerr << "Error: Invalid metrics update interval" << std::endl;
        return false;
    }
    // Validate output targets; "video" targets need path/fps/bitrate/codec.
    for (const auto& target : config_.output.targets) {
        if (target.type.empty() || target.name.empty()) {
            std::cerr << "Error: Output target missing type or name" << std::endl;
            return false;
        }
        if (target.type == "video") {
            if (target.path.empty()) {
                std::cerr << "Error: Video output target missing path" << std::endl;
                return false;
            }
            if (target.fps <= 0) {
                std::cerr << "Error: Invalid video output fps" << std::endl;
                return false;
            }
            if (target.bitrate <= 0) {
                std::cerr << "Error: Invalid video output bitrate" << std::endl;
                return false;
            }
            if (target.codec.empty()) {
                std::cerr << "Error: Video output codec not specified" << std::endl;
                return false;
            }
        }
    }
    // Validate logging configuration.
    if (config_.log.level.empty()) {
        std::cerr << "Error: Log level not specified" << std::endl;
        return false;
    }
    if (config_.log.save_path.empty()) {
        std::cerr << "Error: Log save path not specified" << std::endl;
        return false;
    }
    return true;
}
/// Parses the "input" section: a list of sources plus max_batch_size.
/// type and name are required per source (a missing key throws and is
/// reported through the catch); url, buffer_size and outputs are optional.
bool YamlConfigParser::parseInputConfig(const YAML::Node& node) {
    try {
        // Parse the input-source list.
        if (node["sources"]) {
            for (const auto& source : node["sources"]) {
                InputSourceConfig src_config;
                src_config.type = source["type"].as<std::string>();
                src_config.name = source["name"].as<std::string>();
                if (source["url"]) {
                    src_config.url = source["url"].as<std::string>();
                }
                if (source["buffer_size"]) {
                    src_config.buffer_size = source["buffer_size"].as<int>();
                }
                // Names of the output targets this source feeds.
                if (source["outputs"]) {
                    src_config.outputs = source["outputs"].as<std::vector<std::string>>();
                }
                config_.input.sources.push_back(src_config);
            }
        }
        // Maximum inference batch size.
        if (node["max_batch_size"]) {
            config_.input.max_batch_size = node["max_batch_size"].as<int>();
        }
        return true;
    } catch (const YAML::Exception& e) {
        Logger::error("Error parsing input config: " + std::string(e.what()));
        return false;
    }
}
/// Parses the "inference" section. Requires inference.model with onnx_path,
/// engine_path and input_shape; precision, threshold and gpu_id are optional
/// with defaults ("FP16" / 0.5 / 0.45 / GPU 0).
/// NOTE(review): onnx_path is required and checked but never stored here —
/// confirm whether the engine-build step reads it through getYamlConfig().
/// NOTE(review): thresholds are read from the section root
/// (inference.threshold) while the README documents inference.model.threshold
/// — verify which layout shipped configs actually use.
bool YamlConfigParser::parseModelConfig(const YAML::Node& node) {
    try {
        if (!node["model"]) {
            Logger::error("Model configuration section not found");
            return false;
        }
        const auto& model = node["model"];
        // Required fields.
        if (!model["onnx_path"]) {
            Logger::error("ONNX path not specified in model config");
            return false;
        }
        if (!model["engine_path"]) {
            Logger::error("Engine path not specified in model config");
            return false;
        }
        if (!model["input_shape"]) {
            Logger::error("Input shape not specified in model config");
            return false;
        }
        // Core model settings.
        config_.inference.engine_path = model["engine_path"].as<std::string>();
        config_.inference.input_shape = model["input_shape"].as<std::vector<int>>();
        config_.inference.precision = model["precision"] ? model["precision"].as<std::string>() : "FP16";
        // Detection thresholds (defaults applied when keys are absent).
        if (node["threshold"]) {
            config_.inference.threshold.conf = node["threshold"]["conf"] ?
                node["threshold"]["conf"].as<float>() : 0.5f;
            config_.inference.threshold.nms = node["threshold"]["nms"] ?
                node["threshold"]["nms"].as<float>() : 0.45f;
        }
        // GPU device id.
        config_.inference.gpu_id = node["gpu_id"] ? node["gpu_id"].as<int>() : 0;
        // Log the effective configuration.
        // NOTE(review): indexing input_shape[0..2] assumes at least 3 dims —
        // a shorter configured array would read out of bounds; confirm
        // upstream validation guarantees this.
        Logger::info("Loaded model configuration:");
        Logger::info(" Engine path: " + config_.inference.engine_path);
        Logger::info(" Input shape: " + std::to_string(config_.inference.input_shape[0]) + "," +
            std::to_string(config_.inference.input_shape[1]) + "," +
            std::to_string(config_.inference.input_shape[2]));
        Logger::info(" Precision: " + config_.inference.precision);
        Logger::info(" GPU ID: " + std::to_string(config_.inference.gpu_id));
        Logger::info(" Confidence threshold: " + std::to_string(config_.inference.threshold.conf));
        Logger::info(" NMS threshold: " + std::to_string(config_.inference.threshold.nms));
        return true;
    } catch (const YAML::Exception& e) {
        Logger::error("Failed to parse model config: " + std::string(e.what()));
        return false;
    } catch (const std::exception& e) {
        Logger::error("Error in parseModelConfig: " + std::string(e.what()));
        return false;
    }
}
/// Parses the "render" section: window geometry, the default drawing style,
/// per-class style overrides, and performance-metrics overlay settings.
/// Every field is optional; absent fields keep their struct defaults.
bool YamlConfigParser::parseRenderConfig(const YAML::Node& node) {
    try {
        // Window configuration.
        if (node["window"]) {
            const auto& window = node["window"];
            if (window["name"]) {
                config_.render.window.name = window["name"].as<std::string>();
            }
            if (window["width"]) {
                config_.render.window.width = window["width"].as<int>();
            }
            if (window["height"]) {
                config_.render.window.height = window["height"].as<int>();
            }
            if (window["fullscreen"]) {
                config_.render.window.fullscreen = window["fullscreen"].as<bool>();
            }
        }
        // Default drawing style applied to classes without an override.
        if (node["default_style"]) {
            const auto& style = node["default_style"];
            // Colors are 3-element BGR arrays; shorter arrays are ignored.
            if (style["box_color"]) {
                auto color = style["box_color"].as<std::vector<int>>();
                if (color.size() >= 3) {
                    config_.render.default_style.box_color = cv::Scalar(color[0], color[1], color[2]);
                }
            }
            if (style["text_color"]) {
                auto color = style["text_color"].as<std::vector<int>>();
                if (color.size() >= 3) {
                    config_.render.default_style.text_color = cv::Scalar(color[0], color[1], color[2]);
                }
            }
            if (style["transparency"]) {
                config_.render.default_style.transparency = style["transparency"].as<float>();
            }
            if (style["box_thickness"]) {
                config_.render.default_style.box_thickness = style["box_thickness"].as<int>();
            }
            if (style["font_scale"]) {
                config_.render.default_style.font_scale = style["font_scale"].as<double>();
            }
            if (style["font_thickness"]) {
                config_.render.default_style.font_thickness = style["font_thickness"].as<int>();
            }
        }
        // Per-class style overrides, keyed by class name.
        if (node["class_styles"]) {
            for (const auto& class_style : node["class_styles"]) {
                std::string class_name = class_style.first.as<std::string>();
                const auto& style = class_style.second;
                RenderConfig::ClassStyle class_config;
                if (style["box_color"]) {
                    auto color = style["box_color"].as<std::vector<int>>();
                    if (color.size() >= 3) {
                        class_config.box_color = cv::Scalar(color[0], color[1], color[2]);
                    }
                }
                if (style["text_color"]) {
                    auto color = style["text_color"].as<std::vector<int>>();
                    if (color.size() >= 3) {
                        class_config.text_color = cv::Scalar(color[0], color[1], color[2]);
                    }
                }
                if (style["transparency"]) {
                    class_config.transparency = style["transparency"].as<float>();
                }
                if (style["box_thickness"]) {
                    class_config.box_thickness = style["box_thickness"].as<int>();
                }
                if (style["font_scale"]) {
                    class_config.font_scale = style["font_scale"].as<double>();
                }
                if (style["font_thickness"]) {
                    class_config.font_thickness = style["font_thickness"].as<int>();
                }
                config_.render.class_styles[class_name] = class_config;
            }
        }
        // Performance-metrics overlay settings.
        if (node["metrics"]) {
            const auto& metrics = node["metrics"];
            if (metrics["show_fps"]) {
                config_.render.metrics.show_fps = metrics["show_fps"].as<bool>();
            }
            if (metrics["show_inference_time"]) {
                config_.render.metrics.show_inference_time = metrics["show_inference_time"].as<bool>();
            }
            if (metrics["show_gpu_usage"]) {
                config_.render.metrics.show_gpu_usage = metrics["show_gpu_usage"].as<bool>();
            }
            if (metrics["update_interval_ms"]) {
                config_.render.metrics.update_interval_ms = metrics["update_interval_ms"].as<int>();
            }
        }
        return true;
    } catch (const YAML::Exception& e) {
        Logger::error("Error parsing render config: " + std::string(e.what()));
        return false;
    }
}
/// Parses the "output" section: a list of output targets.
/// type and name are required per target (a missing key throws and is
/// reported); path, fps, codec and bitrate are optional.
bool YamlConfigParser::parseOutputConfig(const YAML::Node& node) {
    try {
        if (node["targets"]) {
            for (const auto& target : node["targets"]) {
                OutputTargetConfig target_config;
                target_config.type = target["type"].as<std::string>();
                target_config.name = target["name"].as<std::string>();
                if (target["path"]) {
                    target_config.path = target["path"].as<std::string>();
                }
                if (target["fps"]) {
                    target_config.fps = target["fps"].as<int>();
                }
                if (target["codec"]) {
                    target_config.codec = target["codec"].as<std::string>();
                }
                if (target["bitrate"]) {
                    target_config.bitrate = target["bitrate"].as<int>();
                }
                config_.output.targets.push_back(target_config);
            }
        }
        return true;
    } catch (const YAML::Exception& e) {
        // Fix: the exception was previously swallowed silently, leaving the
        // caller's generic "Failed to parse output config" as the only
        // diagnostic. Log the yaml-cpp detail like every sibling parser does.
        Logger::error("Error parsing output config: " + std::string(e.what()));
        return false;
    }
}
/// Parses the "log" section: optional "level" and "save_path" strings.
///
/// Fix: a key that is present but has no value (e.g. "save_path:" alone on a
/// line) parses as a YAML null node; calling as<std::string>() on it throws
/// "bad conversion", which this function previously swallowed with no
/// message. Skip null values explicitly and log any remaining yaml-cpp error.
bool YamlConfigParser::parseLogConfig(const YAML::Node& node) {
    try {
        // Only convert keys that carry an actual scalar value.
        if (node["level"] && !node["level"].IsNull()) {
            config_.log.level = node["level"].as<std::string>();
        }
        if (node["save_path"] && !node["save_path"].IsNull()) {
            config_.log.save_path = node["save_path"].as<std::string>();
        }
        return true;
    } catch (const YAML::Exception& e) {
        Logger::error("Error parsing log config: " + std::string(e.what()));
        return false;
    }
}
/// Fills rtsp_config from the optional top-level "rtsp" section.
/// Returns false when the section is absent or a value fails to convert.
bool YamlConfigParser::parseRtspConfig(const YAML::Node& config, RtspReader::Config& rtsp_config) {
    try {
        auto rtsp_node = config["rtsp"];
        if (!rtsp_node) {
            return false;
        }
        // Reset to defaults so omitted keys keep their documented values.
        rtsp_config = RtspReader::Config();
        // Read whichever tuning knobs are present.
        if (rtsp_node["buffer_size"]) {
            rtsp_config.buffer_size = rtsp_node["buffer_size"].as<int>();
        }
        if (rtsp_node["max_retry_count"]) {
            rtsp_config.max_retry_count = rtsp_node["max_retry_count"].as<int>();
        }
        if (rtsp_node["retry_interval_ms"]) {
            rtsp_config.retry_interval_ms = rtsp_node["retry_interval_ms"].as<int>();
        }
        if (rtsp_node["frame_timeout_ms"]) {
            rtsp_config.frame_timeout_ms = rtsp_node["frame_timeout_ms"].as<int>();
        }
        // Target frame rate; per the README, 0 means "do not limit".
        if (rtsp_node["target_fps"]) {
            rtsp_config.target_fps = rtsp_node["target_fps"].as<float>();
        }
        return true;
    } catch (const YAML::Exception& e) {
        Logger::error("Error parsing RTSP config: " + std::string(e.what()));
        return false;
    }
}
// Factory: returns the YAML-backed implementation of ConfigParser.
std::unique_ptr<ConfigParser> createConfigParser() {
    return std::make_unique<YamlConfigParser>();
}
#include "yaml_config_parser.hpp"
#include "../common/logger.hpp"
#include <filesystem>
#include <iostream>
#include <opencv2/core.hpp>
namespace pipeline {
bool YamlConfigParser::parse(const std::string& config_file) {
try {
Logger::info("Parsing config file: " + config_file);
yaml_config_ = YAML::LoadFile(config_file);
// 解析各个配置部分
if (!parseInputConfig(yaml_config_["input"])) {
Logger::error("Failed to parse input config");
return false;
}
if (!parseModelConfig(yaml_config_["inference"])) {
Logger::error("Failed to parse model config");
return false;
}
if (!parseRenderConfig(yaml_config_["render"])) {
Logger::error("Failed to parse render config");
return false;
}
if (!parseOutputConfig(yaml_config_["output"])) {
Logger::error("Failed to parse output config");
return false;
}
if (!parseLogConfig(yaml_config_["log"])) {
Logger::error("Failed to parse log config");
return false;
}
Logger::info("Successfully parsed all configurations");
return true;
} catch (const YAML::Exception& e) {
Logger::error("YAML parsing error: " + std::string(e.what()));
return false;
} catch (const std::exception& e) {
Logger::error("Error in parse: " + std::string(e.what()));
return false;
}
}
/// Cross-checks the fully parsed configuration for semantic validity.
/// Returns false (printing the reason to stderr) on the first violation.
/// NOTE(review): errors go to std::cerr here while the parse*() helpers use
/// Logger — consider unifying on Logger.
bool YamlConfigParser::validate() {
    // Basic check: at least one input source must be configured.
    if (config_.input.sources.empty()) {
        std::cerr << "Error: No input sources configured" << std::endl;
        return false;
    }
    // Validate each input source.
    for (const auto& source : config_.input.sources) {
        if (source.type.empty() || source.name.empty()) {
            std::cerr << "Error: Input source missing type or name" << std::endl;
            return false;
        }
        // RTSP sources additionally require a URL.
        if (source.type == "rtsp" && source.url.empty()) {
            std::cerr << "Error: RTSP source missing URL" << std::endl;
            return false;
        }
        if (source.buffer_size <= 0) {
            std::cerr << "Error: Invalid buffer size for source: " << source.name << std::endl;
            return false;
        }
    }
    // Validate inference configuration.
    if (config_.inference.engine_path.empty()) {
        std::cerr << "Error: Model engine path not specified" << std::endl;
        return false;
    }
    if (config_.inference.input_shape.empty()) {
        std::cerr << "Error: Model input shape not specified" << std::endl;
        return false;
    }
    // Both thresholds are probabilities and must lie in [0, 1].
    if (config_.inference.threshold.conf < 0.0f || config_.inference.threshold.conf > 1.0f) {
        std::cerr << "Error: Invalid confidence threshold (must be between 0.0 and 1.0)" << std::endl;
        return false;
    }
    if (config_.inference.threshold.nms < 0.0f || config_.inference.threshold.nms > 1.0f) {
        std::cerr << "Error: Invalid NMS threshold (must be between 0.0 and 1.0)" << std::endl;
        return false;
    }
    // Validate render configuration: window geometry and name.
    if (config_.render.window.width <= 0 || config_.render.window.height <= 0) {
        std::cerr << "Error: Invalid window dimensions" << std::endl;
        return false;
    }
    if (config_.render.window.name.empty()) {
        std::cerr << "Error: Window name not specified" << std::endl;
        return false;
    }
    // Validate the default drawing style.
    if (config_.render.default_style.transparency < 0.0f ||
        config_.render.default_style.transparency > 1.0f) {
        std::cerr << "Error: Invalid default style transparency (must be between 0.0 and 1.0)" << std::endl;
        return false;
    }
    if (config_.render.default_style.box_thickness <= 0 ||
        config_.render.default_style.font_thickness <= 0) {
        std::cerr << "Error: Invalid default style thickness values" << std::endl;
        return false;
    }
    if (config_.render.default_style.font_scale <= 0.0) {
        std::cerr << "Error: Invalid default style font scale" << std::endl;
        return false;
    }
    // Validate every per-class style override.
    for (const auto& [class_name, style] : config_.render.class_styles) {
        if (class_name.empty()) {
            std::cerr << "Error: Empty class name in style configuration" << std::endl;
            return false;
        }
        if (style.transparency < 0.0f || style.transparency > 1.0f) {
            std::cerr << "Error: Invalid transparency for class " << class_name << std::endl;
            return false;
        }
        if (style.box_thickness <= 0 || style.font_thickness <= 0) {
            std::cerr << "Error: Invalid thickness values for class " << class_name << std::endl;
            return false;
        }
        if (style.font_scale <= 0.0) {
            std::cerr << "Error: Invalid font scale for class " << class_name << std::endl;
            return false;
        }
    }
    // Validate performance-metrics overlay settings.
    if (config_.render.metrics.update_interval_ms <= 0) {
        std::cerr << "Error: Invalid metrics update interval" << std::endl;
        return false;
    }
    // Validate output targets; "video" targets need path/fps/bitrate/codec.
    for (const auto& target : config_.output.targets) {
        if (target.type.empty() || target.name.empty()) {
            std::cerr << "Error: Output target missing type or name" << std::endl;
            return false;
        }
        if (target.type == "video") {
            if (target.path.empty()) {
                std::cerr << "Error: Video output target missing path" << std::endl;
                return false;
            }
            if (target.fps <= 0) {
                std::cerr << "Error: Invalid video output fps" << std::endl;
                return false;
            }
            if (target.bitrate <= 0) {
                std::cerr << "Error: Invalid video output bitrate" << std::endl;
                return false;
            }
            if (target.codec.empty()) {
                std::cerr << "Error: Video output codec not specified" << std::endl;
                return false;
            }
        }
    }
    // Validate logging configuration.
    if (config_.log.level.empty()) {
        std::cerr << "Error: Log level not specified" << std::endl;
        return false;
    }
    if (config_.log.save_path.empty()) {
        std::cerr << "Error: Log save path not specified" << std::endl;
        return false;
    }
    return true;
}
/// Parses the "input" section: the list of input sources plus the optional
/// max_batch_size. Per source, type and name are required (a missing key
/// throws and is reported); url, buffer_size and outputs are optional.
bool YamlConfigParser::parseInputConfig(const YAML::Node& node) {
    try {
        // Collect every configured input source.
        if (node["sources"]) {
            for (const auto& entry : node["sources"]) {
                InputSourceConfig parsed;
                parsed.type = entry["type"].as<std::string>();
                parsed.name = entry["name"].as<std::string>();
                if (entry["url"]) {
                    parsed.url = entry["url"].as<std::string>();
                }
                if (entry["buffer_size"]) {
                    parsed.buffer_size = entry["buffer_size"].as<int>();
                }
                // Names of the output targets this source feeds.
                if (entry["outputs"]) {
                    parsed.outputs = entry["outputs"].as<std::vector<std::string>>();
                }
                config_.input.sources.push_back(parsed);
            }
        }
        // Maximum inference batch size.
        if (node["max_batch_size"]) {
            config_.input.max_batch_size = node["max_batch_size"].as<int>();
        }
        return true;
    } catch (const YAML::Exception& e) {
        Logger::error("Error parsing input config: " + std::string(e.what()));
        return false;
    }
}
/// Parses the "inference" section into config_.inference.
///
/// Required fields (under inference.model): onnx_path, engine_path,
/// input_shape. Optional: precision (default "FP16"), version, labels,
/// threshold.conf / threshold.nms (defaults 0.5 / 0.45), gpu_id (default 0).
///
/// NOTE(review): onnx_path is required and checked but never stored here —
/// confirm whether the engine-build step reads it through getYamlConfig().
bool YamlConfigParser::parseModelConfig(const YAML::Node& node) {
    try {
        if (!node["model"]) {
            Logger::error("Model configuration section not found");
            return false;
        }
        const auto& model = node["model"];
        // Required fields.
        if (!model["onnx_path"]) {
            Logger::error("ONNX path not specified in model config");
            return false;
        }
        if (!model["engine_path"]) {
            Logger::error("Engine path not specified in model config");
            return false;
        }
        if (!model["input_shape"]) {
            Logger::error("Input shape not specified in model config");
            return false;
        }
        // Core model settings.
        config_.inference.engine_path = model["engine_path"].as<std::string>();
        config_.inference.input_shape = model["input_shape"].as<std::vector<int>>();
        config_.inference.precision =
            model["precision"] ? model["precision"].as<std::string>() : "FP16";
        // Optional version tag and class-label list (label order == class_id).
        if (model["version"]) {
            config_.inference.version = model["version"].as<std::string>();
        }
        if (model["labels"]) {
            config_.inference.labels = model["labels"].as<std::vector<std::string>>();
        }
        // Fix: the README documents thresholds under inference.model.threshold,
        // but this code only read inference.threshold. Accept both layouts,
        // preferring the documented model-level location.
        const YAML::Node threshold =
            model["threshold"] ? model["threshold"] : node["threshold"];
        if (threshold) {
            config_.inference.threshold.conf =
                threshold["conf"] ? threshold["conf"].as<float>() : 0.5f;
            config_.inference.threshold.nms =
                threshold["nms"] ? threshold["nms"].as<float>() : 0.45f;
        }
        // GPU device id.
        config_.inference.gpu_id = node["gpu_id"] ? node["gpu_id"].as<int>() : 0;
        // Log the effective configuration.
        Logger::info("Loaded model configuration:");
        Logger::info(" Engine path: " + config_.inference.engine_path);
        // Fix: the original indexed input_shape[0..2] unconditionally, which
        // is out of bounds when the configured array has fewer than 3 dims.
        std::string shape_str;
        for (size_t i = 0; i < config_.inference.input_shape.size(); ++i) {
            if (i) shape_str += ",";
            shape_str += std::to_string(config_.inference.input_shape[i]);
        }
        Logger::info(" Input shape: " + shape_str);
        Logger::info(" Precision: " + config_.inference.precision);
        Logger::info(" GPU ID: " + std::to_string(config_.inference.gpu_id));
        Logger::info(" Confidence threshold: " + std::to_string(config_.inference.threshold.conf));
        Logger::info(" NMS threshold: " + std::to_string(config_.inference.threshold.nms));
        if (!config_.inference.version.empty()) {
            Logger::info(" Version: " + config_.inference.version);
        }
        if (!config_.inference.labels.empty()) {
            // Join labels with ", " without the trailing-separator/substr hack.
            std::string labels_str = " Labels: ";
            for (size_t i = 0; i < config_.inference.labels.size(); ++i) {
                if (i) labels_str += ", ";
                labels_str += config_.inference.labels[i];
            }
            Logger::info(labels_str);
        }
        return true;
    } catch (const YAML::Exception& e) {
        Logger::error("Error parsing model config: " + std::string(e.what()));
        return false;
    } catch (const std::exception& e) {
        // Parity with parse(): never let a non-YAML exception escape.
        Logger::error("Error parsing model config: " + std::string(e.what()));
        return false;
    }
}
/// Parses the "render" section: enable flag, window geometry, the default
/// drawing style, per-class style overrides, and performance-metrics overlay
/// settings. Every field is optional; absent fields keep struct defaults.
bool YamlConfigParser::parseRenderConfig(const YAML::Node& node) {
    try {
        // Global on/off switch for the rendering stage.
        if (node["enable"]) {
            config_.render.enable = node["enable"].as<bool>();
        }
        // Window configuration.
        if (node["window"]) {
            const auto& window = node["window"];
            if (window["name"]) {
                config_.render.window.name = window["name"].as<std::string>();
            }
            if (window["width"]) {
                config_.render.window.width = window["width"].as<int>();
            }
            if (window["height"]) {
                config_.render.window.height = window["height"].as<int>();
            }
            if (window["fullscreen"]) {
                config_.render.window.fullscreen = window["fullscreen"].as<bool>();
            }
        }
        // Default drawing style applied to classes without an override.
        if (node["default_style"]) {
            const auto& style = node["default_style"];
            // Colors are 3-element BGR arrays; shorter arrays are ignored.
            if (style["box_color"]) {
                auto color = style["box_color"].as<std::vector<int>>();
                if (color.size() >= 3) {
                    config_.render.default_style.box_color = cv::Scalar(color[0], color[1], color[2]);
                }
            }
            if (style["text_color"]) {
                auto color = style["text_color"].as<std::vector<int>>();
                if (color.size() >= 3) {
                    config_.render.default_style.text_color = cv::Scalar(color[0], color[1], color[2]);
                }
            }
            if (style["transparency"]) {
                config_.render.default_style.transparency = style["transparency"].as<float>();
            }
            if (style["box_thickness"]) {
                config_.render.default_style.box_thickness = style["box_thickness"].as<int>();
            }
            if (style["font_scale"]) {
                config_.render.default_style.font_scale = style["font_scale"].as<double>();
            }
            if (style["font_thickness"]) {
                config_.render.default_style.font_thickness = style["font_thickness"].as<int>();
            }
        }
        // Per-class style overrides, keyed by class name.
        if (node["class_styles"]) {
            for (const auto& class_style : node["class_styles"]) {
                std::string class_name = class_style.first.as<std::string>();
                const auto& style = class_style.second;
                RenderConfig::ClassStyle class_config;
                if (style["box_color"]) {
                    auto color = style["box_color"].as<std::vector<int>>();
                    if (color.size() >= 3) {
                        class_config.box_color = cv::Scalar(color[0], color[1], color[2]);
                    }
                }
                if (style["text_color"]) {
                    auto color = style["text_color"].as<std::vector<int>>();
                    if (color.size() >= 3) {
                        class_config.text_color = cv::Scalar(color[0], color[1], color[2]);
                    }
                }
                if (style["transparency"]) {
                    class_config.transparency = style["transparency"].as<float>();
                }
                if (style["box_thickness"]) {
                    class_config.box_thickness = style["box_thickness"].as<int>();
                }
                if (style["font_scale"]) {
                    class_config.font_scale = style["font_scale"].as<double>();
                }
                if (style["font_thickness"]) {
                    class_config.font_thickness = style["font_thickness"].as<int>();
                }
                config_.render.class_styles[class_name] = class_config;
            }
        }
        // Performance-metrics overlay settings.
        if (node["metrics"]) {
            const auto& metrics = node["metrics"];
            if (metrics["show_fps"]) {
                config_.render.metrics.show_fps = metrics["show_fps"].as<bool>();
            }
            if (metrics["show_inference_time"]) {
                config_.render.metrics.show_inference_time = metrics["show_inference_time"].as<bool>();
            }
            if (metrics["show_gpu_usage"]) {
                config_.render.metrics.show_gpu_usage = metrics["show_gpu_usage"].as<bool>();
            }
            if (metrics["update_interval_ms"]) {
                config_.render.metrics.update_interval_ms = metrics["update_interval_ms"].as<int>();
            }
        }
        return true;
    } catch (const YAML::Exception& e) {
        Logger::error("Error parsing render config: " + std::string(e.what()));
        return false;
    }
}
/// Parses the "output" section: a list of output targets.
/// type and name are required per target (a missing key throws and is
/// reported); path, fps, codec and bitrate are optional.
bool YamlConfigParser::parseOutputConfig(const YAML::Node& node) {
    try {
        if (node["targets"]) {
            for (const auto& target : node["targets"]) {
                OutputTargetConfig target_config;
                target_config.type = target["type"].as<std::string>();
                target_config.name = target["name"].as<std::string>();
                if (target["path"]) {
                    target_config.path = target["path"].as<std::string>();
                }
                if (target["fps"]) {
                    target_config.fps = target["fps"].as<int>();
                }
                if (target["codec"]) {
                    target_config.codec = target["codec"].as<std::string>();
                }
                if (target["bitrate"]) {
                    target_config.bitrate = target["bitrate"].as<int>();
                }
                config_.output.targets.push_back(target_config);
            }
        }
        return true;
    } catch (const YAML::Exception& e) {
        // Fix: the exception was previously swallowed silently, leaving the
        // caller's generic "Failed to parse output config" as the only
        // diagnostic. Log the yaml-cpp detail like every sibling parser does.
        Logger::error("Error parsing output config: " + std::string(e.what()));
        return false;
    }
}
/// Parses the "log" section: required "level" (debug|info|warn|error) and
/// required "save_path".
///
/// Fix: a key that is present but has no value (e.g. "save_path:" alone on a
/// line, as in the shipped test config) parses as a YAML null node;
/// converting it with as<std::string>() throws "bad conversion" — exactly the
/// failure recorded in the committed error log. Detect null nodes up front
/// and report a clear, field-specific error instead.
bool YamlConfigParser::parseLogConfig(const YAML::Node& node) {
    try {
        // Required fields must exist AND carry an actual value.
        if (!node["level"] || node["level"].IsNull()) {
            Logger::error("Log level not specified");
            return false;
        }
        if (!node["save_path"] || node["save_path"].IsNull()) {
            Logger::error("Log save path not specified");
            return false;
        }
        const std::string level = node["level"].as<std::string>();
        // Only the four known severities are accepted (an empty string also
        // fails this check, preserving the original rejection of "").
        if (level != "debug" && level != "info" && level != "warn" && level != "error") {
            Logger::error("Invalid log level: " + level);
            return false;
        }
        config_.log.level = level;
        config_.log.save_path = node["save_path"].as<std::string>();
        return true;
    } catch (const YAML::Exception& e) {
        Logger::error("Error parsing log config: " + std::string(e.what()));
        return false;
    }
}
/// Fills rtsp_config from the optional top-level "rtsp" section.
/// Returns false when the section is absent or a value fails to convert;
/// fields not present in the file keep their RtspReader::Config defaults.
bool YamlConfigParser::parseRtspConfig(const YAML::Node& config, RtspReader::Config& rtsp_config) {
    try {
        const auto rtsp_node = config["rtsp"];
        if (!rtsp_node) {
            return false;
        }
        // Start from a default-constructed config so omitted keys keep
        // their documented default values.
        rtsp_config = RtspReader::Config();
        // Integer tuning knobs share one read pattern; factor it out.
        const auto read_int = [&rtsp_node](const char* key, int& out) {
            if (rtsp_node[key]) {
                out = rtsp_node[key].as<int>();
            }
        };
        read_int("buffer_size", rtsp_config.buffer_size);
        read_int("max_retry_count", rtsp_config.max_retry_count);
        read_int("retry_interval_ms", rtsp_config.retry_interval_ms);
        read_int("frame_timeout_ms", rtsp_config.frame_timeout_ms);
        // Target frame rate; per the README, 0 means "do not limit".
        if (rtsp_node["target_fps"]) {
            rtsp_config.target_fps = rtsp_node["target_fps"].as<float>();
        }
        return true;
    } catch (const YAML::Exception& e) {
        Logger::error("Error parsing RTSP config: " + std::string(e.what()));
        return false;
    }
}
// Factory: returns the YAML-backed implementation of ConfigParser.
std::unique_ptr<ConfigParser> createConfigParser() {
    return std::make_unique<YamlConfigParser>();
}
} // namespace pipeline

View File

@ -1,37 +1,37 @@
#pragma once
#include <string>
#include <memory>
#include <iostream>
#include <yaml-cpp/yaml.h>
#include "../input/rtsp_reader.hpp"
#include "config_parser.hpp"
namespace pipeline {
/// YAML-backed implementation of ConfigParser (yaml-cpp).
/// Typical usage: parse() first, then validate(), then read via getConfig().
class YamlConfigParser : public ConfigParser {
public:
    YamlConfigParser() = default;
    ~YamlConfigParser() override = default;

    // Loads and parses the config file; false on any error.
    // NOTE(review): validate() and getConfig() are marked override but
    // parse() is not — confirm whether ConfigParser declares it virtual.
    bool parse(const std::string& config_file);
    bool validate() override;
    const PipelineConfig& getConfig() const override { return config_; }

    // Per-section parsers; each returns false after logging on failure.
    bool parseInputConfig(const YAML::Node& node);
    bool parseModelConfig(const YAML::Node& node);
    bool parseRenderConfig(const YAML::Node& node);
    bool parseOutputConfig(const YAML::Node& node);
    bool parseLogConfig(const YAML::Node& node);
    bool parseRtspConfig(const YAML::Node& config, RtspReader::Config& rtsp_config);

    // Raw parsed YAML document, for sections not mapped into PipelineConfig.
    const YAML::Node& getYamlConfig() const { return yaml_config_; }

private:
    PipelineConfig config_;    // typed configuration filled by parse()
    YAML::Node yaml_config_;   // raw YAML document
};
#pragma once
#include <string>
#include <memory>
#include <iostream>
#include <yaml-cpp/yaml.h>
#include "../input/rtsp_reader.hpp"
#include "config_parser.hpp"
namespace pipeline {
/// YAML-backed implementation of ConfigParser (yaml-cpp).
/// Typical usage: parse() first, then validate(), then read via getConfig().
class YamlConfigParser : public ConfigParser {
public:
    YamlConfigParser() = default;
    ~YamlConfigParser() override = default;

    // Loads and parses the config file; false on any error.
    // NOTE(review): validate() and getConfig() are marked override but
    // parse() is not — confirm whether ConfigParser declares it virtual.
    bool parse(const std::string& config_file);
    bool validate() override;
    const PipelineConfig& getConfig() const override { return config_; }

    // Per-section parsers; each returns false after logging on failure.
    bool parseInputConfig(const YAML::Node& node);
    bool parseModelConfig(const YAML::Node& node);
    bool parseRenderConfig(const YAML::Node& node);
    bool parseOutputConfig(const YAML::Node& node);
    bool parseLogConfig(const YAML::Node& node);
    bool parseRtspConfig(const YAML::Node& config, RtspReader::Config& rtsp_config);

    // Raw parsed YAML document, for sections not mapped into PipelineConfig.
    const YAML::Node& getYamlConfig() const { return yaml_config_; }

private:
    PipelineConfig config_;    // typed configuration filled by parse()
    YAML::Node yaml_config_;   // raw YAML document
};
} // namespace pipeline

View File

@ -1,87 +1,90 @@
# Pipeline配置文件
input:
sources:
- type: rtsp
name: "camera1"
url: "rtsp://10.0.0.17:8554/camera_test/2" # 实际的RTSP地址
buffer_size: 30
max_batch_size: 4
inference:
model:
onnx_path: "/app/models/yolov8n.onnx" # ONNX模型路径
engine_path: "/app/models/yolov8n.engine" # TensorRT引擎路径
input_shape: [3, 640, 640] # YOLOv8n的输入尺寸
precision: "FP16" # FP32/FP16/INT8
threshold:
conf: 0.5
nms: 0.45
gpu_id: 0
render:
window:
name: "Detection Results"
width: 1280
height: 720
fullscreen: false
# 默认渲染样式
default_style:
box_color: [0, 255, 0] # BGR格式默认绿色
text_color: [255, 255, 255] # BGR格式默认白色
transparency: 0.0 # 0.0-1.00表示不透明
box_thickness: 2
font_scale: 0.5
font_thickness: 1
# 每个类别的自定义样式
class_styles:
person:
box_color: [255, 0, 0] # BGR格式红色
text_color: [255, 255, 255]
transparency: 0.2
box_thickness: 2
font_scale: 0.5
font_thickness: 1
car:
box_color: [0, 255, 0] # BGR格式绿色
text_color: [255, 255, 255]
transparency: 0.2
box_thickness: 2
font_scale: 0.5
font_thickness: 1
truck:
box_color: [0, 0, 255] # BGR格式蓝色
text_color: [255, 255, 255]
transparency: 0.2
box_thickness: 2
font_scale: 0.5
font_thickness: 1
# 性能指标显示设置
metrics:
show_fps: true
show_inference_time: true
show_gpu_usage: true
update_interval_ms: 1000
output:
targets:
- type: "video"
name: "output_video"
path: "/output/result.mp4" # 输出MP4文件路径
fps: 30
codec: "h264" # 视频编码器
bitrate: 4000000 # 4Mbps
- type: "rtsp"
name: "output_rtsp"
path: "rtsp://localhost:8554/live" # RTSP推流地址
fps: 30
codec: "h264" # 视频编码器
bitrate: 4000000 # 4Mbps
# 日志配置
log:
level: "info" # debug/info/warn/error
save_path: "logs/" # 日志保存路径
# Pipeline配置文件
input:
sources:
- type: rtsp
name: "camera1"
url: "rtsp://10.0.0.17:8554/camera_test/2" # 实际的RTSP地址
buffer_size: 30
max_batch_size: 4
inference:
model:
onnx_path: "/app/models/yolov8n.onnx" # ONNX模型路径
engine_path: "/app/models/yolov8n.engine" # TensorRT引擎路径
input_shape: [3, 640, 640] # YOLOv8n的输入尺寸
precision: "FP16" # FP32/FP16/INT8
version: "yolov8" # 可选YOLO版本信息
labels: ["person", "car", "truck"] # 可选:模型标签列表
threshold:
conf: 0.5
nms: 0.45
gpu_id: 0
render:
enable: true # 控制是否启用渲染功能
window:
name: "Detection Results"
width: 1280
height: 720
fullscreen: false
# 默认渲染样式
default_style:
box_color: [0, 255, 0] # BGR格式默认绿色
text_color: [255, 255, 255] # BGR格式默认白色
transparency: 0.0 # 0.0-1.00表示不透明
box_thickness: 2
font_scale: 0.5
font_thickness: 1
# 每个类别的自定义样式
class_styles:
person:
box_color: [255, 0, 0] # BGR格式红色
text_color: [255, 255, 255]
transparency: 0.2
box_thickness: 2
font_scale: 0.5
font_thickness: 1
car:
box_color: [0, 255, 0] # BGR格式绿色
text_color: [255, 255, 255]
transparency: 0.2
box_thickness: 2
font_scale: 0.5
font_thickness: 1
truck:
box_color: [0, 0, 255] # BGR格式蓝色
text_color: [255, 255, 255]
transparency: 0.2
box_thickness: 2
font_scale: 0.5
font_thickness: 1
# 性能指标显示设置
metrics:
show_fps: true
show_inference_time: true
show_gpu_usage: true
update_interval_ms: 1000
output:
targets:
- type: "video"
name: "output_video"
path: "/output/result.mp4" # 输出MP4文件路径
fps: 30
codec: "h264" # 视频编码器
bitrate: 4000000 # 4Mbps
- type: "rtsp"
name: "output_rtsp"
path: "rtsp://localhost:8554/live" # RTSP推流地址
fps: 30
codec: "h264" # 视频编码器
bitrate: 4000000 # 4Mbps
# 日志配置
log:
level: "info" # debug/info/warn/error
save_path: "logs/" # 日志保存路径

View File

@ -41,6 +41,14 @@ bool Renderer::render(const cv::Mat& frame,
}
try {
// 如果渲染功能被禁用,直接返回原始帧的副本
if (!config_.enable) {
Logger::info("Rendering is disabled, returning original frame");
last_frame_ = frame.clone();
return true;
}
Logger::info("Rendering is enabled, processing frame");
// 创建帧的副本用于绘制
cv::Mat display_frame = frame.clone();

View File

@ -13,6 +13,7 @@ namespace renderer { // 添加renderer命名空间
// 渲染器配置结构
struct RendererConfig {
bool enable; // 控制是否启用渲染功能,从配置文件中读取
std::string window_name = "Detection Results";
int window_width = 1280;
int window_height = 720;

View File

@ -0,0 +1 @@

BIN
test_data/videos/raw.mp4 Normal file

Binary file not shown.

View File

@ -6,6 +6,9 @@ find_package(OpenCV REQUIRED)
find_package(yaml-cpp REQUIRED)
find_package(CUDA REQUIRED)
#
set(TEST_CONFIG_PATH ${CMAKE_SOURCE_DIR}/pipeline/configs/pipeline.yaml)
function(add_test_target target_name source_file)
add_executable(${target_name} ${source_file})
target_link_libraries(${target_name}
@ -28,7 +31,13 @@ function(add_test_target target_name source_file)
${CMAKE_SOURCE_DIR}
${TENSORRT_INCLUDE_DIRS}
)
add_test(NAME ${target_name} COMMAND ${target_name})
target_compile_definitions(${target_name}
PRIVATE
TEST_CONFIG_PATH="${TEST_CONFIG_PATH}"
)
add_test(NAME ${target_name}
COMMAND ${target_name}
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR})
endfunction()
#

25
tests/test_base.hpp Normal file
View File

@ -0,0 +1,25 @@
#pragma once
#include <gtest/gtest.h>
#include <string>
#include <filesystem>
#ifndef TEST_CONFIG_PATH
#define TEST_CONFIG_PATH "../pipeline/configs/pipeline.yaml"
#endif
namespace pipeline {
namespace test {
// Common fixture: guarantees the shared pipeline.yaml exists before each test runs.
class TestBase : public ::testing::Test {
protected:
    // Location of the shared test configuration, injected at build time.
    const std::string config_path = TEST_CONFIG_PATH;

    void SetUp() override {
        namespace fs = std::filesystem;
        const bool config_present = fs::exists(config_path);
        ASSERT_TRUE(config_present) << "pipeline.yaml not found at: " << config_path;
    }
};
} // namespace test
} // namespace pipeline

View File

@ -1,100 +1,183 @@
#include <gtest/gtest.h>
#include "pipeline/common/config_parser.hpp"
using namespace pipeline;
// Verifies the default values of every configuration struct (pre-change copy).
TEST(ConfigParserTest, DefaultValues) {
// InputSourceConfig defaults
InputSourceConfig input_source;
EXPECT_EQ(input_source.buffer_size, 30);
EXPECT_TRUE(input_source.type.empty());
EXPECT_TRUE(input_source.name.empty());
EXPECT_TRUE(input_source.url.empty());
// InputConfig defaults
InputConfig input;
EXPECT_EQ(input.max_batch_size, 4);
EXPECT_TRUE(input.sources.empty());
// ModelConfig defaults
ModelConfig model;
EXPECT_TRUE(model.engine_path.empty());
EXPECT_EQ(model.input_shape, std::vector<int>({3, 640, 640}));
EXPECT_EQ(model.precision, "FP16");
EXPECT_FLOAT_EQ(model.threshold.conf, 0.5f);
EXPECT_FLOAT_EQ(model.threshold.nms, 0.45f);
EXPECT_EQ(model.gpu_id, 0);
// RenderConfig defaults
RenderConfig render;
EXPECT_TRUE(render.draw_fps);
EXPECT_TRUE(render.class_colors.empty());
EXPECT_EQ(render.line_thickness, 2);
EXPECT_FLOAT_EQ(render.font_scale, 0.5f);
// OutputTargetConfig defaults
OutputTargetConfig output_target;
EXPECT_TRUE(output_target.type.empty());
EXPECT_TRUE(output_target.name.empty());
EXPECT_TRUE(output_target.path.empty());
EXPECT_EQ(output_target.fps, 30);
EXPECT_EQ(output_target.codec, "h264");
EXPECT_EQ(output_target.bitrate, 4000000);
// LogConfig defaults
LogConfig log;
EXPECT_EQ(log.level, "info");
EXPECT_EQ(log.save_path, "logs/");
}
// Verifies that configuration structs accept field assignment (pre-change copy).
TEST(ConfigParserTest, Assignment) {
// InputSourceConfig assignment
InputSourceConfig input_source;
input_source.type = "rtsp";
input_source.name = "camera1";
input_source.url = "rtsp://example.com";
input_source.buffer_size = 50;
EXPECT_EQ(input_source.type, "rtsp");
EXPECT_EQ(input_source.name, "camera1");
EXPECT_EQ(input_source.url, "rtsp://example.com");
EXPECT_EQ(input_source.buffer_size, 50);
// Full PipelineConfig assembly
PipelineConfig config;
// Input section
config.input.max_batch_size = 8;
config.input.sources.push_back(input_source);
// Inference section
config.inference.engine_path = "/path/to/model.engine";
config.inference.precision = "FP32";
config.inference.threshold.conf = 0.6f;
// Render section
config.render.draw_fps = false;
config.render.line_thickness = 3;
// Verify the assigned values
EXPECT_EQ(config.input.max_batch_size, 8);
EXPECT_EQ(config.input.sources.size(), 1);
EXPECT_EQ(config.inference.engine_path, "/path/to/model.engine");
EXPECT_EQ(config.inference.precision, "FP32");
EXPECT_FLOAT_EQ(config.inference.threshold.conf, 0.6f);
EXPECT_FALSE(config.render.draw_fps);
EXPECT_EQ(config.render.line_thickness, 3);
}
// Verifies per-class color assignment in RenderConfig (pre-change copy).
TEST(ConfigParserTest, ColorConfig) {
RenderConfig render;
render.class_colors["person"] = cv::Scalar(255, 0, 0); // BGR
render.class_colors["car"] = cv::Scalar(0, 255, 0);
EXPECT_EQ(render.class_colors.size(), 2);
EXPECT_EQ(render.class_colors["person"], cv::Scalar(255, 0, 0));
EXPECT_EQ(render.class_colors["car"], cv::Scalar(0, 255, 0));
}
#include <gtest/gtest.h>
#include "pipeline/common/config_parser.hpp"
using namespace pipeline;
// Verifies the default values of every configuration struct,
// including the newly added window/style/metrics and labels/version fields.
TEST(ConfigParserTest, DefaultValues) {
// InputSourceConfig defaults
InputSourceConfig input_source;
EXPECT_EQ(input_source.buffer_size, 30);
EXPECT_TRUE(input_source.type.empty());
EXPECT_TRUE(input_source.name.empty());
EXPECT_TRUE(input_source.url.empty());
EXPECT_TRUE(input_source.outputs.empty());
// InputConfig defaults
InputConfig input;
EXPECT_EQ(input.max_batch_size, 4);
EXPECT_TRUE(input.sources.empty());
// ModelConfig defaults
ModelConfig model;
EXPECT_TRUE(model.engine_path.empty());
EXPECT_EQ(model.input_shape, std::vector<int>({3, 640, 640}));
EXPECT_EQ(model.precision, "FP16");
EXPECT_FLOAT_EQ(model.threshold.conf, 0.5f);
EXPECT_FLOAT_EQ(model.threshold.nms, 0.45f);
EXPECT_EQ(model.gpu_id, 0);
EXPECT_TRUE(model.labels.empty());
EXPECT_TRUE(model.version.empty());
// RenderConfig defaults: window
RenderConfig render;
EXPECT_EQ(render.window.name, "Detection Results");
EXPECT_EQ(render.window.width, 1280);
EXPECT_EQ(render.window.height, 720);
EXPECT_FALSE(render.window.fullscreen);
// Default drawing style
EXPECT_EQ(render.default_style.box_color, cv::Scalar(0, 255, 0));
EXPECT_EQ(render.default_style.text_color, cv::Scalar(255, 255, 255));
EXPECT_FLOAT_EQ(render.default_style.transparency, 0.0f);
EXPECT_EQ(render.default_style.box_thickness, 2);
EXPECT_DOUBLE_EQ(render.default_style.font_scale, 0.5)
EXPECT_EQ(render.default_style.font_thickness, 1);
// Metrics display defaults
EXPECT_TRUE(render.metrics.show_fps);
EXPECT_TRUE(render.metrics.show_inference_time);
EXPECT_TRUE(render.metrics.show_gpu_usage);
EXPECT_EQ(render.metrics.update_interval_ms, 1000);
// OutputTargetConfig defaults
OutputTargetConfig output_target;
EXPECT_TRUE(output_target.type.empty());
EXPECT_TRUE(output_target.name.empty());
EXPECT_TRUE(output_target.path.empty());
EXPECT_EQ(output_target.fps, 30);
EXPECT_EQ(output_target.codec, "h264");
EXPECT_EQ(output_target.bitrate, 4000000);
// LogConfig defaults
LogConfig log;
EXPECT_EQ(log.level, "info");
EXPECT_EQ(log.save_path, "logs/");
}
// Verifies that configuration structs accept field assignment,
// including the newly added outputs/labels/version and class_styles fields.
TEST(ConfigParserTest, Assignment) {
// InputSourceConfig assignment
InputSourceConfig input_source;
input_source.type = "rtsp";
input_source.name = "camera1";
input_source.url = "rtsp://example.com";
input_source.buffer_size = 50;
input_source.outputs = {"output1", "output2"};
EXPECT_EQ(input_source.type, "rtsp");
EXPECT_EQ(input_source.name, "camera1");
EXPECT_EQ(input_source.url, "rtsp://example.com");
EXPECT_EQ(input_source.buffer_size, 50);
EXPECT_EQ(input_source.outputs.size(), 2);
// Full PipelineConfig assembly
PipelineConfig config;
// Input section
config.input.max_batch_size = 8;
config.input.sources.push_back(input_source);
// Inference section
config.inference.engine_path = "/path/to/model.engine";
config.inference.precision = "FP32";
config.inference.threshold.conf = 0.6f;
config.inference.labels = {"person", "car"};
config.inference.version = "yolov8";
// Render section: one per-class style
RenderConfig::ClassStyle style;
style.box_color = cv::Scalar(255, 0, 0);
config.render.class_styles["person"] = style;
// Verify the assigned values
EXPECT_EQ(config.input.max_batch_size, 8);
EXPECT_EQ(config.input.sources.size(), 1);
EXPECT_EQ(config.inference.engine_path, "/path/to/model.engine");
EXPECT_EQ(config.inference.precision, "FP32");
EXPECT_FLOAT_EQ(config.inference.threshold.conf, 0.6f);
EXPECT_EQ(config.inference.labels.size(), 2);
EXPECT_EQ(config.inference.version, "yolov8");
EXPECT_EQ(config.render.class_styles.size(), 1);
}
// Verifies that per-class render styles can be stored and read back.
TEST(ConfigParserTest, RenderStyleConfig) {
RenderConfig render;
// Custom style for the "person" class
RenderConfig::ClassStyle person_style;
person_style.box_color = cv::Scalar(255, 0, 0); // BGR
person_style.text_color = cv::Scalar(255, 255, 255);
person_style.transparency = 0.2f;
render.class_styles["person"] = person_style;
// Verify the stored style
EXPECT_EQ(render.class_styles.size(), 1);
EXPECT_EQ(render.class_styles["person"].box_color, cv::Scalar(255, 0, 0));
EXPECT_EQ(render.class_styles["person"].text_color, cv::Scalar(255, 255, 255));
EXPECT_FLOAT_EQ(render.class_styles["person"].transparency, 0.2f);
}
// Verifies the new ModelConfig labels/version fields, standalone and inside
// a full PipelineConfig.
TEST(ConfigParserTest, ModelLabelsAndVersion) {
// New fields default to empty
ModelConfig model;
EXPECT_TRUE(model.labels.empty());
EXPECT_TRUE(model.version.empty());
// Label list assignment preserves order
model.labels = {"person", "car", "truck"};
EXPECT_EQ(model.labels.size(), 3);
EXPECT_EQ(model.labels[0], "person");
EXPECT_EQ(model.labels[1], "car");
EXPECT_EQ(model.labels[2], "truck");
// Version string assignment
model.version = "yolov8";
EXPECT_EQ(model.version, "yolov8");
// Same fields via the full PipelineConfig
PipelineConfig config;
config.inference.labels = {"person", "car", "truck"};
config.inference.version = "yolov8";
EXPECT_EQ(config.inference.labels.size(), 3);
EXPECT_EQ(config.inference.version, "yolov8");
}
// Verifies the mapping between model labels and per-class render styles:
// styled labels resolve to their style, unstyled labels are absent from the map
// (and thus fall back to the default style at render time).
TEST(ConfigParserTest, RenderStyleLabelMapping) {
PipelineConfig config;
// Model labels
config.inference.labels = {"person", "car", "truck"};
// Styles for two of the three labels
RenderConfig::ClassStyle person_style;
person_style.box_color = cv::Scalar(255, 0, 0); // BGR
config.render.class_styles["person"] = person_style;
RenderConfig::ClassStyle car_style;
car_style.box_color = cv::Scalar(0, 255, 0);
config.render.class_styles["car"] = car_style;
// Styled labels match their configured style
EXPECT_EQ(config.render.class_styles.count("person"), 1);
EXPECT_EQ(config.render.class_styles.count("car"), 1);
EXPECT_EQ(config.render.class_styles["person"].box_color, cv::Scalar(255, 0, 0));
EXPECT_EQ(config.render.class_styles["car"].box_color, cv::Scalar(0, 255, 0));
// "truck" has no entry, so it uses the default style
EXPECT_EQ(config.render.class_styles.count("truck"), 0);
}

View File

@ -152,103 +152,100 @@ TEST_F(OutputManagerTest, ResourceManagement) {
TEST_F(OutputManagerTest, ConcurrentWrite) {
OutputManager manager;
// 添加多个视频输出目标
for (int i = 0; i < 3; ++i) {
auto config = createVideoConfig("test_video_" + std::to_string(i));
ASSERT_TRUE(manager.addTarget(config));
}
// 添加两个视频输出目标
auto video_config1 = createVideoConfig("test_video_concurrent1");
ASSERT_TRUE(manager.addTarget(video_config1));
// 创建多个线程同时写入
std::vector<std::thread> threads;
for (int i = 0; i < 3; ++i) {
threads.emplace_back([&manager, this, i]() {
std::string source = "source_" + std::to_string(i);
std::vector<std::string> targets = {"test_video_" + std::to_string(i)};
EXPECT_TRUE(manager.addSourceTargetMapping(source, targets));
for (int j = 0; j < 10; ++j) {
EXPECT_TRUE(manager.writeFrames(source, test_frame_));
std::this_thread::sleep_for(std::chrono::milliseconds(10));
}
});
}
auto video_config2 = createVideoConfig("test_video_concurrent2");
ASSERT_TRUE(manager.addTarget(video_config2));
// 等待所有线程完成
for (auto& thread : threads) {
thread.join();
}
// 添加源到目标的映射
ASSERT_TRUE(manager.addSourceTargetMapping("source1", {"test_video_concurrent1"}));
ASSERT_TRUE(manager.addSourceTargetMapping("source2", {"test_video_concurrent2"}));
// 创建两个线程同时写入不同的输出目标
std::thread t1([&]() {
for (int i = 0; i < 10; ++i) {
EXPECT_TRUE(manager.writeFrames("source1", test_frame_));
std::this_thread::sleep_for(std::chrono::milliseconds(10));
}
});
std::thread t2([&]() {
for (int i = 0; i < 10; ++i) {
EXPECT_TRUE(manager.writeFrames("source2", test_frame_));
std::this_thread::sleep_for(std::chrono::milliseconds(10));
}
});
// 等待线程完成
t1.join();
t2.join();
// 验证输出文件存在
ASSERT_TRUE(fs::exists(video_config1.path));
ASSERT_TRUE(fs::exists(video_config2.path));
}
// 测试一个输入源映射到多个输出目标
TEST_F(OutputManagerTest, MultipleTargetsMapping) {
OutputManager manager;
// 添加多个输出目标
auto video_config1 = createVideoConfig("test_video_multi1");
ASSERT_TRUE(manager.addTarget(video_config1));
auto video_config2 = createVideoConfig("test_video_multi2");
ASSERT_TRUE(manager.addTarget(video_config2));
auto video_config3 = createVideoConfig("test_video_multi3");
ASSERT_TRUE(manager.addTarget(video_config3));
// 将一个输入源映射到多个输出目标
std::vector<std::string> targets = {
"test_video_multi1",
"test_video_multi2",
"test_video_multi3"
};
ASSERT_TRUE(manager.addSourceTargetMapping("source_multi", targets));
// 写入帧到所有映射的目标
ASSERT_TRUE(manager.writeFrames("source_multi", test_frame_));
// 验证所有输出文件都存在
for (int i = 0; i < 3; ++i) {
std::string path = test_dir_ + "/test_video_" + std::to_string(i) + ".mp4";
EXPECT_TRUE(fs::exists(path));
}
ASSERT_TRUE(fs::exists(video_config1.path));
ASSERT_TRUE(fs::exists(video_config2.path));
ASSERT_TRUE(fs::exists(video_config3.path));
}
// 测试配置验证
TEST_F(OutputManagerTest, ConfigValidation) {
// 测试FPS参数边界值
TEST_F(OutputManagerTest, FpsBoundaryTest) {
OutputManager manager;
// 测试无效的类型
{
OutputTargetConfig config;
config.type = "invalid";
config.name = "test_invalid";
EXPECT_FALSE(manager.addTarget(config));
}
// 测试FPS为0
auto config_fps_zero = createVideoConfig("test_video_fps_zero");
config_fps_zero.fps = 0;
EXPECT_FALSE(manager.addTarget(config_fps_zero));
// 测试空路径
{
OutputTargetConfig config;
config.type = "video";
config.name = "test_empty_path";
config.path = "";
EXPECT_FALSE(manager.addTarget(config));
}
// 测试FPS为负数
auto config_fps_negative = createVideoConfig("test_video_fps_negative");
config_fps_negative.fps = -30;
EXPECT_FALSE(manager.addTarget(config_fps_negative));
// 测试无效的帧率
{
OutputTargetConfig config;
config.type = "video";
config.name = "test_invalid_fps";
config.path = test_dir_ + "/test.mp4";
config.fps = 0;
EXPECT_FALSE(manager.addTarget(config));
}
// 测试FPS为最小有效值
auto config_fps_min = createVideoConfig("test_video_fps_min");
config_fps_min.fps = 1;
EXPECT_TRUE(manager.addTarget(config_fps_min));
// 测试无效的比特率
{
OutputTargetConfig config;
config.type = "video";
config.name = "test_invalid_bitrate";
config.path = test_dir_ + "/test.mp4";
config.bitrate = -1;
EXPECT_FALSE(manager.addTarget(config));
}
}
// 测试状态查询
TEST_F(OutputManagerTest, StatusQuery) {
OutputManager manager;
// 测试FPS为正常值
auto config_fps_normal = createVideoConfig("test_video_fps_normal");
config_fps_normal.fps = 60;
EXPECT_TRUE(manager.addTarget(config_fps_normal));
// 添加视频输出目标
auto video_config = createVideoConfig("test_video_status");
ASSERT_TRUE(manager.addTarget(video_config));
// 测试目标状态查询
std::string error_msg;
EXPECT_TRUE(manager.getTargetStatus("test_video_status", error_msg));
EXPECT_TRUE(error_msg.empty());
// 测试不存在的目标
EXPECT_FALSE(manager.getTargetStatus("non_existent", error_msg));
EXPECT_FALSE(error_msg.empty());
// 写入一些帧后检查状态
for (int i = 0; i < 10; ++i) {
EXPECT_TRUE(manager.writeFrames("test_video_status", test_frame_));
}
EXPECT_TRUE(manager.getTargetStatus("test_video_status", error_msg));
EXPECT_TRUE(error_msg.empty());
// 验证正常FPS的输出是否工作
ASSERT_TRUE(manager.addSourceTargetMapping("source_fps", {"test_video_fps_normal"}));
ASSERT_TRUE(manager.writeFrames("source_fps", test_frame_));
ASSERT_TRUE(fs::exists(config_fps_normal.path));
}
} // namespace pipeline

View File

@ -3,16 +3,218 @@
#include <fstream>
#include <opencv2/opencv.hpp>
#include "pipeline/common/pipeline.hpp"
#include "pipeline/common/yaml_config_parser.hpp"
namespace fs = std::filesystem;
namespace pipeline {
// Fixture for end-to-end Pipeline tests.
// SetUp() builds a disposable sandbox under /tmp containing a short synthetic
// video, a matching YAML configuration and a log directory; TearDown() removes
// the sandbox only when the test passed, so failing runs keep their artifacts
// for post-mortem inspection.
//
// Fix: the stray prototype lines `void SetUp() override;` / `void TearDown()
// override;` that preceded the inline definitions were removed -- declaring a
// member twice in the same class is a compile error (leftover from a refactor).
class PipelineTest : public ::testing::Test {
protected:
    void SetUp() override {
        // Start from a clean sandbox directory
        test_dir_ = "/tmp/test_pipeline";
        if (fs::exists(test_dir_)) {
            fs::remove_all(test_dir_);
        }
        fs::create_directories(test_dir_);
        // Log directory consumed by the pipeline logger
        log_dir_ = test_dir_ + "/logs";
        fs::create_directories(log_dir_);
        // Synthetic input clip
        video_path_ = test_dir_ + "/test_video.mp4";
        createTestVideo(video_path_);
        // Configuration wired to the paths above
        config_path_ = test_dir_ + "/config.yaml";
        createTestConfig(config_path_);
    }

    void TearDown() override {
        // Only delete the sandbox when the test succeeded
        if (!::testing::Test::HasFailure() && fs::exists(test_dir_)) {
            fs::remove_all(test_dir_);
        }
    }

    // Writes a 30-frame 640x480 clip; note the same cv::Mat is reused, so the
    // burned-in "Frame N" labels overdraw/accumulate across frames.
    void createTestVideo(const std::string& path) {
        cv::Mat frame(480, 640, CV_8UC3, cv::Scalar(0, 0, 0));
        cv::VideoWriter writer(path, cv::VideoWriter::fourcc('a', 'v', 'c', '1'), 30, frame.size());
        for (int i = 0; i < 30; ++i) {
            cv::putText(frame, "Frame " + std::to_string(i), cv::Point(50, 50),
                        cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(255, 255, 255), 2);
            writer.write(frame);
        }
        writer.release();
    }

    // Emits the YAML configuration consumed by the Pipeline under test.
    // NOTE(review): the rendered source shows single-space indentation inside
    // these YAML string literals; this may be a display artifact of the diff
    // viewer -- confirm the exact indentation against the repository.
    void createTestConfig(const std::string& path) {
        std::ofstream config_file(path);
        // Input section
        config_file << "input:\n";
        config_file << " max_batch_size: 1\n";
        config_file << " sources:\n";
        config_file << " - type: video\n";
        config_file << " name: test_video\n";
        config_file << " url: " << video_path_ << "\n";
        config_file << " buffer_size: 30\n";
        config_file << " outputs: [output_video]\n";
        config_file << "\n";
        // Inference section
        config_file << "inference:\n";
        config_file << " model:\n";
        config_file << " onnx_path: /app/models/yolov8n.onnx\n";
        config_file << " engine_path: /app/models/yolov8n.engine\n";
        config_file << " input_shape: [3, 640, 640]\n";
        config_file << " precision: FP16\n";
        config_file << " threshold:\n";
        config_file << " conf: 0.5\n";
        config_file << " nms: 0.45\n";
        config_file << " gpu_id: 0\n";
        config_file << "\n";
        // Render section
        config_file << "render:\n";
        config_file << " enable: true\n";
        config_file << " window:\n";
        config_file << " name: Detection Results\n";
        config_file << " width: 1280\n";
        config_file << " height: 720\n";
        config_file << " fullscreen: false\n";
        config_file << " default_style:\n";
        config_file << " box_color: [0, 255, 0]\n";
        config_file << " text_color: [255, 255, 255]\n";
        config_file << " transparency: 0.0\n";
        config_file << " box_thickness: 2\n";
        config_file << " font_scale: 0.5\n";
        config_file << " font_thickness: 1\n";
        config_file << " class_styles:\n";
        config_file << " person:\n";
        config_file << " box_color: [0, 255, 0]\n";
        config_file << " text_color: [255, 255, 255]\n";
        config_file << " transparency: 0.0\n";
        config_file << " box_thickness: 2\n";
        config_file << " font_scale: 0.5\n";
        config_file << " font_thickness: 1\n";
        config_file << " metrics:\n";
        config_file << " show_fps: true\n";
        config_file << " show_inference_time: true\n";
        config_file << " show_gpu_usage: true\n";
        config_file << " update_interval_ms: 1000\n";
        config_file << "\n";
        // Output section
        config_file << "output:\n";
        config_file << " targets:\n";
        config_file << " - type: video\n";
        config_file << " name: output_video\n";
        config_file << " path: " << test_dir_ << "/output.mp4\n";
        config_file << " fps: 30\n";
        config_file << " codec: h264\n";
        config_file << " bitrate: 4000000\n";
        config_file << "\n";
        // Log section
        config_file << "log:\n";
        config_file << " level: info\n";
        config_file << " save_path: " << log_dir_ << "\n\n";
        config_file.close();
    }

    std::string test_dir_;     // sandbox root under /tmp
    std::string video_path_;   // generated input clip
    std::string config_path_;  // generated YAML configuration
    std::string log_dir_;      // directory for pipeline logs
};
// Verifies that the generated YAML file round-trips through YamlConfigParser.
TEST_F(PipelineTest, ConfigLoading) {
YamlConfigParser parser;
ASSERT_TRUE(parser.parse(config_path_));
const auto& config = parser.getConfig();
// Input section: single video source pointing at the generated clip
EXPECT_EQ(config.input.max_batch_size, 1);
EXPECT_EQ(config.input.sources.size(), 1);
EXPECT_EQ(config.input.sources[0].type, "video");
EXPECT_EQ(config.input.sources[0].name, "test_video");
EXPECT_EQ(config.input.sources[0].url, video_path_);
// Inference section
EXPECT_EQ(config.inference.engine_path, "/app/models/yolov8n.engine");
EXPECT_EQ(config.inference.input_shape, std::vector<int>({3, 640, 640}));
EXPECT_EQ(config.inference.precision, "FP16");
// Output section
EXPECT_EQ(config.output.targets.size(), 1);
EXPECT_EQ(config.output.targets[0].type, "video");
EXPECT_EQ(config.output.targets[0].name, "output_video");
}
// Exercises the pipeline lifecycle: init -> start -> isRunning -> stop -> wait.
TEST_F(PipelineTest, Initialization) {
Pipeline pipeline(config_path_, true); // test mode
ASSERT_TRUE(pipeline.init());
ASSERT_TRUE(pipeline.start());
EXPECT_TRUE(pipeline.isRunning());
pipeline.stop();
pipeline.wait();
}
// End-to-end run: after processing for a short while, the configured
// output video must exist and be non-empty.
TEST_F(PipelineTest, Running) {
Pipeline pipeline(config_path_, true); // test mode
ASSERT_TRUE(pipeline.init());
// Let the pipeline process frames for a couple of seconds
ASSERT_TRUE(pipeline.start());
std::this_thread::sleep_for(std::chrono::seconds(2));
pipeline.stop();
pipeline.wait();
// Verify the output file was produced
std::string output_path = test_dir_ + "/output.mp4";
EXPECT_TRUE(fs::exists(output_path));
EXPECT_GT(fs::file_size(output_path), 0);
}
// Failure paths: a missing config file and an invalid engine path must both
// make init() return false instead of crashing.
TEST_F(PipelineTest, ErrorHandling) {
// Nonexistent configuration file
Pipeline pipeline_invalid("invalid_config.yaml", true);
EXPECT_FALSE(pipeline_invalid.init());
// Configuration pointing at a bogus TensorRT engine path
std::string invalid_config_path = test_dir_ + "/invalid_config.yaml";
std::ofstream config_file(invalid_config_path);
config_file << "inference:\n";
config_file << " model:\n";
config_file << " onnx_path: /app/models/yolov8n.onnx\n";
config_file << " engine_path: invalid_engine_path\n";
config_file << " input_shape: [3, 640, 640]\n";
config_file.close();
Pipeline pipeline_invalid_engine(invalid_config_path, true);
EXPECT_FALSE(pipeline_invalid_engine.init());
}
// RAII cleanup: the destructor alone (no explicit stop()) must shut the
// pipeline down and leave the output file behind.
TEST_F(PipelineTest, ResourceManagement) {
{
Pipeline pipeline(config_path_, true);
ASSERT_TRUE(pipeline.init());
ASSERT_TRUE(pipeline.start());
std::this_thread::sleep_for(std::chrono::seconds(1));
// Intentionally no stop(): the destructor must handle shutdown
}
// Output still written => resources were released cleanly
EXPECT_TRUE(fs::exists(test_dir_ + "/output.mp4"));
}
} // namespace pipeline

View File

@ -13,6 +13,7 @@ protected:
config_.window_height = 600;
config_.fullscreen = false;
config_.test_mode = true; // 启用测试模式
config_.enable = true; // 默认启用渲染
// 创建测试图像
test_frame_ = cv::Mat(480, 640, CV_8UC3, cv::Scalar(0, 0, 0));
@ -51,6 +52,30 @@ protected:
return true;
}
// Helper: returns true iff two BGR images are pixel-for-pixel identical.
// On mismatch, logs the reason (size/type difference or the per-channel count
// of differing pixels) to stdout and returns false.
bool compareBGRImages(const cv::Mat& img1, const cv::Mat& img2) {
if (img1.size() != img2.size() || img1.type() != img2.type()) {
std::cout << "Image size or type mismatch" << std::endl;
std::cout << "img1: " << img1.size() << " type: " << img1.type() << std::endl;
std::cout << "img2: " << img2.size() << " type: " << img2.type() << std::endl;
return false;
}
cv::Mat diff;
cv::absdiff(img1, img2, diff);
std::vector<cv::Mat> channels;
cv::split(diff, channels);
// Any non-zero absolute difference in any channel means a mismatch
for (size_t i = 0; i < channels.size(); ++i) {
int nonZero = cv::countNonZero(channels[i]);
if (nonZero > 0) {
std::cout << "Channel " << i << " has " << nonZero << " different pixels" << std::endl;
return false;
}
}
return true;
}
renderer::RendererConfig config_;
Renderer renderer_;
cv::Mat test_frame_;
@ -143,6 +168,54 @@ TEST_F(RendererTest, EdgeCases) {
EXPECT_TRUE(renderer_.render(gray_frame, test_results_, metrics_));
}
// With rendering disabled, render() must succeed but leave the frame untouched
// (no boxes or text drawn).
TEST_F(RendererTest, DisabledRendering) {
// Disable rendering
config_.enable = false;
ASSERT_TRUE(renderer_.init(config_));
// A solid-colored frame makes any accidental drawing detectable
cv::Mat colored_frame(480, 640, CV_8UC3, cv::Scalar(255, 0, 0)); // solid blue (BGR)
// render() should still report success
EXPECT_TRUE(renderer_.render(colored_frame, test_results_, metrics_));
// Retrieve the frame the renderer kept
cv::Mat rendered_frame = renderer_.getLastFrame();
// Frame must be present and the same size as the input
EXPECT_TRUE(!rendered_frame.empty());
EXPECT_EQ(rendered_frame.size(), colored_frame.size());
// Content must be byte-identical to the input (nothing was drawn)
EXPECT_TRUE(compareBGRImages(rendered_frame, colored_frame));
}
// Toggling enable between inits: enabled rendering must modify the frame,
// disabled rendering must leave it untouched.
TEST_F(RendererTest, ToggleRendering) {
// A solid-colored frame makes drawing detectable
cv::Mat colored_frame(480, 640, CV_8UC3, cv::Scalar(255, 0, 0)); // solid blue (BGR)
// First: rendering enabled
config_.enable = true;
ASSERT_TRUE(renderer_.init(config_));
EXPECT_TRUE(renderer_.render(colored_frame, test_results_, metrics_));
cv::Mat rendered_frame_enabled = renderer_.getLastFrame();
// Detections were drawn, so the frame must differ from the input
EXPECT_FALSE(compareBGRImages(rendered_frame_enabled, colored_frame));
// Then: rendering disabled
renderer_.cleanup(); // tear down the current renderer before re-init
config_.enable = false;
ASSERT_TRUE(renderer_.init(config_));
EXPECT_TRUE(renderer_.render(colored_frame, test_results_, metrics_));
cv::Mat rendered_frame_disabled = renderer_.getLastFrame();
// Frame must be unchanged
EXPECT_TRUE(compareBGRImages(rendered_frame_disabled, colored_frame));
}
int main(int argc, char **argv) {
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();

View File

@ -1,5 +1,6 @@
#include <gtest/gtest.h>
#include "pipeline/input/video_reader.hpp"
#include "test_base.hpp"
#include <filesystem>
#include <fstream>
#include <thread>
@ -8,9 +9,10 @@
using namespace pipeline;
class VideoReaderTest : public ::testing::Test {
class VideoReaderTest : public test::TestBase {
protected:
void SetUp() override {
test::TestBase::SetUp(); // 调用基类的SetUp
// 创建测试视频文件
createTestVideo("test_video.mp4");
}

View File

@ -1,367 +1,186 @@
#include <gtest/gtest.h>
#include "pipeline/common/yaml_config_parser.hpp"
#include <fstream>
#include <filesystem>
using namespace pipeline;
// Fixture for YamlConfigParser tests: SetUp() writes a complete, valid
// test_config.yaml to the working directory; TearDown() removes it along with
// any invalid_config.yaml a test may have created.
class YamlConfigTest : public ::testing::Test {
protected:
void SetUp() override {
// Write the reference configuration file used by the tests below
std::ofstream test_config("test_config.yaml");
test_config << R"(
input:
sources:
- type: rtsp
name: camera1
url: rtsp://example.com/stream1
buffer_size: 30
max_batch_size: 4
inference:
model:
onnx_path: /path/to/model.onnx
engine_path: /path/to/model.engine
input_shape: [3, 640, 640]
precision: FP16
threshold:
conf: 0.5
nms: 0.45
gpu_id: 0
render:
window:
name: "Detection Results"
width: 1280
height: 720
fullscreen: false
default_style:
box_color: [0, 255, 0]
text_color: [255, 255, 255]
transparency: 0.0
box_thickness: 2
font_scale: 0.5
font_thickness: 1
class_styles:
person:
box_color: [255, 0, 0]
text_color: [255, 255, 255]
transparency: 0.2
box_thickness: 2
font_scale: 0.5
font_thickness: 1
car:
box_color: [0, 255, 0]
text_color: [255, 255, 255]
transparency: 0.2
box_thickness: 2
font_scale: 0.5
font_thickness: 1
metrics:
show_fps: true
show_inference_time: true
show_gpu_usage: true
update_interval_ms: 1000
output:
targets:
- type: video
name: output1
path: /path/to/output.mp4
fps: 30
codec: h264
bitrate: 4000000
log:
level: info
save_path: logs/
)";
test_config.close();
}
void TearDown() override {
// Remove the files the tests touched
std::filesystem::remove("test_config.yaml");
std::filesystem::remove("invalid_config.yaml");
}
};
// Parses the reference config and verifies every section field by field.
TEST_F(YamlConfigTest, LoadConfig) {
auto parser = createConfigParser();
ASSERT_TRUE(parser->parse("test_config.yaml"));
const auto& config = parser->getConfig();
// Input section
ASSERT_EQ(config.input.sources.size(), 1);
EXPECT_EQ(config.input.sources[0].type, "rtsp");
EXPECT_EQ(config.input.sources[0].name, "camera1");
EXPECT_EQ(config.input.sources[0].url, "rtsp://example.com/stream1");
EXPECT_EQ(config.input.sources[0].buffer_size, 30);
EXPECT_EQ(config.input.max_batch_size, 4);
// Inference section
EXPECT_EQ(config.inference.engine_path, "/path/to/model.engine");
ASSERT_EQ(config.inference.input_shape.size(), 3);
EXPECT_EQ(config.inference.input_shape[0], 3);
EXPECT_EQ(config.inference.input_shape[1], 640);
EXPECT_EQ(config.inference.input_shape[2], 640);
EXPECT_EQ(config.inference.precision, "FP16");
EXPECT_FLOAT_EQ(config.inference.threshold.conf, 0.5f);
EXPECT_FLOAT_EQ(config.inference.threshold.nms, 0.45f);
EXPECT_EQ(config.inference.gpu_id, 0);
// Render section
// Window settings
EXPECT_EQ(config.render.window.name, "Detection Results");
EXPECT_EQ(config.render.window.width, 1280);
EXPECT_EQ(config.render.window.height, 720);
EXPECT_FALSE(config.render.window.fullscreen);
// Default style
EXPECT_EQ(config.render.default_style.box_color, cv::Scalar(0, 255, 0));
EXPECT_EQ(config.render.default_style.text_color, cv::Scalar(255, 255, 255));
EXPECT_FLOAT_EQ(config.render.default_style.transparency, 0.0f);
EXPECT_EQ(config.render.default_style.box_thickness, 2);
EXPECT_DOUBLE_EQ(config.render.default_style.font_scale, 0.5);
EXPECT_EQ(config.render.default_style.font_thickness, 1);
// Per-class styles
ASSERT_EQ(config.render.class_styles.size(), 2);
const auto& person_style = config.render.class_styles.at("person");
EXPECT_EQ(person_style.box_color, cv::Scalar(255, 0, 0));
EXPECT_EQ(person_style.text_color, cv::Scalar(255, 255, 255));
EXPECT_FLOAT_EQ(person_style.transparency, 0.2f);
const auto& car_style = config.render.class_styles.at("car");
EXPECT_EQ(car_style.box_color, cv::Scalar(0, 255, 0));
EXPECT_EQ(car_style.text_color, cv::Scalar(255, 255, 255));
EXPECT_FLOAT_EQ(car_style.transparency, 0.2f);
// Metrics settings
EXPECT_TRUE(config.render.metrics.show_fps);
EXPECT_TRUE(config.render.metrics.show_inference_time);
EXPECT_TRUE(config.render.metrics.show_gpu_usage);
EXPECT_EQ(config.render.metrics.update_interval_ms, 1000);
// Output section
ASSERT_EQ(config.output.targets.size(), 1);
EXPECT_EQ(config.output.targets[0].type, "video");
EXPECT_EQ(config.output.targets[0].name, "output1");
EXPECT_EQ(config.output.targets[0].path, "/path/to/output.mp4");
EXPECT_EQ(config.output.targets[0].fps, 30);
EXPECT_EQ(config.output.targets[0].codec, "h264");
EXPECT_EQ(config.output.targets[0].bitrate, 4000000);
// Log section
EXPECT_EQ(config.log.level, "info");
EXPECT_EQ(config.log.save_path, "logs/");
}
// Verify the parser rejects a missing file and a structurally invalid config.
TEST_F(YamlConfigTest, InvalidConfig) {
    auto parser = createConfigParser();
    // A path that does not exist must fail to parse.
    EXPECT_FALSE(parser->parse("non_existent.yaml"));
    // Create an invalid configuration file (empty input source list).
    std::ofstream invalid_config("invalid_config.yaml");
    invalid_config << R"(
input:
sources: []
)";
    invalid_config.close();
    // Parsing the invalid configuration must fail.
    EXPECT_FALSE(parser->parse("invalid_config.yaml"));
    // Clean up the temporary file so the test leaves no residue on disk
    // (the original forgot this; sibling tests in this file do remove theirs).
    std::filesystem::remove("invalid_config.yaml");
}
// Table-driven negative tests: each scenario violates exactly one validation
// rule and the parser must reject all of them. The second element of each pair
// is a human-readable description used in the failure message (the original
// left all of these empty, which made failures impossible to attribute).
TEST_F(YamlConfigTest, InvalidConfigs) {
    auto parser = createConfigParser();
    std::vector<std::pair<std::string, std::string>> invalid_configs = {
        // Empty input source list
        {R"(
input:
sources: []
inference:
model:
engine_path: /path/to/model.engine
input_shape: [3, 640, 640]
)", "empty input source list"},
        // Input source with an empty name
        {R"(
input:
sources:
- type: rtsp
name: "" #
url: rtsp://example.com/stream1
inference:
model:
engine_path: /path/to/model.engine
input_shape: [3, 640, 640]
)", "input source with empty name"},
        // Empty engine path
        {R"(
input:
sources:
- type: rtsp
name: camera1
url: rtsp://example.com/stream1
inference:
model:
engine_path: "" #
input_shape: [3, 640, 640]
)", "empty engine path"},
        // Negative render window width
        {R"(
input:
sources:
- type: rtsp
name: camera1
url: rtsp://example.com/stream1
inference:
model:
engine_path: /path/to/model.engine
input_shape: [3, 640, 640]
render:
window:
name: "Detection Results"
width: -1 #
height: 720
)", "negative render window width"},
        // Transparency outside the valid [0, 1] range
        {R"(
input:
sources:
- type: rtsp
name: camera1
url: rtsp://example.com/stream1
inference:
model:
engine_path: /path/to/model.engine
input_shape: [3, 640, 640]
render:
window:
name: "Detection Results"
width: 1280
height: 720
default_style:
transparency: 2.0 #
)", "default style transparency outside [0, 1]"},
        // Output target with an empty path
        {R"(
input:
sources:
- type: rtsp
name: camera1
url: rtsp://example.com/stream1
inference:
model:
engine_path: /path/to/model.engine
input_shape: [3, 640, 640]
output:
targets:
- type: video
name: output1
path: "" #
fps: 30
)", "output target with empty path"},
        // Empty log level
        {R"(
input:
sources:
- type: rtsp
name: camera1
url: rtsp://example.com/stream1
inference:
model:
engine_path: /path/to/model.engine
input_shape: [3, 640, 640]
log:
level: "" #
save_path: logs/
)", "empty log level"}
    };
    for (const auto& [config_str, desc] : invalid_configs) {
        // Write the scenario to a temporary file, parse it, then clean up.
        std::ofstream config_file("invalid_config.yaml");
        config_file << config_str;
        config_file.close();
        EXPECT_FALSE(parser->parse("invalid_config.yaml")) << "应该检测到无效配置: " << desc;
        std::filesystem::remove("invalid_config.yaml");
    }
}
// A confidence threshold greater than 1.0 is invalid and must make parsing fail.
TEST_F(YamlConfigTest, ValidateThresholds) {
    auto parser = createConfigParser();
    const char* yaml_path = "invalid_threshold.yaml";
    // Write a temporary config whose conf threshold is outside [0, 1];
    // the ofstream is closed by its destructor at the end of this scope.
    {
        std::ofstream yaml_out(yaml_path);
        yaml_out << R"(
input:
sources:
- type: rtsp
name: camera1
url: rtsp://example.com/stream1
inference:
model:
engine_path: /path/to/model.engine
input_shape: [3, 640, 640]
threshold:
conf: 1.5 #
nms: 0.45
)";
    }
    EXPECT_FALSE(parser->parse(yaml_path));
    std::filesystem::remove(yaml_path);
}
// A class style keyed by an empty class name must be rejected by the parser.
TEST_F(YamlConfigTest, ValidateClassStyles) {
    auto parser = createConfigParser();
    // Config with an empty string as a class_styles key — invalid class name.
    std::string config_str = R"(
input:
sources:
- type: rtsp
name: camera1
url: rtsp://example.com/stream1
inference:
model:
engine_path: /path/to/model.engine
input_shape: [3, 640, 640]
render:
window:
name: "Detection Results"
width: 1280
height: 720
class_styles:
"": #
box_color: [255, 0, 0]
text_color: [255, 255, 255]
transparency: 0.2
)";
    std::ofstream config_file("invalid_class_style.yaml");
    config_file << config_str;
    config_file.close();
    EXPECT_FALSE(parser->parse("invalid_class_style.yaml"));
    std::filesystem::remove("invalid_class_style.yaml");
}
// NOTE(review): the closing brace above was missing in the original; without it
// the test body swallowed the #include lines that follow and the file did not compile.
#include <gtest/gtest.h>
#include "pipeline/common/yaml_config_parser.hpp"
#include "test_base.hpp"
#include <fstream>
#include <filesystem>
using namespace pipeline;
// Test fixture for the YAML config parser tests; inherits shared setup
// (presumably temp-dir / config-file preparation — confirm in test_base.hpp)
// from test::TestBase.
class YamlConfigTest : public test::TestBase {
protected:
    void SetUp() override {
        test::TestBase::SetUp(); // Delegate to the base fixture's SetUp.
    }
};
// Parses the fixture-provided config file (config_path — presumably supplied
// by test::TestBase; confirm in test_base.hpp) and verifies that every field
// of every section round-trips to the expected value.
TEST_F(YamlConfigTest, LoadConfig) {
    auto parser = createConfigParser();
    ASSERT_TRUE(parser->parse(config_path));
    const auto& config = parser->getConfig();
    // Verify input configuration
    ASSERT_EQ(config.input.sources.size(), 1);
    EXPECT_EQ(config.input.sources[0].type, "rtsp");
    EXPECT_EQ(config.input.sources[0].name, "camera1");
    EXPECT_EQ(config.input.sources[0].url, "rtsp://10.0.0.17:8554/camera_test/2");
    EXPECT_EQ(config.input.sources[0].buffer_size, 30);
    EXPECT_EQ(config.input.max_batch_size, 4);
    // Verify inference configuration
    EXPECT_EQ(config.inference.engine_path, "/app/models/yolov8n.engine");
    ASSERT_EQ(config.inference.input_shape.size(), 3);
    EXPECT_EQ(config.inference.input_shape[0], 3);
    EXPECT_EQ(config.inference.input_shape[1], 640);
    EXPECT_EQ(config.inference.input_shape[2], 640);
    EXPECT_EQ(config.inference.precision, "FP16");
    EXPECT_FLOAT_EQ(config.inference.threshold.conf, 0.5f);
    EXPECT_FLOAT_EQ(config.inference.threshold.nms, 0.45f);
    EXPECT_EQ(config.inference.gpu_id, 0);
    EXPECT_EQ(config.inference.version, "yolov8");
    // Labels are ordered by class_id in the config.
    ASSERT_EQ(config.inference.labels.size(), 3);
    EXPECT_EQ(config.inference.labels[0], "person");
    EXPECT_EQ(config.inference.labels[1], "car");
    EXPECT_EQ(config.inference.labels[2], "truck");
    // Verify render configuration
    EXPECT_TRUE(config.render.enable);
    EXPECT_EQ(config.render.window.name, "Detection Results");
    EXPECT_EQ(config.render.window.width, 1280);
    EXPECT_EQ(config.render.window.height, 720);
    EXPECT_FALSE(config.render.window.fullscreen);
    // Default style (colors are BGR cv::Scalar values)
    EXPECT_EQ(config.render.default_style.box_color, cv::Scalar(0, 255, 0));
    EXPECT_EQ(config.render.default_style.text_color, cv::Scalar(255, 255, 255));
    EXPECT_FLOAT_EQ(config.render.default_style.transparency, 0.0f);
    EXPECT_EQ(config.render.default_style.box_thickness, 2);
    EXPECT_DOUBLE_EQ(config.render.default_style.font_scale, 0.5);
    EXPECT_EQ(config.render.default_style.font_thickness, 1);
    // Per-class styles
    ASSERT_EQ(config.render.class_styles.size(), 3);
    const auto& person_style = config.render.class_styles.at("person");
    EXPECT_EQ(person_style.box_color, cv::Scalar(255, 0, 0));
    EXPECT_EQ(person_style.text_color, cv::Scalar(255, 255, 255));
    EXPECT_FLOAT_EQ(person_style.transparency, 0.2f);
    const auto& car_style = config.render.class_styles.at("car");
    EXPECT_EQ(car_style.box_color, cv::Scalar(0, 255, 0));
    EXPECT_EQ(car_style.text_color, cv::Scalar(255, 255, 255));
    EXPECT_FLOAT_EQ(car_style.transparency, 0.2f);
    const auto& truck_style = config.render.class_styles.at("truck");
    EXPECT_EQ(truck_style.box_color, cv::Scalar(0, 0, 255));
    EXPECT_EQ(truck_style.text_color, cv::Scalar(255, 255, 255));
    EXPECT_FLOAT_EQ(truck_style.transparency, 0.2f);
    // Performance metrics overlay configuration
    EXPECT_TRUE(config.render.metrics.show_fps);
    EXPECT_TRUE(config.render.metrics.show_inference_time);
    EXPECT_TRUE(config.render.metrics.show_gpu_usage);
    EXPECT_EQ(config.render.metrics.update_interval_ms, 1000);
    // Verify output configuration (one video file target, one RTSP target)
    ASSERT_EQ(config.output.targets.size(), 2);
    EXPECT_EQ(config.output.targets[0].type, "video");
    EXPECT_EQ(config.output.targets[0].name, "output_video");
    EXPECT_EQ(config.output.targets[0].path, "/output/result.mp4");
    EXPECT_EQ(config.output.targets[0].fps, 30);
    EXPECT_EQ(config.output.targets[0].codec, "h264");
    EXPECT_EQ(config.output.targets[0].bitrate, 4000000);
    EXPECT_EQ(config.output.targets[1].type, "rtsp");
    EXPECT_EQ(config.output.targets[1].name, "output_rtsp");
    EXPECT_EQ(config.output.targets[1].path, "rtsp://localhost:8554/live");
    EXPECT_EQ(config.output.targets[1].fps, 30);
    EXPECT_EQ(config.output.targets[1].codec, "h264");
    EXPECT_EQ(config.output.targets[1].bitrate, 4000000);
    // Verify log configuration
    EXPECT_EQ(config.log.level, "info");
    EXPECT_EQ(config.log.save_path, "logs/");
}
// Parsing a path that does not exist must fail gracefully (return false,
// not throw or crash).
TEST_F(YamlConfigTest, InvalidConfig) {
    const auto cfg_parser = createConfigParser();
    const bool parsed = cfg_parser->parse("non_existent.yaml");
    EXPECT_FALSE(parsed);
}
// A configuration with an empty input source list must be rejected.
TEST_F(YamlConfigTest, InvalidConfigs) {
    auto parser = createConfigParser();
    const char* tmp_path = "invalid_config.yaml";
    // Write the invalid config; the ofstream closes when the scope ends.
    {
        std::ofstream tmp_file(tmp_path);
        tmp_file << R"(
input:
sources: []
)";
    }
    EXPECT_FALSE(parser->parse(tmp_path));
    std::filesystem::remove(tmp_path);
}
// A confidence threshold above 1.0 is out of range and must cause a parse failure.
TEST_F(YamlConfigTest, ValidateThresholds) {
    auto parser = createConfigParser();
    const char* threshold_path = "invalid_threshold.yaml";
    // Emit a temporary config whose conf value (1.5) violates the [0, 1] range;
    // RAII closes the stream at the end of the scope.
    {
        std::ofstream threshold_file(threshold_path);
        threshold_file << R"(
input:
sources:
- type: rtsp
name: "camera1"
url: "rtsp://10.0.0.17:8554/camera_test/2"
inference:
model:
onnx_path: "/app/models/yolov8n.onnx"
engine_path: "/app/models/yolov8n.engine"
input_shape: [3, 640, 640]
precision: "FP16"
threshold:
conf: 1.5 #
nms: 0.45
)";
    }
    EXPECT_FALSE(parser->parse(threshold_path));
    std::filesystem::remove(threshold_path);
}
// A class style keyed by the empty string is not a valid class name and
// must cause parsing to fail.
TEST_F(YamlConfigTest, ValidateClassStyles) {
    auto parser = createConfigParser();
    const char* style_path = "invalid_class_style.yaml";
    // Emit a temporary config with "" as a class_styles key; RAII closes the stream.
    {
        std::ofstream style_file(style_path);
        style_file << R"(
input:
sources:
- type: rtsp
name: "camera1"
url: "rtsp://10.0.0.17:8554/camera_test/2"
inference:
model:
onnx_path: "/app/models/yolov8n.onnx"
engine_path: "/app/models/yolov8n.engine"
input_shape: [3, 640, 640]
precision: "FP16"
render:
window:
name: "Detection Results"
width: 1280
height: 720
class_styles:
"": #
box_color: [255, 0, 0]
text_color: [255, 255, 255]
transparency: 0.2
)";
    }
    EXPECT_FALSE(parser->parse(style_path));
    std::filesystem::remove(style_path);
}