add new package and try to transplant rtsp of rouring

SikongJueluo 2025-01-16 14:25:11 +08:00
parent c289d4ee7d
commit 8567012936
15 changed files with 318 additions and 149 deletions

readerwriterqueue.lua Normal file

@@ -0,0 +1,22 @@
package("readerwritercircularbuffer")
    set_kind("library")
    set_homepage("https://github.com/cameron314/readerwriterqueue")
    set_urls("https://github.com/cameron314/readerwriterqueue/archive/$(version).zip")
    add_versions("16b48ae1148284e7b40abf72167206a4390a4592", "11758ab1b2ec96217245b32222edbcae76b686c9d795973907223ce0b720541f")

    add_deps("cmake")

    on_install(function (package)
        local configs = {}
        table.insert(configs, "-DCMAKE_BUILD_TYPE=" .. (package:debug() and "Debug" or "Release"))
        table.insert(configs, "-DBUILD_SHARED_LIBS=" .. (package:config("shared") and "ON" or "OFF"))
        import("package.tools.cmake").install(package, configs)
    end)

    on_test(function (package)
        assert(package:has_cxxtypes(
            "BlockingReaderWriterCircularBuffer",
            {includes = "readerwritercircularbuffer.h"}))
    end)
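
The package above wraps cameron314/readerwriterqueue, whose readerwritercircularbuffer.h header provides a fixed-capacity, lock-free single-producer/single-consumer queue. A minimal sketch of the calls the RTSP code below relies on (try_enqueue, try_dequeue, size_approx); the Frame struct and buffer sizes here are illustrative only, not part of the commit:

#include <cstdint>
#include <cstdio>
#include "readerwritercircularbuffer.h"

// Placeholder payload type for the sketch.
struct Frame {
    uint8_t *data;
    uint32_t length;
};

int main() {
    // Capacity of 3 slots, the same depth H26XDeviceSource uses for its frame queue.
    moodycamel::BlockingReaderWriterCircularBuffer<Frame> queue(3);

    Frame in{new uint8_t[16], 16};
    if (!queue.try_enqueue(in)) {   // non-blocking; fails when the ring is full
        delete[] in.data;           // producer keeps ownership on failure
    }

    Frame out;
    while (queue.size_approx() > 0 && queue.try_dequeue(out)) {
        std::printf("got %u bytes\n", (unsigned)out.length);
        delete[] out.data;          // consumer owns the buffer after a successful dequeue
    }
    return 0;
}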

.gitignore vendored

@@ -1,5 +1,6 @@
 # Xmake cache
 .xmake
+.cache
 .vscode
 .devenv
 .direnv
@@ -12,6 +13,7 @@ Rouring-Vision
 smart_trash_bin
 nnie-yolov3-demo
 dtof_sensor_driver
+compile_commands.json

 # Build Cache
 *.a


@@ -39,11 +39,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1734605856,
-      "narHash": "sha256-RfNqLS5f9s3aiRZIOjSbM8Vz7x2dT/7zwJhyt9TLEmE=",
+      "lastModified": 1736273305,
+      "narHash": "sha256-lZDdyP3Y9VBCYMTiA7rOXUuXnngLwqoZDHjTrRHeb8g=",
       "owner": "cachix",
       "repo": "devenv",
-      "rev": "f81cf7dc4cbfa46de11618af94e5983c5e600d8c",
+      "rev": "cc48e044747cef4e4f69897e20a100adf5b716a3",
       "type": "github"
     },
     "original": {
@@ -260,11 +260,11 @@
     },
     "nixpkgs_3": {
       "locked": {
-        "lastModified": 1734649271,
-        "narHash": "sha256-4EVBRhOjMDuGtMaofAIqzJbg4Ql7Ai0PSeuVZTHjyKQ=",
+        "lastModified": 1736012469,
+        "narHash": "sha256-/qlNWm/IEVVH7GfgAIyP6EsVZI6zjAx1cV5zNyrs+rI=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "d70bd19e0a38ad4790d3913bf08fcbfc9eeca507",
+        "rev": "8f3e1f807051e32d8c95cd12b9b421623850a34d",
         "type": "github"
       },
       "original": {

src/common.h Executable file

@@ -0,0 +1,11 @@
#ifndef __COMMON_H__
#define __COMMON_H__

#include "stdbool.h"
#include "stdint.h"

#ifndef ARRAY_LENGTH
#define ARRAY_LENGTH(arr) (sizeof(arr) / sizeof(arr[0]))
#endif

#endif
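
ARRAY_LENGTH only yields the element count for an actual array visible in the current scope; once the array has decayed to a pointer (for example as a function parameter), the macro silently computes sizeof(pointer) / sizeof(element) instead. A small illustrative snippet, not part of the commit (the fill helper is hypothetical):

#include <cstddef>
#include <cstdio>
#include "common.h"

static int table[8];

// Hypothetical helper: the parameter has decayed to a pointer, so calling
// ARRAY_LENGTH(arr) inside would NOT give 8 - the length must be passed in.
static void fill(int *arr, std::size_t len) {
    for (std::size_t i = 0; i < len; ++i) arr[i] = (int)i;
}

int main() {
    std::printf("%zu\n", ARRAY_LENGTH(table)); // prints 8
    fill(table, ARRAY_LENGTH(table));
    return 0;
}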


@@ -3,7 +3,6 @@
 #include "hi_comm_snap.h"
 #include "hi_common.h"
 #include "mpi_snap.h"
-#include "my_common.h"
 #include "sample_comm.h"
 #include "zlog.h"


@@ -5,7 +5,25 @@
 extern "C"{
 #endif

-#include "stdbool.h"
+#include "common.h"
+
+typedef enum {
+    VIDEO = 0,
+    H264,
+    H265,
+    JPEG,
+    AUDIO = 128,
+    AAC,
+    G711A, // PCMA
+    G711U, // PCMU
+} codec_type_t;
+
+typedef struct {
+    uint8_t *data;
+    uint32_t length;
+    uint64_t timestamp; // in microseconds
+} video_packet_t;

 bool eb3516VideoInit(void);
 bool eb3516VideoStart(void);
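
video_packet_t is the hand-off unit between the encoder side and the live555 source below: H26XDeviceSource::deliverFrame() releases the payload with delete[], and signalNewFrame() does the same when its queue is full, so a producer has to allocate the payload with new[] and give up ownership once the frame is handed over. A hedged sketch of such a producer; makeVideoPacket and the encoder buffer names are illustrative, not from the commit:

#include <cstdint>
#include <cstring>

#include "isp/eb3516_video.h"   // video_packet_t, codec_type_t
#include "H26XDeviceSource.hh"  // H26XDeviceSource::signalNewFrame

// Illustrative helper: copy one encoded frame out of the encoder's buffer into
// a heap block that the RTSP source is allowed to delete[] later.
static video_packet_t makeVideoPacket(const uint8_t *src, uint32_t len, uint64_t pts_us) {
    video_packet_t pkt;
    pkt.data = new uint8_t[len];
    std::memcpy(pkt.data, src, len);
    pkt.length = len;
    pkt.timestamp = pts_us;     // microseconds; 0 makes the source stamp "now"
    return pkt;
}

// Hypothetical call site on the encoder thread:
//   video_packet_t pkt = makeVideoPacket(streamBuf, streamLen, ptsUs);
//   source->signalNewFrame(pkt);   // on a full queue the source delete[]s pkt.data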


@@ -9,9 +9,9 @@
 #ifndef _BASE_DEVICE_SOURCE_HH
 #define _BASE_DEVICE_SOURCE_HH

-#include "msgbus/ipc_msg.h"
+#include "isp/eb3516_video.h"

-using FrameData = mpp_packet_t;
+using FrameData = video_packet_t;

 class BaseDeviceSource {
 public:


@@ -0,0 +1,94 @@
/**
 * @file H26XDeviceSource.cpp
 * @author
 * @brief live555 H.264/H.265 device source
 *
 * @copyright Copyright (c) 2022-2024 OurEDA
 */

#include "H26XDeviceSource.hh"

#include <cstdlib>
#include <cstring>

#include "GroupsockHelper.hh"

unsigned H26XDeviceSource::referenceCount = 0;
EventTriggerId H26XDeviceSource::eventTriggerId = 0;

H26XDeviceSource *H26XDeviceSource::createNew(UsageEnvironment &env, Boolean isH265) {
    return new H26XDeviceSource(env, isH265);
}

H26XDeviceSource::H26XDeviceSource(UsageEnvironment &env, Boolean isH265)
    : FramedSource(env), isH265(isH265), frameDataQueue(3) {
    if (eventTriggerId == 0) {
        eventTriggerId = envir().taskScheduler().createEventTrigger([](void *clientData) {
            ((H26XDeviceSource *) clientData)->deliverFrame();
        });
    }
    ++referenceCount;
}

H26XDeviceSource::~H26XDeviceSource() {
    --referenceCount;
    if (referenceCount == 0) {
        envir().taskScheduler().deleteEventTrigger(eventTriggerId);
        eventTriggerId = 0;
    }
    FrameData frameData;
    while (frameDataQueue.try_dequeue(frameData)) {
        delete[] frameData.data;
    }
}

codec_type_t H26XDeviceSource::getCodecType() {
    return isH265 ? H265 : H264;
}

void H26XDeviceSource::signalNewFrame(const FrameData &frameData) {
    if (!frameDataQueue.try_enqueue(frameData)) {
        envir() << "Stream data queue full!\n";
        delete[] frameData.data;
    }
    envir().taskScheduler().triggerEvent(eventTriggerId, this);
}

unsigned H26XDeviceSource::maxFrameSize() const {
    return 0;
}

void H26XDeviceSource::doGetNextFrame() {
    if (frameDataQueue.size_approx() > 0) {
        deliverFrame();
    }
}

void H26XDeviceSource::deliverFrame() {
    // Is the sink ready to receive data?
    if (!isCurrentlyAwaitingData()) return;
    // Take one frame out of the queue
    FrameData buffer;
    if (!frameDataQueue.try_dequeue(buffer)) return;
    // Handle the frame size (truncate if it exceeds what live555 can accept)
    if (buffer.length > fMaxSize) {
        fFrameSize = fMaxSize;
        fNumTruncatedBytes = buffer.length - fMaxSize;
    } else {
        fFrameSize = buffer.length;
    }
    // Set this frame's presentation time
    if (buffer.timestamp == 0) {
        gettimeofday(&fPresentationTime, NULL);
    } else {
        fPresentationTime.tv_sec = buffer.timestamp / 1000000;
        fPresentationTime.tv_usec = buffer.timestamp % 1000000;
    }
    // Copy the bitstream, stripping the 0x00000001 start code
    int offset = buffer.data[0] == 0 && buffer.data[1] == 0 && buffer.data[2] == 0 && buffer.data[3] == 1 ? 4 : 0;
    fFrameSize -= offset;
    memcpy(fTo, buffer.data + offset, fFrameSize);
    delete[] buffer.data;
    // Tell live555 the frame is ready
    afterGetting(this);
}
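
The H264/H265 discrete framers created in LiveServerMediaSubsession expect each delivered frame to be a bare NAL unit, which is why deliverFrame() strips the Annex-B start code before copying into fTo. The check above only recognizes the 4-byte 0x00000001 prefix; encoders also emit the 3-byte 0x000001 form, so a more defensive variant could look like the following sketch (not part of the commit):

#include <cstddef>
#include <cstdint>

// Length of an Annex-B start code at the front of the buffer (0, 3 or 4 bytes),
// guarding against frames shorter than the prefix itself.
static std::size_t startCodeLength(const uint8_t *data, std::size_t length) {
    if (length >= 4 && data[0] == 0 && data[1] == 0 && data[2] == 0 && data[3] == 1)
        return 4;
    if (length >= 3 && data[0] == 0 && data[1] == 0 && data[2] == 1)
        return 3;
    return 0;
}

// Inside deliverFrame() this would replace the fixed 4-byte check:
//   std::size_t offset = startCodeLength(buffer.data, buffer.length);
//   fFrameSize -= offset;
//   memcpy(fTo, buffer.data + offset, fFrameSize);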


@@ -0,0 +1,41 @@
/**
 * @file H26XDeviceSource.hh
 * @author
 * @brief live555 H.264/H.265 device source
 *
 * @copyright Copyright (c) 2022-2024 OurEDA
 */

#ifndef _H26X_DEVICE_SOURCE_HH
#define _H26X_DEVICE_SOURCE_HH

#include "FramedSource.hh"
#include "readerwritercircularbuffer.h"

#include "BaseDeviceSource.hh"

class H26XDeviceSource : public FramedSource, public BaseDeviceSource {
public:
    static H26XDeviceSource *createNew(UsageEnvironment &env, Boolean isH265);

public:
    virtual codec_type_t getCodecType();
    virtual void signalNewFrame(const FrameData &frameData);

protected:
    H26XDeviceSource(UsageEnvironment &env, Boolean isH265);
    virtual ~H26XDeviceSource();

    virtual unsigned maxFrameSize() const;
    virtual void doGetNextFrame();

private:
    void deliverFrame();

private:
    Boolean isH265;
    moodycamel::BlockingReaderWriterCircularBuffer<FrameData> frameDataQueue;

    static unsigned referenceCount;
    static EventTriggerId eventTriggerId;
};

#endif


@@ -24,25 +24,25 @@ LiveServerMediaSubsession::LiveServerMediaSubsession(UsageEnvironment &env, code
 LiveServerMediaSubsession::~LiveServerMediaSubsession() {}

 FramedSource *LiveServerMediaSubsession::createNewStreamSource(unsigned clientSessionId, unsigned &estBitrate) {
-    if (fCodecType == ROV_H264) {
+    if (fCodecType == H264) {
         estBitrate = 3000; // kbps, estimate
         H26XDeviceSource *streamSource = H26XDeviceSource::createNew(envir(), False);
         fDeviceSource = streamSource;
         return H264VideoStreamDiscreteFramer::createNew(envir(), streamSource, True);
-    } else if (fCodecType == ROV_H265) {
+    } else if (fCodecType == H265) {
         estBitrate = 2500;
         H26XDeviceSource *streamSource = H26XDeviceSource::createNew(envir(), True);
         fDeviceSource = streamSource;
         return H265VideoStreamDiscreteFramer::createNew(envir(), streamSource, True);
-    } else if (fCodecType == ROV_JPEG) {
+    } else if (fCodecType == JPEG) {
         estBitrate = 8000;
         MJPEGDeviceSource *streamSource = MJPEGDeviceSource::createNew(envir());
         fDeviceSource = streamSource;
         return streamSource;
-    } else if (fCodecType == ROV_AAC) {
+    } else if (fCodecType == AAC) {
         estBitrate = 64;
         return NULL;
-    } else if (fCodecType == ROV_G711A || fCodecType == ROV_G711U) {
+    } else if (fCodecType == G711A || fCodecType == G711U) {
         estBitrate = 128;
         return NULL;
     }
@@ -50,20 +50,20 @@ FramedSource *LiveServerMediaSubsession::createNewStreamSource(unsigned clientSe
 }

 RTPSink *LiveServerMediaSubsession::createNewRTPSink(Groupsock *rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource *inputSource) {
-    if (fCodecType == ROV_H264) {
+    if (fCodecType == H264) {
         OutPacketBuffer::increaseMaxSizeTo(512 * 1024);
         return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
-    } else if (fCodecType == ROV_H265) {
+    } else if (fCodecType == H265) {
         OutPacketBuffer::increaseMaxSizeTo(512 * 1024);
         return H265VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
-    } else if (fCodecType == ROV_JPEG) {
+    } else if (fCodecType == JPEG) {
         OutPacketBuffer::increaseMaxSizeTo(2 * 512 * 1024);
         return JPEGVideoRTPSink::createNew(envir(), rtpGroupsock);
-    } else if (fCodecType == ROV_AAC) {
+    } else if (fCodecType == AAC) {
         return SimpleRTPSink::createNew(envir(), rtpGroupsock, 0, 8000, "audio", "AAC", 1, False);
-    } else if (fCodecType == ROV_G711A) {
+    } else if (fCodecType == G711A) {
         return SimpleRTPSink::createNew(envir(), rtpGroupsock, 0, 8000, "audio", "PCMA", 1, False);
-    } else if (fCodecType == ROV_G711U) {
+    } else if (fCodecType == G711U) {
         return SimpleRTPSink::createNew(envir(), rtpGroupsock, 0, 8000, "audio", "PCMU", 1, False);
     }
     return NULL;


@@ -1,24 +0,0 @@
#
# rtsp/build.mk
# Build script for the rtsp module
#
# Copyright (c) 2022-2024 OurEDA
#

TARGET := rtsp

CUR_INCS := -I$(CUR_SRC_DIR) \
            -I$(ROV_HOME)/configs \
            -I$(ROV_HOME)/common \
            -I$(ROV_HOME)/modules \
            -I$(ROV_HOME)/modules/tcp/generated \
            -I$(ROV_OUT_DIR)/include \
            -I$(ROV_OUT_DIR)/include/groupsock \
            -I$(ROV_OUT_DIR)/include/BasicUsageEnvironment \
            -I$(ROV_OUT_DIR)/include/UsageEnvironment \
            -I$(ROV_OUT_DIR)/include/liveMedia
CUR_SRCS := $(wildcard $(CUR_SRC_DIR)/*.c) \
            $(wildcard $(CUR_SRC_DIR)/*.cpp)
CUR_STATIC_LIBS := -lliveMedia -lUsageEnvironment -lBasicUsageEnvironment -lgroupsock
CUR_DEPS := live555 readerwriterqueue mpp
$(call build_module,$(TARGET))


@@ -2,23 +2,25 @@
  * @file rtsp_server.cpp
  * @author
  * @brief live555 RTSP server
  *
  * @copyright Copyright (c) 2022-2024 OurEDA
  */

-#include <stdio.h>
-#include <pthread.h>
-
 #include "BasicUsageEnvironment.hh"
 #include "GroupsockHelper.hh"
 #include "liveMedia.hh"

-#include "module_init.h"
-#include "platform/log.h"
-#include "msgbus/ipc.h"
+#include <cstdio>
+#include <pthread.h>
+#include <stdio.h>
+
 #include "LiveServerMediaSubsession.hh"
+#include "zlog.h"
+
+static zlog_category_t *log_rtsp = nullptr;

 static const char *stream_name = "main";
-static const char *description = "Session streamed by the main camera of rouring ROV";
+static const char *description =
+    "Session streamed by the main camera of Hi3516";

 static volatile char stop_running;
 static pthread_t rtsp_thread;
@@ -29,104 +31,115 @@ static LiveServerMediaSubsession *videoSubsession;
 static LiveServerMediaSubsession *audioSubsession;

 static void announceURL(RTSPServer *rtspServer, ServerMediaSession *sms) {
-    if (rtspServer == NULL || sms == NULL) return; // sanity check
-    UsageEnvironment &env = rtspServer->envir();
-    env << "Play this stream using the URL ";
-    if (weHaveAnIPv4Address(env)) {
-        char *url = rtspServer->ipv4rtspURL(sms);
-        env << "\"" << url << "\"";
-        delete[] url;
-        if (weHaveAnIPv6Address(env)) env << " or ";
-    }
-    if (weHaveAnIPv6Address(env)) {
-        char *url = rtspServer->ipv6rtspURL(sms);
-        env << "\"" << url << "\"";
-        delete[] url;
-    }
-    env << "\n";
+    if (rtspServer == NULL || sms == NULL)
+        return; // sanity check
+    UsageEnvironment &env = rtspServer->envir();
+    env << "Play this stream using the URL ";
+    if (weHaveAnIPv4Address(env)) {
+        char *url = rtspServer->ipv4rtspURL(sms);
+        env << "\"" << url << "\"";
+        delete[] url;
+        if (weHaveAnIPv6Address(env))
+            env << " or ";
+    }
+    if (weHaveAnIPv6Address(env)) {
+        char *url = rtspServer->ipv6rtspURL(sms);
+        env << "\"" << url << "\"";
+        delete[] url;
+    }
+    env << "\n";
 }

-static void *rtsp_loop(void *arg) {
-    UNUSED(arg);
+static void *rtsp_loop([[maybe_unused]] void *arg) {
     env->taskScheduler().doEventLoop(&stop_running);
     return nullptr;
 }

 static void on_msg_received(ipc_request_t *req) {
     if (req->id == IPC_MSG_ID_RTSP_STREAM_VIDEO) {
-        const ipc_msg_rtsp_stream_t *packet = (const ipc_msg_rtsp_stream_t *)req->msg;
-        if (videoSubsession == nullptr || videoSubsession->deviceSource() == nullptr) {
+        const ipc_msg_rtsp_stream_t *packet =
+            (const ipc_msg_rtsp_stream_t *)req->msg;
+        if (videoSubsession == nullptr ||
+            videoSubsession->deviceSource() == nullptr) {
             delete[] packet->data;
             ipc_reply(req, nullptr);
             return;
         }
         videoSubsession->deviceSource()->signalNewFrame(*packet);
         ipc_reply(req, nullptr);
     }
 }

 static bool rtsp_start() {
-    LOG_DEBUG("Starting RTSP server...\n");
+    // Get logger
+    log_rtsp = zlog_get_category("eb3516_rtsp");
+    if (!log_rtsp) {
+        std::printf("Error: Failed to start rtsp!!!\n");
+        return false;
+    }
+    zlog_debug(log_rtsp, "Starting RTSP server...\n");
+
     ipc_register_callback(MOD_RTSP, on_msg_received);

     scheduler = BasicTaskScheduler::createNew();
     env = BasicUsageEnvironment::createNew(*scheduler);
     rtspServer = RTSPServer::createNew(*env, 554, nullptr, 10U);
     if (rtspServer == nullptr) {
         *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
         return false;
     }

     ipc_msg_mpp_stream_info_req stream_info_req;
     stream_info_req.num = 0;
     ipc_request_t req = IPC_REQUEST_INIT(MOD_MPP);
     req.id = IPC_MSG_ID_MPP_STREAM_INFO;
     req.msg = (const ipc_msg_t *)&stream_info_req;
     req.length = sizeof(ipc_msg_mpp_stream_info_req);
     int result = ipc_send(&req);
     if (result != IPC_OK) {
         *env << "Failed to get live stream params\n";
         return false;
     }

-    ipc_msg_mpp_stream_info_res *stream_info_res = &req.res->msg.mpp_stream_info_res;
-    if (stream_info_res->video_codec < ROV_AUDIO) {
-        videoSubsession = LiveServerMediaSubsession::createNew(*env, stream_info_res->video_codec);
-    }
-    if (stream_info_res->audio_codec > ROV_AUDIO) {
-        audioSubsession = LiveServerMediaSubsession::createNew(*env, stream_info_res->audio_codec);
-    }
-    free(req.res);
+    ipc_msg_mpp_stream_info_res *stream_info_res =
+        &req.res->msg.mpp_stream_info_res;
+    if (stream_info_res->video_codec < ROV_AUDIO) {
+        videoSubsession = LiveServerMediaSubsession::createNew(
+            *env, stream_info_res->video_codec);
+    }
+    if (stream_info_res->audio_codec > ROV_AUDIO) {
+        audioSubsession = LiveServerMediaSubsession::createNew(
+            *env, stream_info_res->audio_codec);
+    }
+    free(req.res);

-    ServerMediaSession *sms = ServerMediaSession::createNew(*env, stream_name, stream_name, description);
-    if (videoSubsession != nullptr) {
-        sms->addSubsession(videoSubsession);
-    }
-    if (audioSubsession != nullptr) {
-        sms->addSubsession(audioSubsession);
-    }
-    rtspServer->addServerMediaSession(sms);
-    announceURL(rtspServer, sms);
+    ServerMediaSession *sms = ServerMediaSession::createNew(
+        *env, stream_name, stream_name, description);
+    if (videoSubsession != nullptr) {
+        sms->addSubsession(videoSubsession);
+    }
+    if (audioSubsession != nullptr) {
+        sms->addSubsession(audioSubsession);
+    }
+    rtspServer->addServerMediaSession(sms);
+    announceURL(rtspServer, sms);

     stop_running = 0;
     result = pthread_create(&rtsp_thread, NULL, rtsp_loop, NULL);
     if (result != 0) {
         *env << "Create RTSP server thread failed, result: " << result << "\n";
         return false;
     }
     return true;
 }

 static void rtsp_stop() {
     stop_running = 1;
     pthread_join(rtsp_thread, NULL);
     videoSubsession = audioSubsession = nullptr;
     // If a client is still connected when the RTSP server is destroyed the connection
     // is not closed automatically, so close all client sessions before tearing down
     rtspServer->closeAllClientSessionsForServerMediaSession(stream_name);
     Medium::close(rtspServer);
     if (env && !env->reclaim()) {
         fprintf(stderr, "!!! UsageEnvironment release failed !!!\n");
     }
     delete scheduler;
 }
-
-MODULE_RUN(rtsp_start, rtsp_stop, 09);


@@ -1,8 +0,0 @@
#ifndef __MY_COMMON_H__
#define __MY_COMMON_H__

#ifndef ARRAY_LENGTH
#define ARRAY_LENGTH(arr) (sizeof(arr) / sizeof(arr[0]))
#endif

#endif


@@ -1,5 +1,5 @@
 add_rules("mode.debug", "mode.release")
-includes("Hi3516_SDK.lua", "live555.lua")
+includes("Hi3516_SDK.lua", "live555.lua", "readerwriterqueue.lua")

 --- Add Cross Compile Toolchain
 toolchain("arm-himix200-linux")
@@ -20,13 +20,14 @@ add_requires(
     "zlog 1.2.17",
     "mongoose 7.15",
     "live555 2024.11.28",
+    "readerwriterqueue 16b48ae1148284e7b40abf72167206a4390a4592",
     { system = false }
 )

 target("ISP")
     set_kind("static")
     add_files("src/modules/isp/*.c")
-    add_includedirs("src")
+    add_includedirs("src", "src/modules")
     add_deps("sample_common")
     add_packages("zlog")
 target_end()
@@ -34,7 +35,7 @@ target_end()
 target("NNIE")
     set_kind("static")
     add_files("src/modules/nnie/*.c")
-    add_includedirs("src")
+    add_includedirs("src", "src/modules")
     add_deps("sample_common", "sample_svp")
     add_packages("zlog")
 target_end()
@@ -42,8 +43,8 @@ target_end()

 target("RTSP")
     set_kind("static")
     add_files("src/modules/rtsp/*.cpp")
-    add_includedirs("src")
-    add_packages("live555", "zlog")
+    add_includedirs("src", "src/modules")
+    add_packages("live555", "zlog", "readerwriterqueue")
     add_links("pthread")
 target_end()