add readerwriterqueue package and try to transplant the rouring RTSP module
parent c289d4ee7d
commit 8567012936
@@ -0,0 +1,22 @@
package("readerwritercircularbuffer")
    set_kind("library")
    set_homepage("https://github.com/cameron314/readerwriterqueue")

    set_urls("https://github.com/cameron314/readerwriterqueue/archive/$(version).zip")
    add_versions("16b48ae1148284e7b40abf72167206a4390a4592", "11758ab1b2ec96217245b32222edbcae76b686c9d795973907223ce0b720541f")

    add_deps("cmake")

    on_install(function (package)
        local configs = {}
        table.insert(configs, "-DCMAKE_BUILD_TYPE=" .. (package:debug() and "Debug" or "Release"))
        table.insert(configs, "-DBUILD_SHARED_LIBS=" .. (package:config("shared") and "ON" or "OFF"))
        import("package.tools.cmake").install(package, configs)
    end)

    on_test(function (package)
        assert(package:has_cxxtypes(
            "BlockingReaderWriterCircularBuffer",
            {includes = "readerwritercircularbuffer.h"}))
    end)
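For reference, the BlockingReaderWriterCircularBuffer provided by this package is a bounded single-producer/single-consumer queue. Below is a minimal sketch of the pattern the RTSP sources in this commit rely on; the Frame type and sizes are illustrative, not part of the commit.

#include <cstdint>
#include <thread>

#include "readerwritercircularbuffer.h"

// Illustrative payload; stands in for the video_packet_t used later in this commit.
struct Frame {
    uint8_t *data;
    uint32_t length;
};

int main() {
    // Bounded single-producer/single-consumer queue with room for 3 frames,
    // the same capacity H26XDeviceSource uses below.
    moodycamel::BlockingReaderWriterCircularBuffer<Frame> queue(3);

    std::thread producer([&] {
        Frame f{new uint8_t[16], 16};
        if (!queue.try_enqueue(f)) {  // non-blocking; drop the frame if the queue is full
            delete[] f.data;
        }
    });

    Frame out;
    if (queue.wait_dequeue_timed(out, 100 * 1000)) {  // wait up to 100 ms (argument is in microseconds)
        delete[] out.data;
    }
    producer.join();
    return 0;
}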
@@ -1,5 +1,6 @@
# Xmake cache
.xmake
.cache
.vscode
.devenv
.direnv
@@ -12,6 +13,7 @@ Rouring-Vision
smart_trash_bin
nnie-yolov3-demo
dtof_sensor_driver
compile_commands.json

# Build Cache
*.a
flake.lock
@@ -39,11 +39,11 @@
        ]
      },
      "locked": {
-       "lastModified": 1734605856,
-       "narHash": "sha256-RfNqLS5f9s3aiRZIOjSbM8Vz7x2dT/7zwJhyt9TLEmE=",
+       "lastModified": 1736273305,
+       "narHash": "sha256-lZDdyP3Y9VBCYMTiA7rOXUuXnngLwqoZDHjTrRHeb8g=",
        "owner": "cachix",
        "repo": "devenv",
-       "rev": "f81cf7dc4cbfa46de11618af94e5983c5e600d8c",
+       "rev": "cc48e044747cef4e4f69897e20a100adf5b716a3",
        "type": "github"
      },
      "original": {
@@ -260,11 +260,11 @@
    },
    "nixpkgs_3": {
      "locked": {
-       "lastModified": 1734649271,
-       "narHash": "sha256-4EVBRhOjMDuGtMaofAIqzJbg4Ql7Ai0PSeuVZTHjyKQ=",
+       "lastModified": 1736012469,
+       "narHash": "sha256-/qlNWm/IEVVH7GfgAIyP6EsVZI6zjAx1cV5zNyrs+rI=",
        "owner": "NixOS",
        "repo": "nixpkgs",
-       "rev": "d70bd19e0a38ad4790d3913bf08fcbfc9eeca507",
+       "rev": "8f3e1f807051e32d8c95cd12b9b421623850a34d",
        "type": "github"
      },
      "original": {
@@ -0,0 +1,11 @@
#ifndef __COMMON_H__
#define __COMMON_H__

#include "stdbool.h"
#include "stdint.h"

#ifndef ARRAY_LENGTH
#define ARRAY_LENGTH(arr) (sizeof(arr) / sizeof(arr[0]))
#endif

#endif
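ARRAY_LENGTH only yields the element count for genuine arrays; applied to a pointer it silently computes nonsense. A small usage sketch, with illustrative values not taken from this commit:

#include <cstdio>

#include "common.h"  // assumed to be reachable as "common.h" on the include path

int main() {
    static const unsigned bitrates_kbps[] = {2500, 3000, 8000};  // illustrative table
    // sizeof(bitrates_kbps) / sizeof(bitrates_kbps[0]) == 3
    for (unsigned i = 0; i < ARRAY_LENGTH(bitrates_kbps); ++i) {
        std::printf("%u kbps\n", bitrates_kbps[i]);
    }
    return 0;
}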
@@ -3,7 +3,6 @@
#include "hi_comm_snap.h"
#include "hi_common.h"
#include "mpi_snap.h"
-#include "my_common.h"
#include "sample_comm.h"
#include "zlog.h"

@@ -5,7 +5,25 @@
extern "C"{
#endif

+#include "stdbool.h"
+#include "common.h"
+
+typedef enum {
+    VIDEO = 0,
+    H264,
+    H265,
+    JPEG,
+
+    AUDIO = 128,
+    AAC,
+    G711A,  // PCMA
+    G711U,  // PCMU
+} codec_type_t;
+
+typedef struct {
+    uint8_t *data;
+    uint32_t length;
+    uint64_t timestamp;  // in microseconds
+} video_packet_t;
+
bool eb3516VideoInit(void);
bool eb3516VideoStart(void);
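video_packet_t hands a heap buffer to its consumer (H26XDeviceSource later releases it with delete[]) and carries a microsecond timestamp. A hedged sketch of how a producer might package an encoded frame; make_packet is an assumption for illustration, not a HiSilicon or project API:

#include <cstring>
#include <sys/time.h>

#include "isp/eb3516_video.h"

// Hypothetical helper: copy an encoded bitstream into a video_packet_t.
// Ownership of packet.data passes to the consumer, which frees it with delete[].
static video_packet_t make_packet(const uint8_t *bitstream, uint32_t length) {
    video_packet_t packet;
    packet.data = new uint8_t[length];
    std::memcpy(packet.data, bitstream, length);
    packet.length = length;

    struct timeval now;
    gettimeofday(&now, nullptr);
    packet.timestamp = (uint64_t)now.tv_sec * 1000000 + now.tv_usec;  // microseconds
    return packet;
}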
@@ -9,9 +9,9 @@
#ifndef _BASE_DEVICE_SOURCE_HH
#define _BASE_DEVICE_SOURCE_HH

-#include "msgbus/ipc_msg.h"
+#include "isp/eb3516_video.h"

-using FrameData = mpp_packet_t;
+using FrameData = video_packet_t;

class BaseDeviceSource {
public:
@@ -0,0 +1,94 @@
/**
 * @file H26XDeviceSource.cpp
 * @author 吴晨
 * @brief live555 adapter source
 *
 * @copyright Copyright (c) 2022-2024 OurEDA
 */

#include "H26XDeviceSource.hh"

#include <cstdlib>
#include <cstring>
#include "GroupsockHelper.hh"

unsigned H26XDeviceSource::referenceCount = 0;
EventTriggerId H26XDeviceSource::eventTriggerId = 0;

H26XDeviceSource *H26XDeviceSource::createNew(UsageEnvironment &env, Boolean isH265) {
    return new H26XDeviceSource(env, isH265);
}

H26XDeviceSource::H26XDeviceSource(UsageEnvironment &env, Boolean isH265)
    : FramedSource(env), isH265(isH265), frameDataQueue(3) {
    if (eventTriggerId == 0) {
        eventTriggerId = envir().taskScheduler().createEventTrigger([](void *clientData) {
            ((H26XDeviceSource *) clientData)->deliverFrame();
        });
    }
    ++referenceCount;
}

H26XDeviceSource::~H26XDeviceSource() {
    --referenceCount;
    if (referenceCount == 0) {
        envir().taskScheduler().deleteEventTrigger(eventTriggerId);
        eventTriggerId = 0;
    }
    FrameData frameData;
    while (frameDataQueue.try_dequeue(frameData)) {
        delete[] frameData.data;
    }
}

codec_type_t H26XDeviceSource::getCodecType() {
    return isH265 ? H265 : H264;
}

void H26XDeviceSource::signalNewFrame(const FrameData &frameData) {
    if (!frameDataQueue.try_enqueue(frameData)) {
        envir() << "Stream data queue full!\n";
        delete[] frameData.data;
    }
    envir().taskScheduler().triggerEvent(eventTriggerId, this);
}

unsigned H26XDeviceSource::maxFrameSize() const {
    return 0;
}

void H26XDeviceSource::doGetNextFrame() {
    if (frameDataQueue.size_approx() > 0) {
        deliverFrame();
    }
}

void H26XDeviceSource::deliverFrame() {
    // Is the sink ready to receive data?
    if (!isCurrentlyAwaitingData()) return;
    // Take one frame out of the queue
    FrameData buffer;
    if (!frameDataQueue.try_dequeue(buffer)) return;
    // Handle frame size / truncation
    if (buffer.length > fMaxSize) {
        fFrameSize = fMaxSize;
        fNumTruncatedBytes = buffer.length - fMaxSize;
    } else {
        fFrameSize = buffer.length;
    }
    // Set the presentation time for this frame
    if (buffer.timestamp == 0) {
        gettimeofday(&fPresentationTime, NULL);
    } else {
        fPresentationTime.tv_sec = buffer.timestamp / 1000000;
        fPresentationTime.tv_usec = buffer.timestamp % 1000000;
    }
    // Copy the bitstream
    // Strip the start code 0x00000001
    int offset = buffer.data[0] == 0 && buffer.data[1] == 0 && buffer.data[2] == 0 && buffer.data[3] == 1 ? 4 : 0;
    fFrameSize -= offset;
    memcpy(fTo, buffer.data + offset, fFrameSize);
    delete[] buffer.data;
    // Notify live555
    afterGetting(this);
}
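deliverFrame strips a leading 4-byte Annex-B start code because H264VideoStreamDiscreteFramer and H265VideoStreamDiscreteFramer expect one NAL unit per frame with no start code. A hedged sketch of a stripper that also covers the 3-byte form, in case the encoder emits it; this helper is not part of the commit:

#include <cstdint>

// Returns the number of leading Annex-B start-code bytes (0, 3 or 4).
// The code above only handles the 4-byte form 0x00000001; some encoders
// also emit the 3-byte form 0x000001 for non-first NAL units.
static unsigned startCodeLength(const uint8_t *data, uint32_t length) {
    if (length >= 4 && data[0] == 0 && data[1] == 0 && data[2] == 0 && data[3] == 1)
        return 4;
    if (length >= 3 && data[0] == 0 && data[1] == 0 && data[2] == 1)
        return 3;
    return 0;
}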
@@ -0,0 +1,41 @@
/**
 * @file H26XDeviceSource.hh
 * @author 吴晨
 * @brief live555 adapter source
 *
 * @copyright Copyright (c) 2022-2024 OurEDA
 */

#ifndef _H26X_DEVICE_SOURCE_HH
#define _H26X_DEVICE_SOURCE_HH

#include "FramedSource.hh"
#include "readerwritercircularbuffer.h"

#include "BaseDeviceSource.hh"

class H26XDeviceSource : public FramedSource, public BaseDeviceSource {
public:
    static H26XDeviceSource *createNew(UsageEnvironment &env, Boolean isH265);

public:
    virtual codec_type_t getCodecType();
    virtual void signalNewFrame(const FrameData &frameData);

protected:
    H26XDeviceSource(UsageEnvironment &env, Boolean isH265);
    virtual ~H26XDeviceSource();
    virtual unsigned maxFrameSize() const;
    virtual void doGetNextFrame();

private:
    void deliverFrame();

private:
    Boolean isH265;
    moodycamel::BlockingReaderWriterCircularBuffer<FrameData> frameDataQueue;
    static unsigned referenceCount;
    static EventTriggerId eventTriggerId;
};

#endif
@@ -24,25 +24,25 @@ LiveServerMediaSubsession::LiveServerMediaSubsession(UsageEnvironment &env, code
LiveServerMediaSubsession::~LiveServerMediaSubsession() {}

FramedSource *LiveServerMediaSubsession::createNewStreamSource(unsigned clientSessionId, unsigned &estBitrate) {
-    if (fCodecType == ROV_H264) {
+    if (fCodecType == H264) {
        estBitrate = 3000;  // kbps, estimate
        H26XDeviceSource *streamSource = H26XDeviceSource::createNew(envir(), False);
        fDeviceSource = streamSource;
        return H264VideoStreamDiscreteFramer::createNew(envir(), streamSource, True);
-    } else if (fCodecType == ROV_H265) {
+    } else if (fCodecType == H265) {
        estBitrate = 2500;
        H26XDeviceSource *streamSource = H26XDeviceSource::createNew(envir(), True);
        fDeviceSource = streamSource;
        return H265VideoStreamDiscreteFramer::createNew(envir(), streamSource, True);
-    } else if (fCodecType == ROV_JPEG) {
+    } else if (fCodecType == JPEG) {
        estBitrate = 8000;
        MJPEGDeviceSource *streamSource = MJPEGDeviceSource::createNew(envir());
        fDeviceSource = streamSource;
        return streamSource;
-    } else if (fCodecType == ROV_AAC) {
+    } else if (fCodecType == AAC) {
        estBitrate = 64;
        return NULL;
-    } else if (fCodecType == ROV_G711A || fCodecType == ROV_G711U) {
+    } else if (fCodecType == G711A || fCodecType == G711U) {
        estBitrate = 128;
        return NULL;
    }
@@ -50,20 +50,20 @@ FramedSource *LiveServerMediaSubsession::createNewStreamSource(unsigned clientSe
}

RTPSink *LiveServerMediaSubsession::createNewRTPSink(Groupsock *rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource *inputSource) {
-    if (fCodecType == ROV_H264) {
+    if (fCodecType == H264) {
        OutPacketBuffer::increaseMaxSizeTo(512 * 1024);
        return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
-    } else if (fCodecType == ROV_H265) {
+    } else if (fCodecType == H265) {
        OutPacketBuffer::increaseMaxSizeTo(512 * 1024);
        return H265VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
-    } else if (fCodecType == ROV_JPEG) {
+    } else if (fCodecType == JPEG) {
        OutPacketBuffer::increaseMaxSizeTo(2 * 512 * 1024);
        return JPEGVideoRTPSink::createNew(envir(), rtpGroupsock);
-    } else if (fCodecType == ROV_AAC) {
+    } else if (fCodecType == AAC) {
        return SimpleRTPSink::createNew(envir(), rtpGroupsock, 0, 8000, "audio", "AAC", 1, False);
-    } else if (fCodecType == ROV_G711A) {
+    } else if (fCodecType == G711A) {
        return SimpleRTPSink::createNew(envir(), rtpGroupsock, 0, 8000, "audio", "PCMA", 1, False);
-    } else if (fCodecType == ROV_G711U) {
+    } else if (fCodecType == G711U) {
        return SimpleRTPSink::createNew(envir(), rtpGroupsock, 0, 8000, "audio", "PCMU", 1, False);
    }
    return NULL;
@@ -1,24 +0,0 @@
#
# rtsp/build.mk
# Build script for the rtsp module
#
# Copyright (c) 2022-2024 OurEDA
#

TARGET := rtsp
CUR_INCS := -I$(CUR_SRC_DIR) \
            -I$(ROV_HOME)/configs \
            -I$(ROV_HOME)/common \
            -I$(ROV_HOME)/modules \
            -I$(ROV_HOME)/modules/tcp/generated \
            -I$(ROV_OUT_DIR)/include \
            -I$(ROV_OUT_DIR)/include/groupsock \
            -I$(ROV_OUT_DIR)/include/BasicUsageEnvironment \
            -I$(ROV_OUT_DIR)/include/UsageEnvironment \
            -I$(ROV_OUT_DIR)/include/liveMedia
CUR_SRCS := $(wildcard $(CUR_SRC_DIR)/*.c) \
            $(wildcard $(CUR_SRC_DIR)/*.cpp)
CUR_STATIC_LIBS := -lliveMedia -lUsageEnvironment -lBasicUsageEnvironment -lgroupsock
CUR_DEPS := live555 readerwriterqueue mpp

$(call build_module,$(TARGET))
@@ -2,23 +2,25 @@
 * @file rtsp_server.cpp
 * @author 吴晨
 * @brief RTSP server based on live555
 *
 *
 * @copyright Copyright (c) 2022-2024 OurEDA
 */

#include <stdio.h>
#include <pthread.h>
#include "BasicUsageEnvironment.hh"
#include "GroupsockHelper.hh"
#include "liveMedia.hh"
#include <cstdio>
#include <pthread.h>
#include <stdio.h>

#include "module_init.h"
#include "platform/log.h"
#include "msgbus/ipc.h"
#include "LiveServerMediaSubsession.hh"
#include "zlog.h"

static zlog_category_t *log_rtsp = nullptr;

static const char *stream_name = "main";
-static const char *description = "Session streamed by the main camera of rouring ROV";
+static const char *description =
+    "Session streamed by the main camera of Hi3516";

static volatile char stop_running;
static pthread_t rtsp_thread;
@@ -29,104 +31,115 @@ static LiveServerMediaSubsession *videoSubsession;
static LiveServerMediaSubsession *audioSubsession;

static void announceURL(RTSPServer *rtspServer, ServerMediaSession *sms) {
-    if (rtspServer == NULL || sms == NULL) return;  // sanity check
-    UsageEnvironment &env = rtspServer->envir();
-    env << "Play this stream using the URL ";
-    if (weHaveAnIPv4Address(env)) {
-        char *url = rtspServer->ipv4rtspURL(sms);
-        env << "\"" << url << "\"";
-        delete[] url;
-        if (weHaveAnIPv6Address(env)) env << " or ";
-    }
-    if (weHaveAnIPv6Address(env)) {
-        char *url = rtspServer->ipv6rtspURL(sms);
-        env << "\"" << url << "\"";
-        delete[] url;
-    }
-    env << "\n";
+  if (rtspServer == NULL || sms == NULL)
+    return;  // sanity check
+  UsageEnvironment &env = rtspServer->envir();
+  env << "Play this stream using the URL ";
+  if (weHaveAnIPv4Address(env)) {
+    char *url = rtspServer->ipv4rtspURL(sms);
+    env << "\"" << url << "\"";
+    delete[] url;
+    if (weHaveAnIPv6Address(env))
+      env << " or ";
+  }
+  if (weHaveAnIPv6Address(env)) {
+    char *url = rtspServer->ipv6rtspURL(sms);
+    env << "\"" << url << "\"";
+    delete[] url;
+  }
+  env << "\n";
}

-static void *rtsp_loop(void *arg) {
-    UNUSED(arg);
-    env->taskScheduler().doEventLoop(&stop_running);
-    return nullptr;
+static void *rtsp_loop([[maybe_unused]] void *arg) {
+  env->taskScheduler().doEventLoop(&stop_running);
+  return nullptr;
}

static void on_msg_received(ipc_request_t *req) {
-    if (req->id == IPC_MSG_ID_RTSP_STREAM_VIDEO) {
-        const ipc_msg_rtsp_stream_t *packet = (const ipc_msg_rtsp_stream_t *)req->msg;
-        if (videoSubsession == nullptr || videoSubsession->deviceSource() == nullptr) {
-            delete[] packet->data;
-            ipc_reply(req, nullptr);
-            return;
-        }
-        videoSubsession->deviceSource()->signalNewFrame(*packet);
-        ipc_reply(req, nullptr);
+  if (req->id == IPC_MSG_ID_RTSP_STREAM_VIDEO) {
+    const ipc_msg_rtsp_stream_t *packet =
+        (const ipc_msg_rtsp_stream_t *)req->msg;
+    if (videoSubsession == nullptr ||
+        videoSubsession->deviceSource() == nullptr) {
+      delete[] packet->data;
+      ipc_reply(req, nullptr);
+      return;
+    }
+    videoSubsession->deviceSource()->signalNewFrame(*packet);
+    ipc_reply(req, nullptr);
  }
}

static bool rtsp_start() {
-    LOG_DEBUG("Starting RTSP server...\n");
-    ipc_register_callback(MOD_RTSP, on_msg_received);
+  // Get logger
+  log_rtsp = zlog_get_category("eb3516_rtsp");
+  if (!log_rtsp) {
+    std::printf("Error: Failed to start rtsp!!!\n");
+    return false;
+  }
+  zlog_debug(log_rtsp, "Starting RTSP server...\n");
+  ipc_register_callback(MOD_RTSP, on_msg_received);

-    scheduler = BasicTaskScheduler::createNew();
-    env = BasicUsageEnvironment::createNew(*scheduler);
-    rtspServer = RTSPServer::createNew(*env, 554, nullptr, 10U);
-    if (rtspServer == nullptr) {
-        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
-        return false;
-    }
+  scheduler = BasicTaskScheduler::createNew();
+  env = BasicUsageEnvironment::createNew(*scheduler);
+  rtspServer = RTSPServer::createNew(*env, 554, nullptr, 10U);
+  if (rtspServer == nullptr) {
+    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
+    return false;
+  }

-    ipc_msg_mpp_stream_info_req stream_info_req;
-    stream_info_req.num = 0;
-    ipc_request_t req = IPC_REQUEST_INIT(MOD_MPP);
-    req.id = IPC_MSG_ID_MPP_STREAM_INFO;
-    req.msg = (const ipc_msg_t *)&stream_info_req;
-    req.length = sizeof(ipc_msg_mpp_stream_info_req);
-    int result = ipc_send(&req);
-    if (result != IPC_OK) {
-        *env << "Failed to get live stream params\n";
-        return false;
-    }
-    ipc_msg_mpp_stream_info_res *stream_info_res = &req.res->msg.mpp_stream_info_res;
-    if (stream_info_res->video_codec < ROV_AUDIO) {
-        videoSubsession = LiveServerMediaSubsession::createNew(*env, stream_info_res->video_codec);
-    }
-    if (stream_info_res->audio_codec > ROV_AUDIO) {
-        audioSubsession = LiveServerMediaSubsession::createNew(*env, stream_info_res->audio_codec);
-    }
-    free(req.res);
+  ipc_msg_mpp_stream_info_req stream_info_req;
+  stream_info_req.num = 0;
+  ipc_request_t req = IPC_REQUEST_INIT(MOD_MPP);
+  req.id = IPC_MSG_ID_MPP_STREAM_INFO;
+  req.msg = (const ipc_msg_t *)&stream_info_req;
+  req.length = sizeof(ipc_msg_mpp_stream_info_req);
+  int result = ipc_send(&req);
+  if (result != IPC_OK) {
+    *env << "Failed to get live stream params\n";
+    return false;
+  }
+  ipc_msg_mpp_stream_info_res *stream_info_res =
+      &req.res->msg.mpp_stream_info_res;
+  if (stream_info_res->video_codec < ROV_AUDIO) {
+    videoSubsession = LiveServerMediaSubsession::createNew(
+        *env, stream_info_res->video_codec);
+  }
+  if (stream_info_res->audio_codec > ROV_AUDIO) {
+    audioSubsession = LiveServerMediaSubsession::createNew(
+        *env, stream_info_res->audio_codec);
+  }
+  free(req.res);

-    ServerMediaSession *sms = ServerMediaSession::createNew(*env, stream_name, stream_name, description);
-    if (videoSubsession != nullptr) {
-        sms->addSubsession(videoSubsession);
-    }
-    if (audioSubsession != nullptr) {
-        sms->addSubsession(audioSubsession);
-    }
-    rtspServer->addServerMediaSession(sms);
-    announceURL(rtspServer, sms);
+  ServerMediaSession *sms = ServerMediaSession::createNew(
+      *env, stream_name, stream_name, description);
+  if (videoSubsession != nullptr) {
+    sms->addSubsession(videoSubsession);
+  }
+  if (audioSubsession != nullptr) {
+    sms->addSubsession(audioSubsession);
+  }
+  rtspServer->addServerMediaSession(sms);
+  announceURL(rtspServer, sms);

-    stop_running = 0;
-    result = pthread_create(&rtsp_thread, NULL, rtsp_loop, NULL);
-    if (result != 0) {
-        *env << "Create RTSP server thread failed, result: " << result << "\n";
-        return false;
-    }
-    return true;
+  stop_running = 0;
+  result = pthread_create(&rtsp_thread, NULL, rtsp_loop, NULL);
+  if (result != 0) {
+    *env << "Create RTSP server thread failed, result: " << result << "\n";
+    return false;
+  }
+  return true;
}

static void rtsp_stop() {
-    stop_running = 1;
-    pthread_join(rtsp_thread, NULL);
-    videoSubsession = audioSubsession = nullptr;
-    // If clients are still connected when the RTSP server is destroyed, their connections are not closed, so close them before tearing the server down
-    rtspServer->closeAllClientSessionsForServerMediaSession(stream_name);
-    Medium::close(rtspServer);
-    if (env && !env->reclaim()) {
-        fprintf(stderr, "!!! UsageEnvironment release failed !!!\n");
-    }
-    delete scheduler;
+  stop_running = 1;
+  pthread_join(rtsp_thread, NULL);
+  videoSubsession = audioSubsession = nullptr;
+  // If clients are still connected when the RTSP server is destroyed, their
+  // connections are not closed, so close them before tearing the server down
+  rtspServer->closeAllClientSessionsForServerMediaSession(stream_name);
+  Medium::close(rtspServer);
+  if (env && !env->reclaim()) {
+    fprintf(stderr, "!!! UsageEnvironment release failed !!!\n");
+  }
+  delete scheduler;
}

MODULE_RUN(rtsp_start, rtsp_stop, 09);
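Frames reach this server over the msgbus IPC: a producer module sends IPC_MSG_ID_RTSP_STREAM_VIDEO requests whose payload on_msg_received forwards to the video subsession. A hedged sketch of that producer side, mirroring the ipc_request_t pattern used in rtsp_start above; the exact layout of ipc_msg_rtsp_stream_t (assumed here to expose data/length/timestamp like video_packet_t) and whether req.res must be freed for an empty reply are not shown in this commit.

#include <cstdint>
#include <cstring>

#include "msgbus/ipc.h"
#include "msgbus/ipc_msg.h"

// Hypothetical producer: hand one encoded frame to the RTSP module.
static bool send_frame_to_rtsp(const uint8_t *nalu, uint32_t length, uint64_t timestamp_us) {
    ipc_msg_rtsp_stream_t stream;
    stream.data = new uint8_t[length];       // ownership passes to the RTSP module,
    std::memcpy(stream.data, nalu, length);  // which releases it with delete[]
    stream.length = length;
    stream.timestamp = timestamp_us;

    ipc_request_t req = IPC_REQUEST_INIT(MOD_RTSP);
    req.id = IPC_MSG_ID_RTSP_STREAM_VIDEO;
    req.msg = (const ipc_msg_t *)&stream;
    req.length = sizeof(stream);
    return ipc_send(&req) == IPC_OK;
}

Once rtsp_start succeeds, the announced URL has the form rtsp://<device-ip>:554/main, matching stream_name and the port passed to RTSPServer::createNew.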
@@ -1,8 +0,0 @@
#ifndef __MY_COMMON_H__
#define __MY_COMMON_H__

#ifndef ARRAY_LENGTH
#define ARRAY_LENGTH(arr) (sizeof(arr) / sizeof(arr[0]))
#endif

#endif
xmake.lua
@@ -1,5 +1,5 @@
add_rules("mode.debug", "mode.release")
-includes("Hi3516_SDK.lua", "live555.lua")
+includes("Hi3516_SDK.lua", "live555.lua", "readerwriterqueue.lua")

--- Add Cross Compile Toolchain
toolchain("arm-himix200-linux")
@@ -20,13 +20,14 @@ add_requires(
    "zlog 1.2.17",
    "mongoose 7.15",
    "live555 2024.11.28",
+   "readerwriterqueue 16b48ae1148284e7b40abf72167206a4390a4592",
    { system = false }
)

target("ISP")
    set_kind("static")
    add_files("src/modules/isp/*.c")
-   add_includedirs("src")
+   add_includedirs("src", "src/modules")
    add_deps("sample_common")
    add_packages("zlog")
target_end()
@@ -34,7 +35,7 @@ target_end()
target("NNIE")
    set_kind("static")
    add_files("src/modules/nnie/*.c")
-   add_includedirs("src")
+   add_includedirs("src", "src/modules")
    add_deps("sample_common", "sample_svp")
    add_packages("zlog")
target_end()
@@ -42,8 +43,8 @@ target_end()
target("RTSP")
    set_kind("static")
    add_files("src/modules/rtsp/*.cpp")
-   add_includedirs("src")
-   add_packages("live555", "zlog")
+   add_includedirs("src", "src/modules")
+   add_packages("live555", "zlog", "readerwriterqueue")
    add_links("pthread")
target_end()