commit a33c1d5a08 by rayjay, 2024-03-28 11:15:19 +08:00
72 changed files with 2174 additions and 1172 deletions

@ -1 +1 @@
Subproject commit fca6d2328871fc6af75e215f89c3f1092ba5bb21
Subproject commit 04d1c47d2568f5ce1ff84260cefaf2754e514a5e

@ -1 +1 @@
Subproject commit 043853ee7c004e3e5d5bf3d06f7a82d97155b0d1
Subproject commit 527c0f5117b489fda78fcd123d446370ddd9ec9a


@ -44,7 +44,6 @@ Xinghua Zhao <(holychaossword@hotmail.com>
[Dw9](https://github.com/Dw9)
明月惊鹊 <mingyuejingque@gmail.com>
cgm <2958580318@qq.com>
hejilin <1724010622@qq.com>
alexliyu7352 <liyu7352@gmail.com>
cgm <2958580318@qq.com>
[haorui wang](https://github.com/HaoruiWang)
@ -104,3 +103,8 @@ WuPeng <wp@zafu.edu.cn>
[sandro-qiang](https://github.com/sandro-qiang)
[Paul Philippov](https://github.com/themactep)
[张传峰](https://github.com/zhang-chuanfeng)
[lidaofu-hub](https://github.com/lidaofu-hub)
[huangcaichun](https://github.com/huangcaichun)
[jamesZHANG500](https://github.com/jamesZHANG500)
[weidelong](https://github.com/wdl1697454803)
[小强先生](https://github.com/linshangqiang)


@ -141,8 +141,8 @@ if(GIT_FOUND)
endif()
configure_file(
${CMAKE_CURRENT_SOURCE_DIR}/version.h.ini
${CMAKE_CURRENT_BINARY_DIR}/version.h
${CMAKE_CURRENT_SOURCE_DIR}/ZLMVersion.h.ini
${CMAKE_CURRENT_BINARY_DIR}/ZLMVersion.h
@ONLY)
message(STATUS "Git version is ${BRANCH_NAME} ${COMMIT_HASH}/${COMMIT_TIME} ${BUILD_TIME}")
@ -532,3 +532,9 @@ endif ()
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/www" DESTINATION ${EXECUTABLE_OUTPUT_PATH})
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/conf/config.ini" DESTINATION ${EXECUTABLE_OUTPUT_PATH})
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/default.pem" DESTINATION ${EXECUTABLE_OUTPUT_PATH})
# VideoStack
# Copy the default background image used by VideoStack when there is no video stream
if (ENABLE_FFMPEG AND ENABLE_X264)
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/conf/novideo.yuv" DESTINATION ${EXECUTABLE_OUTPUT_PATH})
endif ()


@ -358,6 +358,11 @@ bash build_docker_images.sh
[sandro-qiang](https://github.com/sandro-qiang)
[Paul Philippov](https://github.com/themactep)
[张传峰](https://github.com/zhang-chuanfeng)
[lidaofu-hub](https://github.com/lidaofu-hub)
[huangcaichun](https://github.com/huangcaichun)
[jamesZHANG500](https://github.com/jamesZHANG500)
[weidelong](https://github.com/wdl1697454803)
[小强先生](https://github.com/linshangqiang)
We also thank JetBrains for their support of open source projects; this project is developed and debugged with CLion


@ -516,6 +516,11 @@ Thanks to all those who have supported this project in various ways, including b
[sandro-qiang](https://github.com/sandro-qiang)
[Paul Philippov](https://github.com/themactep)
[张传峰](https://github.com/zhang-chuanfeng)
[lidaofu-hub](https://github.com/lidaofu-hub)
[huangcaichun](https://github.com/huangcaichun)
[jamesZHANG500](https://github.com/jamesZHANG500)
[weidelong](https://github.com/wdl1697454803)
[小强先生](https://github.com/linshangqiang)
Also, thanks to JetBrains for their support of open source projects; we developed and debugged ZLMediaKit with CLion:


@ -177,6 +177,33 @@ typedef struct {
*/
void(API_CALL *on_mk_media_send_rtp_stop)(const char *vhost, const char *app, const char *stream, const char *ssrc, int err, const char *msg);
/**
* RTC SCTP connection state callbacks: connecting / connected / failed / closed
* @param rtc_transport the WebRTC transport object
*/
void(API_CALL *on_mk_rtc_sctp_connecting)(mk_rtc_transport rtc_transport);
void(API_CALL *on_mk_rtc_sctp_connected)(mk_rtc_transport rtc_transport);
void(API_CALL *on_mk_rtc_sctp_failed)(mk_rtc_transport rtc_transport);
void(API_CALL *on_mk_rtc_sctp_closed)(mk_rtc_transport rtc_transport);
/**
* RTC data channel send-data callback
* @param rtc_transport the WebRTC transport object
* @param msg data being sent
* @param len data length
*/
void(API_CALL *on_mk_rtc_sctp_send)(mk_rtc_transport rtc_transport, const uint8_t *msg, size_t len);
/**
* RTC data channel receive-data callback
* @param rtc_transport the WebRTC transport object
* @param streamId SCTP stream id
* @param ppid payload protocol identifier
* @param msg received data
* @param len data length
*/
void(API_CALL *on_mk_rtc_sctp_received)(mk_rtc_transport rtc_transport, uint16_t streamId, uint32_t ppid, const uint8_t *msg, size_t len);
} mk_events;
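A minimal usage sketch of the new callbacks (hypothetical application code, not part of this commit; it assumes the aggregate header mk_mediakit.h and leaves every other event null):
#include "mk_mediakit.h"
#include <stdio.h>
static void API_CALL on_sctp_connected(mk_rtc_transport transport) {
    printf("rtc sctp connected, transport=%p\n", (void *)transport);
}
static void API_CALL on_sctp_received(mk_rtc_transport transport, uint16_t streamId, uint32_t ppid, const uint8_t *msg, size_t len) {
    printf("datachannel recv: stream=%u ppid=%u len=%zu\n", (unsigned)streamId, (unsigned)ppid, len);
}
void register_rtc_sctp_events(void) {
    static mk_events events = {}; // zero-initialized, unused callbacks stay null
    events.on_mk_rtc_sctp_connected = on_sctp_connected;
    events.on_mk_rtc_sctp_received = on_sctp_received;
    mk_events_listen(&events);
}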


@ -352,6 +352,20 @@ API_EXPORT mk_auth_invoker API_CALL mk_auth_invoker_clone(const mk_auth_invoker
*/
API_EXPORT void API_CALL mk_auth_invoker_clone_release(const mk_auth_invoker ctx);
///////////////////////////////////////////WebRtcTransport/////////////////////////////////////////////
// C-API handle mapped onto the C++ WebRtcTransport object
typedef struct mk_rtc_transport_t *mk_rtc_transport;
/**
* Send data over the RTC data channel
* @param ctx the WebRTC transport object
* @param streamId SCTP stream id
* @param ppid payload protocol identifier
* @param msg data to send
* @param len data length
*/
API_EXPORT void API_CALL mk_rtc_send_datachannel(const mk_rtc_transport ctx, uint16_t streamId, uint32_t ppid, const char* msg, size_t len);
#ifdef __cplusplus
}
#endif
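For illustration, a hypothetical responder that replies as soon as the SCTP association is up; streamId 0 and ppid 51 (the WebRTC string payload identifier) are example choices, and the transport handle is only used inside the callback:
static void API_CALL on_sctp_connected_reply(mk_rtc_transport transport) {
    const char greeting[] = "hello from ZLMediaKit";
    // mk_rtc_send_datachannel itself hops to the transport's poller thread before sending
    mk_rtc_send_datachannel(transport, 0, 51, greeting, sizeof(greeting) - 1);
}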


@ -33,12 +33,11 @@ static TcpServer::Ptr shell_server;
#ifdef ENABLE_RTPPROXY
#include "Rtp/RtpServer.h"
static std::shared_ptr<RtpServer> rtpServer;
static RtpServer::Ptr rtpServer;
#endif
#ifdef ENABLE_WEBRTC
#include "../webrtc/WebRtcSession.h"
#include "../webrtc/WebRtcTransport.h"
static UdpServer::Ptr rtcServer_udp;
static TcpServer::Ptr rtcServer_tcp;
#endif
@ -305,10 +304,10 @@ API_EXPORT void API_CALL mk_webrtc_get_answer_sdp2(void *user_data, on_user_data
std::string offer_str = offer;
std::shared_ptr<void> ptr(user_data, user_data_free ? user_data_free : [](void *) {});
auto args = std::make_shared<WebRtcArgsUrl>(url);
WebRtcPluginManager::Instance().getAnswerSdp(*session, type, *args,
[offer_str, session, ptr, cb](const WebRtcInterface &exchanger) mutable {
WebRtcPluginManager::Instance().negotiateSdp(*session, type, *args, [offer_str, session, ptr, cb](const WebRtcInterface &exchanger) mutable {
auto &handler = const_cast<WebRtcInterface &>(exchanger);
try {
auto sdp_answer = exchangeSdp(exchanger, offer_str);
auto sdp_answer = handler.getAnswerSdp(offer_str);
cb(ptr.get(), sdp_answer.data(), nullptr);
} catch (std::exception &ex) {
cb(ptr.get(), nullptr, ex.what());


@ -15,6 +15,10 @@
#include "Rtsp/RtspSession.h"
#include "Record/MP4Recorder.h"
#ifdef ENABLE_WEBRTC
#include "webrtc/WebRtcTransport.h"
#endif
using namespace toolkit;
using namespace mediakit;
@ -167,6 +171,43 @@ API_EXPORT void API_CALL mk_events_listen(const mk_events *events){
sender.getMediaTuple().stream.c_str(), ssrc.c_str(), ex.getErrCode(), ex.what());
}
});
#ifdef ENABLE_WEBRTC
NoticeCenter::Instance().addListener(&s_tag, Broadcast::kBroadcastRtcSctpConnecting,[](BroadcastRtcSctpConnectArgs){
if (s_events.on_mk_rtc_sctp_connecting) {
s_events.on_mk_rtc_sctp_connecting((mk_rtc_transport)&sender);
}
});
NoticeCenter::Instance().addListener(&s_tag, Broadcast::kBroadcastRtcSctpConnected,[](BroadcastRtcSctpConnectArgs){
if (s_events.on_mk_rtc_sctp_connected) {
s_events.on_mk_rtc_sctp_connected((mk_rtc_transport)&sender);
}
});
NoticeCenter::Instance().addListener(&s_tag, Broadcast::kBroadcastRtcSctpFailed,[](BroadcastRtcSctpConnectArgs){
if (s_events.on_mk_rtc_sctp_failed) {
s_events.on_mk_rtc_sctp_failed((mk_rtc_transport)&sender);
}
});
NoticeCenter::Instance().addListener(&s_tag, Broadcast::kBroadcastRtcSctpClosed,[](BroadcastRtcSctpConnectArgs){
if (s_events.on_mk_rtc_sctp_closed) {
s_events.on_mk_rtc_sctp_closed((mk_rtc_transport)&sender);
}
});
NoticeCenter::Instance().addListener(&s_tag, Broadcast::kBroadcastRtcSctpSend,[](BroadcastRtcSctpSendArgs){
if (s_events.on_mk_rtc_sctp_send) {
s_events.on_mk_rtc_sctp_send((mk_rtc_transport)&sender, data, len);
}
});
NoticeCenter::Instance().addListener(&s_tag, Broadcast::kBroadcastRtcSctpReceived,[](BroadcastRtcSctpReceivedArgs){
if (s_events.on_mk_rtc_sctp_received) {
s_events.on_mk_rtc_sctp_received((mk_rtc_transport)&sender, streamId, ppid, msg, len);
}
});
#endif
});
}


@ -18,6 +18,10 @@
#include "Http/HttpClient.h"
#include "Rtsp/RtspSession.h"
#ifdef ENABLE_WEBRTC
#include "webrtc/WebRtcTransport.h"
#endif
using namespace toolkit;
using namespace mediakit;
@ -498,3 +502,21 @@ API_EXPORT void API_CALL mk_auth_invoker_clone_release(const mk_auth_invoker ctx
Broadcast::AuthInvoker *invoker = (Broadcast::AuthInvoker *)ctx;
delete invoker;
}
///////////////////////////////////////////WebRtcTransport/////////////////////////////////////////////
API_EXPORT void API_CALL mk_rtc_send_datachannel(const mk_rtc_transport ctx, uint16_t streamId, uint32_t ppid, const char *msg, size_t len) {
#ifdef ENABLE_WEBRTC
assert(ctx && msg);
WebRtcTransport *transport = (WebRtcTransport *)ctx;
std::string msg_str(msg, len);
std::weak_ptr<WebRtcTransport> weak_trans = transport->shared_from_this();
transport->getPoller()->async([streamId, ppid, msg_str, weak_trans]() {
// switch to the transport's poller thread before operating on it
if (auto trans = weak_trans.lock()) {
trans->sendDatachannel(streamId, ppid, msg_str.c_str(), msg_str.size());
}
});
#else
WarnL << "未启用webrtc功能, 编译时请开启ENABLE_WEBRTC";
#endif
}


@ -277,6 +277,8 @@ sampleMS=500
fastStart=0
#Whether MP4 on-demand playback (rtsp/rtmp/http-flv/ws-flv) loops the file
fileRepeat=0
#Whether MP4 recording uses the fmp4 format; if enabled, recordings interrupted by a power failure can still be opened normally
enableFmp4=0
[rtmp]
#RTMP handshake must complete within this time, otherwise the server closes the connection, unit: seconds
@ -329,6 +331,13 @@ opus_pt=100
#If the startSendRtp APIs are not used, this can be set to 0 to save memory
gop_cache=1
#Audio duration of each packet when sending G.711 RTP for GB28181; default 100 ms, range 20~180 ms (per GB28181-2016 C.2.4)
#Preferably a multiple of 20; the program automatically rounds up to a multiple of 20
rtp_g711_dur_ms = 100
#Socket buffer size for receiving UDP data
#4*1024*1024=4194304
udp_recv_socket_buffer=4194304
[rtc]
#RTC publish and play timeout
timeoutSec=15
@ -348,7 +357,7 @@ tcpPort = 8000
rembBitRate=0
#Audio codecs supported by RTC; codecs listed earlier have higher priority
#The example below lists all supported audio codecs
preferredCodecA=PCMU,PCMA,opus,mpeg4-generic
preferredCodecA=PCMA,PCMU,opus,mpeg4-generic
#Video codecs supported by RTC; codecs listed earlier have higher priority
#The example below lists all supported video codecs
preferredCodecV=H264,H265,AV1,VP9,VP8

conf/novideo.yuv (new file, 1 line); file diff suppressed because one or more lines are too long


@ -17,7 +17,7 @@ using namespace toolkit;
namespace mediakit {
void AACRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {
CHECK(pkt->size() > 2);
CHECK_RET(pkt->size() > 2);
if (pkt->isConfigFrame()) {
getTrack()->setExtraData((uint8_t *)pkt->data() + 2, pkt->size() - 2);
return;


@ -8,13 +8,27 @@ G711RtpEncoder::G711RtpEncoder(CodecId codec, uint32_t channels){
_channels = channels;
}
void G711RtpEncoder::setOpt(int opt, const toolkit::Any &param) {
if (opt == RTP_ENCODER_PKT_DUR_MS) {
if (param.is<uint32_t>()) {
auto dur = param.get<uint32_t>();
if (dur < 20 || dur > 180) {
WarnL << "set g711 rtp encoder duration ms failed for " << dur;
return;
}
// round up to a multiple of 20 ms
_pkt_dur_ms = (dur + 19) / 20 * 20;
}
}
}
bool G711RtpEncoder::inputFrame(const Frame::Ptr &frame) {
auto dur = (_cache_frame->size() - _cache_frame->prefixSize()) / (8 * _channels);
auto next_pts = _cache_frame->pts() + dur;
if (next_pts == 0) {
_cache_frame->_pts = frame->pts();
} else {
if ((next_pts + 20) < frame->pts()) { // packets lost for more than 20 ms
if ((next_pts + _pkt_dur_ms) < frame->pts()) { // packets lost for more than one packet duration
_cache_frame->_pts = frame->pts() - dur;
}
}
@ -24,24 +38,20 @@ bool G711RtpEncoder::inputFrame(const Frame::Ptr &frame) {
auto ptr = _cache_frame->data() + _cache_frame->prefixSize();
auto len = _cache_frame->size() - _cache_frame->prefixSize();
auto remain_size = len;
auto max_size = 160 * _channels; // 20 ms per rtp
int n = 0;
bool mark = false;
size_t max_size = 160 * _channels * _pkt_dur_ms / 20; // 160 bytes per channel per 20 ms of G.711
size_t n = 0;
bool mark = true;
while (remain_size >= max_size) {
size_t rtp_size;
if (remain_size >= max_size) {
rtp_size = max_size;
} else {
break;
}
assert(remain_size >= max_size);
const size_t rtp_size = max_size;
n++;
stamp += 20;
RtpCodec::inputRtp(getRtpInfo().makeRtp(TrackAudio, ptr, rtp_size, mark, stamp), false);
stamp += _pkt_dur_ms;
RtpCodec::inputRtp(getRtpInfo().makeRtp(TrackAudio, ptr, rtp_size, mark, stamp), true);
ptr += rtp_size;
remain_size -= rtp_size;
}
_cache_frame->_buffer.erase(0, n * max_size);
_cache_frame->_pts += 20 * n;
_cache_frame->_pts += (uint64_t)_pkt_dur_ms * n;
return len > 0;
}
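A sketch of how a caller might switch the packet duration (assumed caller code; RTP_ENCODER_PKT_DUR_MS and toolkit::Any are used exactly as in the setOpt above, and the encoder class is assumed to live in namespace mediakit):
void set_g711_packet_duration(mediakit::G711RtpEncoder &encoder, uint32_t ms) {
    toolkit::Any dur;
    dur.set<uint32_t>(ms); // setOpt only reacts to a uint32_t payload
    encoder.setOpt(RTP_ENCODER_PKT_DUR_MS, dur); // accepted range 20-180 ms, rounded up to a multiple of 20
}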


@ -36,8 +36,11 @@ public:
*/
bool inputFrame(const Frame::Ptr &frame) override;
void setOpt(int opt, const toolkit::Any &param) override;
private:
uint32_t _channels = 1;
uint32_t _pkt_dur_ms = 20;
FrameImp::Ptr _cache_frame;
};


@ -14,14 +14,6 @@
using namespace std;
using namespace toolkit;
#define CHECK_RET(...) \
try { \
CHECK(__VA_ARGS__); \
} catch (AssertFailedException & ex) { \
WarnL << ex.what(); \
return; \
}
namespace mediakit {
void H264RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {


@ -18,14 +18,6 @@
using namespace std;
using namespace toolkit;
#define CHECK_RET(...) \
try { \
CHECK(__VA_ARGS__); \
} catch (AssertFailedException & ex) { \
WarnL << ex.what(); \
return; \
}
namespace mediakit {
void H265RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt) {


@ -31,7 +31,9 @@ if(PKG_CONFIG_FOUND)
list(APPEND LINK_LIBRARIES PkgConfig::SDL2)
message(STATUS "found library: ${SDL2_LIBRARIES}")
endif()
else()
endif()
if(NOT SDL2_FOUND)
find_package(SDL2 QUIET)
if(SDL2_FOUND)
include_directories(SYSTEM ${SDL2_INCLUDE_DIR})


@ -1,11 +1,10 @@
{
"info": {
"_postman_id": "08e3bc35-5318-4949-81bb-90d854706194",
"_postman_id": "8b3cdc62-3e18-4700-9ddd-dc9f58ebce83",
"name": "ZLMediaKit",
"description": "媒体服务器",
"schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json",
"_exporter_id": "29185956",
"_collection_link": "https://lively-station-598157.postman.co/workspace/%E6%B5%81%E5%AA%92%E4%BD%93%E6%9C%8D%E5%8A%A1~1e119172-45b0-4ed6-b1fc-8a15d0e2d5f8/collection/29185956-08e3bc35-5318-4949-81bb-90d854706194?action=share&source=collection_link&creator=29185956"
"_exporter_id": "26338564"
},
"item": [
{
@ -34,6 +33,72 @@
},
"response": []
},
{
"name": "关闭多屏拼接(stack/stop)",
"request": {
"method": "GET",
"header": [],
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/getApiList?secret={{ZLMediaKit_secret}}&id=stack_test",
"host": [
"{{ZLMediaKit_URL}}"
],
"path": [
"index",
"api",
"getApiList"
],
"query": [
{
"key": "secret",
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
},
{
"key": "id",
"value": "stack_test"
}
]
}
},
"response": []
},
{
"name": "添加多屏拼接(stack/start)",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"gapv\": 0.002,\r\n \"gaph\": 0.001,\r\n \"width\": 1920,\r\n \"url\": [\r\n [\r\n \"rtsp://kkem.me/live/test3\",\r\n \"rtsp://kkem.me/live/cy1\",\r\n \"rtsp://kkem.me/live/cy1\",\r\n \"rtsp://kkem.me/live/cy2\"\r\n ],\r\n [\r\n \"rtsp://kkem.me/live/cy1\",\r\n \"rtsp://kkem.me/live/cy5\",\r\n \"rtsp://kkem.me/live/cy3\",\r\n \"rtsp://kkem.me/live/cy4\"\r\n ],\r\n [\r\n \"rtsp://kkem.me/live/cy5\",\r\n \"rtsp://kkem.me/live/cy6\",\r\n \"rtsp://kkem.me/live/cy7\",\r\n \"rtsp://kkem.me/live/cy8\"\r\n ],\r\n [\r\n \"rtsp://kkem.me/live/cy9\",\r\n \"rtsp://kkem.me/live/cy10\",\r\n \"rtsp://kkem.me/live/cy11\",\r\n \"rtsp://kkem.me/live/cy12\"\r\n ]\r\n ],\r\n \"id\": \"89\",\r\n \"row\": 4,\r\n \"col\": 4,\r\n \"height\": 1080,\r\n \"span\": [\r\n [\r\n [\r\n 0,\r\n 0\r\n ],\r\n [\r\n 1,\r\n 1\r\n ]\r\n ],\r\n [\r\n [\r\n 3,\r\n 0\r\n ],\r\n [\r\n 3,\r\n 1\r\n ]\r\n ],\r\n [\r\n [\r\n 2,\r\n 3\r\n ],\r\n [\r\n 3,\r\n 3\r\n ]\r\n ]\r\n ]\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/stack/start?secret={{ZLMediaKit_secret}}",
"host": [
"{{ZLMediaKit_URL}}"
],
"path": [
"index",
"api",
"stack",
"start"
],
"query": [
{
"key": "secret",
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
}
]
}
},
"response": []
},
{
"name": "获取网络线程负载(getThreadsLoad)",
"request": {
@ -1470,9 +1535,9 @@
"disabled": true
},
{
"key": "only_audio",
"key": "only_track",
"value": "1",
"description": "是否为单音频track用于语音对讲",
"description": "是否为单音频/单视频track0不设置1单音频2单视频",
"disabled": true
},
{
@ -1523,9 +1588,9 @@
"description": "该端口绑定的流id\n"
},
{
"key": "only_audio",
"key": "only_track",
"value": "0",
"description": "是否为单音频track用于语音对讲",
"description": "是否为单音频/单视频track0不设置1单音频2单视频",
"disabled": true
},
{

server/VideoStack.cpp (new file, 592 lines)

@ -0,0 +1,592 @@
#if defined(ENABLE_X264) && defined(ENABLE_FFMPEG)
#include "VideoStack.h"
#include "Codec/Transcode.h"
#include "Common/Device.h"
#include "Util/logger.h"
#include "Util/util.h"
#include "json/value.h"
#include <Thread/WorkThreadPool.h>
#include <fstream>
#include <libavutil/pixfmt.h>
#include <memory>
#include <mutex>
// ITU-R BT.601
// #define RGB_TO_Y(R, G, B) ((( 66 * (R) + 129 * (G) + 25 * (B)+128) >> 8)+16)
// #define RGB_TO_U(R, G, B) (((-38 * (R) - 74 * (G) + 112 * (B)+128) >> 8)+128)
// #define RGB_TO_V(R, G, B) (((112 * (R) - 94 * (G) - 18 * (B)+128) >> 8)+128)
// ITU-R BT.709
#define RGB_TO_Y(R, G, B) (((47 * (R) + 157 * (G) + 16 * (B) + 128) >> 8) + 16)
#define RGB_TO_U(R, G, B) (((-26 * (R)-87 * (G) + 112 * (B) + 128) >> 8) + 128)
#define RGB_TO_V(R, G, B) (((112 * (R)-102 * (G)-10 * (B) + 128) >> 8) + 128)
INSTANCE_IMP(VideoStackManager)
Param::~Param()
{
VideoStackManager::Instance().unrefChannel(
id, width, height, pixfmt);
}
Channel::Channel(const std::string& id, int width, int height, AVPixelFormat pixfmt)
: _id(id)
, _width(width)
, _height(height)
, _pixfmt(pixfmt)
{
_tmp = std::make_shared<mediakit::FFmpegFrame>();
_tmp->get()->width = _width;
_tmp->get()->height = _height;
_tmp->get()->format = _pixfmt;
av_frame_get_buffer(_tmp->get(), 32);
memset(_tmp->get()->data[0], 0, _tmp->get()->linesize[0] * _height);
memset(_tmp->get()->data[1], 0, _tmp->get()->linesize[1] * _height / 2);
memset(_tmp->get()->data[2], 0, _tmp->get()->linesize[2] * _height / 2);
auto frame = VideoStackManager::Instance().getBgImg();
_sws = std::make_shared<mediakit::FFmpegSws>(_pixfmt, _width, _height);
_tmp = _sws->inputFrame(frame);
}
void Channel::addParam(const std::weak_ptr<Param>& p)
{
std::lock_guard<std::recursive_mutex> lock(_mx);
_params.push_back(p);
}
void Channel::onFrame(const mediakit::FFmpegFrame::Ptr& frame)
{
std::weak_ptr<Channel> weakSelf = shared_from_this();
_poller = _poller ? _poller : toolkit::WorkThreadPool::Instance().getPoller();
_poller->async([weakSelf, frame]() {
auto self = weakSelf.lock();
if (!self) {
return;
}
self->_tmp = self->_sws->inputFrame(frame);
self->forEachParam([self](const Param::Ptr& p) { self->fillBuffer(p); });
});
}
void Channel::forEachParam(const std::function<void(const Param::Ptr&)>& func)
{
for (auto& wp : _params) {
if (auto sp = wp.lock()) {
func(sp);
}
}
}
void Channel::fillBuffer(const Param::Ptr& p)
{
if (auto buf = p->weak_buf.lock()) {
copyData(buf, p);
}
}
void Channel::copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p)
{
switch (p->pixfmt) {
case AV_PIX_FMT_YUV420P: {
for (int i = 0; i < p->height; i++) {
memcpy(buf->get()->data[0] + buf->get()->linesize[0] * (i + p->posY) + p->posX,
_tmp->get()->data[0] + _tmp->get()->linesize[0] * i,
_tmp->get()->width);
}
//ensure the last row of UV data is also copied correctly when height is odd
for (int i = 0; i < (p->height + 1) / 2; i++) {
// U plane
memcpy(buf->get()->data[1] + buf->get()->linesize[1] * (i + p->posY / 2) + p->posX / 2,
_tmp->get()->data[1] + _tmp->get()->linesize[1] * i,
_tmp->get()->width / 2);
// V plane
memcpy(buf->get()->data[2] + buf->get()->linesize[2] * (i + p->posY / 2) + p->posX / 2,
_tmp->get()->data[2] + _tmp->get()->linesize[2] * i,
_tmp->get()->width / 2);
}
break;
}
case AV_PIX_FMT_NV12: {
//TODO: not implemented yet
break;
}
default:
WarnL << "No support pixformat: " << av_get_pix_fmt_name(p->pixfmt);
break;
}
}
void StackPlayer::addChannel(const std::weak_ptr<Channel>& chn)
{
std::lock_guard<std::recursive_mutex> lock(_mx);
_channels.push_back(chn);
}
void StackPlayer::play()
{
auto url = _url;
//create the pull-stream player and decoder objects
_player = std::make_shared<mediakit::MediaPlayer>();
std::weak_ptr<mediakit::MediaPlayer> weakPlayer = _player;
std::weak_ptr<StackPlayer> weakSelf = shared_from_this();
(*_player)[mediakit::Client::kWaitTrackReady] = false;
(*_player)[mediakit::Client::kRtpType] = mediakit::Rtsp::RTP_TCP;
_player->setOnPlayResult([weakPlayer, weakSelf, url](const toolkit::SockException& ex) mutable {
TraceL << "StackPlayer: " << url << " OnPlayResult: " << ex.what();
auto strongPlayer = weakPlayer.lock();
if (!strongPlayer) {
return;
}
auto self = weakSelf.lock();
if (!self) {
return;
}
if (!ex) {
// cancel the retry timer
self->_timer.reset();
self->_failedCount = 0;
} else {
self->onDisconnect();
self->rePlay(url);
}
auto videoTrack = std::dynamic_pointer_cast<mediakit::VideoTrack>(strongPlayer->getTrack(mediakit::TrackVideo, false));
//auto audioTrack = std::dynamic_pointer_cast<mediakit::AudioTrack>(strongPlayer->getTrack(mediakit::TrackAudio, false));
if (videoTrack) {
//TODO: add logic to choose between GPU and CPU decoding
//auto decoder = std::make_shared<FFmpegDecoder>(videoTrack, 1, std::vector<std::string>{ "hevc_cuvid", "h264_cuvid"});
auto decoder = std::make_shared<mediakit::FFmpegDecoder>(videoTrack, 0, std::vector<std::string> { "h264", "hevc" });
decoder->setOnDecode([weakSelf](const mediakit::FFmpegFrame::Ptr& frame) mutable {
auto self = weakSelf.lock();
if (!self) {
return;
}
self->onFrame(frame);
});
videoTrack->addDelegate([decoder](const mediakit::Frame::Ptr& frame) {
return decoder->inputFrame(frame, false, true);
});
}
});
_player->setOnShutdown([weakPlayer, url, weakSelf](const toolkit::SockException& ex) {
TraceL << "StackPlayer: " << url << " OnShutdown: " << ex.what();
auto strongPlayer = weakPlayer.lock();
if (!strongPlayer) {
return;
}
auto self = weakSelf.lock();
if (!self) {
return;
}
self->onDisconnect();
self->rePlay(url);
});
_player->play(url);
}
void StackPlayer::onFrame(const mediakit::FFmpegFrame::Ptr& frame)
{
std::lock_guard<std::recursive_mutex> lock(_mx);
for (auto& weak_chn : _channels) {
if (auto chn = weak_chn.lock()) {
chn->onFrame(frame);
}
}
}
void StackPlayer::onDisconnect()
{
std::lock_guard<std::recursive_mutex> lock(_mx);
for (auto& weak_chn : _channels) {
if (auto chn = weak_chn.lock()) {
auto frame = VideoStackManager::Instance().getBgImg();
chn->onFrame(frame);
}
}
}
void StackPlayer::rePlay(const std::string& url)
{
_failedCount++;
auto delay = MAX(2 * 1000, MIN(_failedCount * 3 * 1000, 60 * 1000)); //stepped retry delay (back-off interval)
std::weak_ptr<StackPlayer> weakSelf = shared_from_this();
_timer = std::make_shared<toolkit::Timer>(
delay / 1000.0f, [weakSelf, url]() {
auto self = weakSelf.lock();
if (!self) {
    return false;
}
WarnL << "replay [" << self->_failedCount << "]:" << url;
self->_player->play(url);
return false;
},
nullptr);
}
VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelFormat pixfmt, float fps, int bitRate)
: _id(id)
, _width(width)
, _height(height)
, _pixfmt(pixfmt)
, _fps(fps)
, _bitRate(bitRate)
{
_buffer = std::make_shared<mediakit::FFmpegFrame>();
_buffer->get()->width = _width;
_buffer->get()->height = _height;
_buffer->get()->format = _pixfmt;
av_frame_get_buffer(_buffer->get(), 32);
_dev = std::make_shared<mediakit::DevChannel>(mediakit::MediaTuple { DEFAULT_VHOST, "live", _id });
mediakit::VideoInfo info;
info.codecId = mediakit::CodecH264;
info.iWidth = _width;
info.iHeight = _height;
info.iFrameRate = _fps;
info.iBitRate = _bitRate;
_dev->initVideo(info);
//dev->initAudio(); //TODO: audio support
_dev->addTrackCompleted();
_isExit = false;
}
VideoStack::~VideoStack()
{
_isExit = true;
if (_thread.joinable()) {
_thread.join();
}
}
void VideoStack::setParam(const Params& params)
{
if (_params) {
for (auto& p : (*_params)) {
if (!p)
continue;
p->weak_buf.reset();
}
}
initBgColor();
for (auto& p : (*params)) {
if (!p)
continue;
p->weak_buf = _buffer;
if (auto chn = p->weak_chn.lock()) {
chn->addParam(p);
chn->fillBuffer(p);
}
}
_params = params;
}
void VideoStack::start()
{
_thread = std::thread([&]() {
uint64_t pts = 0;
int frameInterval = 1000 / _fps;
auto lastEncTP = std::chrono::steady_clock::now();
while (!_isExit) {
if (std::chrono::steady_clock::now() - lastEncTP > std::chrono::milliseconds(frameInterval)) {
lastEncTP = std::chrono::steady_clock::now();
_dev->inputYUV((char**)_buffer->get()->data, _buffer->get()->linesize, pts);
pts += frameInterval;
}
}
});
}
void VideoStack::initBgColor()
{
//fill the background color
auto R = 20;
auto G = 20;
auto B = 20;
double Y = RGB_TO_Y(R, G, B);
double U = RGB_TO_U(R, G, B);
double V = RGB_TO_V(R, G, B);
memset(_buffer->get()->data[0], Y, _buffer->get()->linesize[0] * _height);
memset(_buffer->get()->data[1], U, _buffer->get()->linesize[1] * _height / 2);
memset(_buffer->get()->data[2], V, _buffer->get()->linesize[2] * _height / 2);
}
Channel::Ptr VideoStackManager::getChannel(const std::string& id,
int width,
int height,
AVPixelFormat pixfmt)
{
std::lock_guard<std::recursive_mutex> lock(_mx);
auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
auto it = _channelMap.find(key);
if (it != _channelMap.end()) {
return it->second->acquire();
}
return createChannel(id, width, height, pixfmt);
}
void VideoStackManager::unrefChannel(const std::string& id,
int width,
int height,
AVPixelFormat pixfmt)
{
std::lock_guard<std::recursive_mutex> lock(_mx);
auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
auto chn_it = _channelMap.find(key);
if (chn_it != _channelMap.end() && chn_it->second->dispose()) {
_channelMap.erase(chn_it);
auto player_it = _playerMap.find(id);
if (player_it != _playerMap.end() && player_it->second->dispose()) {
_playerMap.erase(player_it);
}
}
}
int VideoStackManager::startVideoStack(const Json::Value& json)
{
std::string id;
int width, height;
auto params = parseParams(json, id, width, height);
if (!params) {
ErrorL << "Videostack parse params failed!";
return -1;
}
auto stack = std::make_shared<VideoStack>(id, width, height);
for (auto& p : (*params)) {
if (!p)
continue;
p->weak_chn = getChannel(p->id, p->width, p->height, p->pixfmt);
}
stack->setParam(params);
stack->start();
std::lock_guard<std::recursive_mutex> lock(_mx);
_stackMap[id] = stack;
return 0;
}
int VideoStackManager::resetVideoStack(const Json::Value& json)
{
std::string id;
int width, height;
auto params = parseParams(json, id, width, height);
if (!params) {
return -1;
}
VideoStack::Ptr stack;
{
std::lock_guard<std::recursive_mutex> lock(_mx);
auto it = _stackMap.find(id);
if (it == _stackMap.end()) {
return -2;
}
stack = it->second;
}
for (auto& p : (*params)) {
if (!p)
continue;
p->weak_chn = getChannel(p->id, p->width, p->height, p->pixfmt);
}
stack->setParam(params);
return 0;
}
int VideoStackManager::stopVideoStack(const std::string& id)
{
std::lock_guard<std::recursive_mutex> lock(_mx);
auto it = _stackMap.find(id);
if (it != _stackMap.end()) {
_stackMap.erase(it);
InfoL << "VideoStack stop: " << id;
return 0;
}
return -1;
}
mediakit::FFmpegFrame::Ptr VideoStackManager::getBgImg()
{
return _bgImg;
}
Params VideoStackManager::parseParams(const Json::Value& json,
std::string& id,
int& width,
int& height)
{
try {
id = json["id"].asString();
width = json["width"].asInt();
height = json["height"].asInt();
int rows = json["row"].asInt(); //number of stacked rows
int cols = json["col"].asInt(); //number of stacked columns
float gapv = json["gapv"].asFloat(); //vertical gap, as a fraction of the height
float gaph = json["gaph"].asFloat(); //horizontal gap, as a fraction of the width
//gap sizes in pixels
int gaphPix = static_cast<int>(round(width * gaph));
int gapvPix = static_cast<int>(round(height * gapv));
// compute the cell width/height from the gaps
int gridWidth = cols > 1 ? (width - gaphPix * (cols - 1)) / cols : width;
int gridHeight = rows > 1 ? (height - gapvPix * (rows - 1)) / rows : height;
auto params = std::make_shared<std::vector<Param::Ptr>>(rows * cols);
for (int row = 0; row < rows; row++) {
for (int col = 0; col < cols; col++) {
std::string url = json["url"][row][col].asString();
auto param = std::make_shared<Param>();
param->posX = gridWidth * col + col * gaphPix;
param->posY = gridHeight * row + row * gapvPix;
param->width = gridWidth;
param->height = gridHeight;
param->id = url;
(*params)[row * cols + col] = param;
}
}
//check whether cells need to be merged (focus screen)
if (!json["span"].empty() && json.isMember("span")) {
for (const auto& subArray : json["span"]) {
if (!subArray.isArray() || subArray.size() != 2) {
throw Json::LogicError("Incorrect 'span' sub-array format in JSON");
}
std::array<int, 4> mergePos;
int index = 0;
for (const auto& innerArray : subArray) {
if (!innerArray.isArray() || innerArray.size() != 2) {
throw Json::LogicError("Incorrect 'span' inner-array format in JSON");
}
for (const auto& number : innerArray) {
if (index < mergePos.size()) {
mergePos[index++] = number.asInt();
}
}
}
for (int i = mergePos[0]; i <= mergePos[2]; i++) {
for (int j = mergePos[1]; j <= mergePos[3]; j++) {
if (i == mergePos[0] && j == mergePos[1]) {
(*params)[i * cols + j]->width = (mergePos[3] - mergePos[1] + 1) * gridWidth + (mergePos[3] - mergePos[1]) * gaphPix;
(*params)[i * cols + j]->height = (mergePos[2] - mergePos[0] + 1) * gridHeight + (mergePos[2] - mergePos[0]) * gapvPix;
} else {
(*params)[i * cols + j] = nullptr;
}
}
}
}
}
return params;
} catch (const std::exception& e) {
ErrorL << "Videostack parse params failed! " << e.what();
return nullptr;
}
}
bool VideoStackManager::loadBgImg(const std::string& path)
{
_bgImg = std::make_shared<mediakit::FFmpegFrame>();
_bgImg->get()->width = 1280;
_bgImg->get()->height = 720;
_bgImg->get()->format = AV_PIX_FMT_YUV420P;
av_frame_get_buffer(_bgImg->get(), 32);
std::ifstream file(path, std::ios::binary);
if (!file.is_open()) {
return false;
}
file.read((char*)_bgImg->get()->data[0], _bgImg->get()->linesize[0] * _bgImg->get()->height); // Y
file.read((char*)_bgImg->get()->data[1], _bgImg->get()->linesize[1] * _bgImg->get()->height / 2); // U
file.read((char*)_bgImg->get()->data[2], _bgImg->get()->linesize[2] * _bgImg->get()->height / 2); // V
return true;
}
Channel::Ptr VideoStackManager::createChannel(const std::string& id,
int width,
int height,
AVPixelFormat pixfmt)
{
std::lock_guard<std::recursive_mutex> lock(_mx);
StackPlayer::Ptr player;
auto it = _playerMap.find(id);
if (it != _playerMap.end()) {
player = it->second->acquire();
} else {
player = createPlayer(id);
}
auto refChn = std::make_shared<RefWrapper<Channel::Ptr>>(std::make_shared<Channel>(id, width, height, pixfmt));
auto chn = refChn->acquire();
player->addChannel(chn);
_channelMap[id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt)] = refChn;
return chn;
}
StackPlayer::Ptr VideoStackManager::createPlayer(const std::string& id)
{
std::lock_guard<std::recursive_mutex> lock(_mx);
auto refPlayer = std::make_shared<RefWrapper<StackPlayer::Ptr>>(std::make_shared<StackPlayer>(id));
_playerMap[id] = refPlayer;
auto player = refPlayer->acquire();
if (!id.empty()) {
player->play();
}
return player;
}
#endif
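loadBgImg() above expects conf/novideo.yuv to be a raw 1280x720 YUV420p frame (Y plane, then U, then V; 1280 is already 32-aligned, so the planes are unpadded). A throwaway generator for a plain dark background, offered as a sketch rather than the tool actually used to produce the shipped file:
#include <fstream>
#include <string>
int main() {
    const int w = 1280, h = 720;
    std::string y(w * h, (char)0x10);      // Y = 16, black in limited range
    std::string uv(w * h / 4, (char)0x80); // U = V = 128, neutral chroma
    std::ofstream out("novideo.yuv", std::ios::binary);
    out.write(y.data(), y.size());         // Y plane
    out.write(uv.data(), uv.size());       // U plane
    out.write(uv.data(), uv.size());       // V plane
    return 0;
}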

server/VideoStack.h (new file, 208 lines)

@ -0,0 +1,208 @@
#pragma once
#if defined(ENABLE_X264) && defined(ENABLE_FFMPEG)
#include "Codec/Transcode.h"
#include "Common/Device.h"
#include "Player/MediaPlayer.h"
#include "json/json.h"
#include <mutex>
template <typename T>
class RefWrapper {
public:
using Ptr = std::shared_ptr<RefWrapper<T>>;
template <typename... Args>
explicit RefWrapper(Args&&... args)
: _rc(0)
, _entity(std::forward<Args>(args)...)
{
}
T acquire()
{
++_rc;
return _entity;
}
bool dispose() { return --_rc <= 0; }
private:
T _entity;
std::atomic<int> _rc;
};
class Channel;
struct Param {
using Ptr = std::shared_ptr<Param>;
int posX = 0;
int posY = 0;
int width = 0;
int height = 0;
AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P;
std::string id {};
// runtime
std::weak_ptr<Channel> weak_chn;
std::weak_ptr<mediakit::FFmpegFrame> weak_buf;
~Param();
};
using Params = std::shared_ptr<std::vector<Param::Ptr>>;
class Channel : public std::enable_shared_from_this<Channel> {
public:
using Ptr = std::shared_ptr<Channel>;
Channel(const std::string& id, int width, int height, AVPixelFormat pixfmt);
void addParam(const std::weak_ptr<Param>& p);
void onFrame(const mediakit::FFmpegFrame::Ptr& frame);
void fillBuffer(const Param::Ptr& p);
protected:
void forEachParam(const std::function<void(const Param::Ptr&)>& func);
void copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p);
private:
std::string _id;
int _width;
int _height;
AVPixelFormat _pixfmt;
mediakit::FFmpegFrame::Ptr _tmp;
std::recursive_mutex _mx;
std::vector<std::weak_ptr<Param>> _params;
mediakit::FFmpegSws::Ptr _sws;
toolkit::EventPoller::Ptr _poller;
};
class StackPlayer : public std::enable_shared_from_this<StackPlayer> {
public:
using Ptr = std::shared_ptr<StackPlayer>;
StackPlayer(const std::string& url)
: _url(url)
{
}
void addChannel(const std::weak_ptr<Channel>& chn);
void play();
void onFrame(const mediakit::FFmpegFrame::Ptr& frame);
void onDisconnect();
protected:
void rePlay(const std::string& url);
private:
std::string _url;
mediakit::MediaPlayer::Ptr _player;
//used to reconnect after a disconnect
toolkit::Timer::Ptr _timer;
int _failedCount = 0;
std::recursive_mutex _mx;
std::vector<std::weak_ptr<Channel>> _channels;
};
class VideoStack {
public:
using Ptr = std::shared_ptr<VideoStack>;
VideoStack(const std::string& url,
int width = 1920,
int height = 1080,
AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P,
float fps = 25.0,
int bitRate = 2 * 1024 * 1024);
~VideoStack();
void setParam(const Params& params);
void start();
protected:
void initBgColor();
public:
Params _params;
mediakit::FFmpegFrame::Ptr _buffer;
private:
std::string _id;
int _width;
int _height;
AVPixelFormat _pixfmt;
float _fps;
int _bitRate;
mediakit::DevChannel::Ptr _dev;
bool _isExit;
std::thread _thread;
};
class VideoStackManager {
public:
static VideoStackManager& Instance();
Channel::Ptr getChannel(const std::string& id,
int width,
int height,
AVPixelFormat pixfmt);
void unrefChannel(const std::string& id,
int width,
int height,
AVPixelFormat pixfmt);
int startVideoStack(const Json::Value& json);
int resetVideoStack(const Json::Value& json);
int stopVideoStack(const std::string& id);
bool loadBgImg(const std::string& path);
mediakit::FFmpegFrame::Ptr getBgImg();
protected:
Params parseParams(const Json::Value& json,
std::string& id,
int& width,
int& height);
protected:
Channel::Ptr createChannel(const std::string& id,
int width,
int height,
AVPixelFormat pixfmt);
StackPlayer::Ptr createPlayer(const std::string& id);
private:
mediakit::FFmpegFrame::Ptr _bgImg;
private:
std::recursive_mutex _mx;
std::unordered_map<std::string, VideoStack::Ptr> _stackMap;
std::unordered_map<std::string, RefWrapper<Channel::Ptr>::Ptr> _channelMap;
std::unordered_map<std::string, RefWrapper<StackPlayer::Ptr>::Ptr> _playerMap;
};
#endif
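A sketch of driving the manager directly from C++ (the id and urls are invented; the /index/api/stack/start handler further below feeds startVideoStack the same JSON shape as the Postman request earlier):
#include "VideoStack.h"
int start_demo_stack() {
    Json::Value req;
    req["id"] = "wall"; // the mixed stream is published under app "live" with this id
    req["width"] = 1920;
    req["height"] = 1080;
    req["row"] = 2;
    req["col"] = 2;
    const char *urls[2][2] = {
        { "rtsp://127.0.0.1/live/cam1", "rtsp://127.0.0.1/live/cam2" },
        { "rtsp://127.0.0.1/live/cam3", "rtsp://127.0.0.1/live/cam4" },
    };
    for (int r = 0; r < 2; ++r) {
        for (int c = 0; c < 2; ++c) {
            req["url"][r][c] = urls[r][c];
        }
    }
    return VideoStackManager::Instance().startVideoStack(req); // 0 on success
}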


@ -59,7 +59,11 @@
#endif
#if defined(ENABLE_VERSION)
#include "version.h"
#include "ZLMVersion.h"
#endif
#if defined(ENABLE_X264) && defined (ENABLE_FFMPEG)
#include "VideoStack.h"
#endif
using namespace std;
@ -115,7 +119,7 @@ static HttpApi toApi(const function<void(API_ARGS_MAP_ASYNC)> &cb) {
//parse the request parameters into a map
auto args = getAllArgs(parser);
cb(sender, headerOut, HttpAllArgs<decltype(args)>(parser, args), val, invoker);
cb(sender, headerOut, ArgsMap(parser, args), val, invoker);
};
}
@ -143,7 +147,7 @@ static HttpApi toApi(const function<void(API_ARGS_JSON_ASYNC)> &cb) {
Json::Reader reader;
reader.parse(parser.content(), args);
cb(sender, headerOut, HttpAllArgs<decltype(args)>(parser, args), val, invoker);
cb(sender, headerOut, ArgsJson(parser, args), val, invoker);
};
}
@ -163,7 +167,7 @@ static HttpApi toApi(const function<void(API_ARGS_STRING_ASYNC)> &cb) {
Json::Value val;
val["code"] = API::Success;
cb(sender, headerOut, HttpAllArgs<string>(parser, (string &)parser.content()), val, invoker);
cb(sender, headerOut, ArgsString(parser, (string &)parser.content()), val, invoker);
};
}
@ -297,22 +301,71 @@ static inline void addHttpListener(){
});
}
template <typename Type>
class ServiceController {
public:
using Pointer = std::shared_ptr<Type>;
std::unordered_map<std::string, Pointer> _map;
mutable std::recursive_mutex _mtx;
void clear() {
decltype(_map) copy;
{
std::lock_guard<std::recursive_mutex> lck(_mtx);
copy.swap(_map);
}
}
size_t erase(const std::string &key) {
std::lock_guard<std::recursive_mutex> lck(_mtx);
return _map.erase(key);
}
Pointer find(const std::string &key) const {
std::lock_guard<std::recursive_mutex> lck(_mtx);
auto it = _map.find(key);
if (it == _map.end()) {
return nullptr;
}
return it->second;
}
template<class ..._Args>
Pointer make(const std::string &key, _Args&& ...__args) {
// assert(!find(key));
auto server = std::make_shared<Type>(std::forward<_Args>(__args)...);
std::lock_guard<std::recursive_mutex> lck(_mtx);
auto it = _map.emplace(key, server);
assert(it.second);
return server;
}
template<class ..._Args>
Pointer makeWithAction(const std::string &key, function<void(Pointer)> action, _Args&& ...__args) {
// assert(!find(key));
auto server = std::make_shared<Type>(std::forward<_Args>(__args)...);
action(server);
std::lock_guard<std::recursive_mutex> lck(_mtx);
auto it = _map.emplace(key, server);
assert(it.second);
return server;
}
};
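The pattern the rewritten handlers below rely on, shown with a throwaway instance (illustrative only; FFmpegSource is default-constructed here just as in addFFmpegSource):
static ServiceController<FFmpegSource> s_demo;     // map + mutex folded into one object
static void service_controller_demo() {
    auto src = s_demo.make("demo_key");            // construct and insert under the lock
    if (s_demo.find("demo_key")) {
        // already present
    }
    s_demo.erase("demo_key");                      // returns the number of erased entries (0 or 1)
}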
//registry of pull-stream (player) proxies
static unordered_map<string, PlayerProxy::Ptr> s_proxyMap;
static recursive_mutex s_proxyMapMtx;
static ServiceController<PlayerProxy> s_player_proxy;
//registry of push-stream (pusher) proxies
static unordered_map<string, PusherProxy::Ptr> s_proxyPusherMap;
static recursive_mutex s_proxyPusherMapMtx;
static ServiceController<PusherProxy> s_pusher_proxy;
//registry of FFmpeg pull-stream sources
static unordered_map<string, FFmpegSource::Ptr> s_ffmpegMap;
static recursive_mutex s_ffmpegMapMtx;
static ServiceController<FFmpegSource> s_ffmpeg_src;
#if defined(ENABLE_RTPPROXY)
//registry of rtp servers
static unordered_map<string, RtpServer::Ptr> s_rtpServerMap;
static recursive_mutex s_rtpServerMapMtx;
static ServiceController<RtpServer> s_rtp_server;
#endif
static inline string getProxyKey(const string &vhost, const string &app, const string &stream) {
@ -415,47 +468,24 @@ Value makeMediaSourceJson(MediaSource &media){
}
#if defined(ENABLE_RTPPROXY)
uint16_t openRtpServer(uint16_t local_port, const string &stream_id, int tcp_mode, const string &local_ip, bool re_use_port, uint32_t ssrc, bool only_audio, bool multiplex) {
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
if (s_rtpServerMap.find(stream_id) != s_rtpServerMap.end()) {
uint16_t openRtpServer(uint16_t local_port, const string &stream_id, int tcp_mode, const string &local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex) {
if (s_rtp_server.find(stream_id)) {
//RtpProcess ownership would become ambiguous, so adding the same stream_id twice is not allowed
return 0;
}
RtpServer::Ptr server = std::make_shared<RtpServer>();
server->start(local_port, stream_id, (RtpServer::TcpMode)tcp_mode, local_ip.c_str(), re_use_port, ssrc, only_audio, multiplex);
auto server = s_rtp_server.makeWithAction(stream_id, [&](RtpServer::Ptr server) {
server->start(local_port, stream_id, (RtpServer::TcpMode)tcp_mode, local_ip.c_str(), re_use_port, ssrc, only_track, multiplex);
});
server->setOnDetach([stream_id]() {
//set the removal callback for rtp timeout
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
s_rtpServerMap.erase(stream_id);
s_rtp_server.erase(stream_id);
});
//save the object
s_rtpServerMap.emplace(stream_id, server);
//reply with json
return server->getPort();
}
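A sketch of calling the reworked helper (hypothetical caller; "gb_demo" and the argument choices are invented, and the meaning of 0 for local_port and ssrc is an assumption):
uint16_t port = openRtpServer(0 /*local_port, 0 is expected to pick a free port*/, "gb_demo",
                              1 /*tcp_mode, 1 = passive tcp, as the old enable_tcp=1 maps to*/,
                              "::", true /*re_use_port*/, 0 /*ssrc, 0 = accept any*/,
                              0 /*only_track: 0 = audio+video, 1 = audio only, 2 = video only*/,
                              false /*multiplex*/);
if (port == 0) {
    // this stream_id is already registered
}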
void connectRtpServer(const string &stream_id, const string &dst_url, uint16_t dst_port, const function<void(const SockException &ex)> &cb) {
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
auto it = s_rtpServerMap.find(stream_id);
if (it == s_rtpServerMap.end()) {
cb(SockException(Err_other, "未找到rtp服务"));
return;
}
it->second->connectToServer(dst_url, dst_port, cb);
}
bool closeRtpServer(const string &stream_id) {
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
auto it = s_rtpServerMap.find(stream_id);
if (it == s_rtpServerMap.end()) {
return false;
}
auto server = it->second;
s_rtpServerMap.erase(it);
return true;
}
#endif
void getStatisticJson(const function<void(Value &val)> &cb) {
@ -546,23 +576,23 @@ void addStreamProxy(const string &vhost, const string &app, const string &stream
const ProtocolOption &option, int rtp_type, float timeout_sec, const mINI &args,
const function<void(const SockException &ex, const string &key)> &cb) {
auto key = getProxyKey(vhost, app, stream);
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
if (s_proxyMap.find(key) != s_proxyMap.end()) {
if (s_player_proxy.find(key)) {
//already pulling this stream
cb(SockException(Err_other, "This stream already exists"), key);
return;
}
//add the pull-stream proxy
auto player = std::make_shared<PlayerProxy>(vhost, app, stream, option, retry_count);
s_proxyMap[key] = player;
auto player = s_player_proxy.make(key, vhost, app, stream, option, retry_count);
// pass the options through first
player->mINI::operator=(args);
// copy and pass the options through first
for (auto &pr : args) {
(*player)[pr.first] = pr.second;
}
//specify RTP over TCP (effective for rtsp)
(*player)[Client::kRtpType] = rtp_type;
if (timeout_sec > 0.1) {
if (timeout_sec > 0.1f) {
//play handshake timeout
(*player)[Client::kTimeoutMS] = timeout_sec * 1000;
}
@ -570,28 +600,69 @@ void addStreamProxy(const string &vhost, const string &app, const string &stream
//start playing; if playback fails or stops it retries automatically, by default forever
player->setPlayCallbackOnce([cb, key](const SockException &ex) {
if (ex) {
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
s_proxyMap.erase(key);
s_player_proxy.erase(key);
}
cb(ex, key);
});
//pulling was closed actively
player->setOnClose([key](const SockException &ex) {
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
s_proxyMap.erase(key);
s_player_proxy.erase(key);
});
player->play(url);
};
template <typename Type>
static void getArgsValue(const HttpAllArgs<ApiArgsType> &allArgs, const string &key, Type &value) {
auto val = allArgs[key];
if (!val.empty()) {
value = (Type)val;
void addStreamPusherProxy(const string &schema,
const string &vhost,
const string &app,
const string &stream,
const string &url,
int retry_count,
int rtp_type,
float timeout_sec,
const function<void(const SockException &ex, const string &key)> &cb) {
auto key = getPusherKey(schema, vhost, app, stream, url);
auto src = MediaSource::find(schema, vhost, app, stream);
if (!src) {
cb(SockException(Err_other, "can not find the source stream"), key);
return;
}
if (s_pusher_proxy.find(key)) {
//already pushing this stream
cb(SockException(Err_success), key);
return;
}
//add the push-stream proxy
auto pusher = s_pusher_proxy.make(key, src, retry_count);
//specify RTP over TCP (effective for rtsp)
pusher->emplace(Client::kRtpType, rtp_type);
if (timeout_sec > 0.1f) {
//push handshake timeout
pusher->emplace(Client::kTimeoutMS, timeout_sec * 1000);
}
//start pushing; if pushing fails or stops it retries automatically, by default forever
pusher->setPushCallbackOnce([cb, key, url](const SockException &ex) {
if (ex) {
WarnL << "Push " << url << " failed, key: " << key << ", err: " << ex;
s_pusher_proxy.erase(key);
}
cb(ex, key);
});
//pushing was closed actively
pusher->setOnClose([key, url](const SockException &ex) {
WarnL << "Push " << url << " failed, key: " << key << ", err: " << ex;
s_pusher_proxy.erase(key);
});
pusher->publish(url);
}
/**
* API endpoints
* All APIs support both GET and POST
@ -657,7 +728,7 @@ void installWebApi() {
CHECK_SECRET();
auto &ini = mINI::Instance();
int changed = API::Success;
for (auto &pr : allArgs.getArgs()) {
for (auto &pr : allArgs.args) {
if (ini.find(pr.first) == ini.end()) {
#if 1
//this key does not exist
@ -973,59 +1044,6 @@ void installWebApi() {
val["count_hit"] = (Json::UInt64)count_hit;
});
static auto addStreamPusherProxy = [](const string &schema,
const string &vhost,
const string &app,
const string &stream,
const string &url,
int retry_count,
int rtp_type,
float timeout_sec,
const function<void(const SockException &ex, const string &key)> &cb) {
auto key = getPusherKey(schema, vhost, app, stream, url);
auto src = MediaSource::find(schema, vhost, app, stream);
if (!src) {
cb(SockException(Err_other, "can not find the source stream"), key);
return;
}
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
if (s_proxyPusherMap.find(key) != s_proxyPusherMap.end()) {
//already pushing this stream
cb(SockException(Err_success), key);
return;
}
//add the push-stream proxy
auto pusher = std::make_shared<PusherProxy>(src, retry_count);
s_proxyPusherMap[key] = pusher;
//specify RTP over TCP (effective for rtsp)
(*pusher)[Client::kRtpType] = rtp_type;
if (timeout_sec > 0.1) {
//push handshake timeout
(*pusher)[Client::kTimeoutMS] = timeout_sec * 1000;
}
//start pushing; if pushing fails or stops it retries automatically, by default forever
pusher->setPushCallbackOnce([cb, key, url](const SockException &ex) {
if (ex) {
WarnL << "Push " << url << " failed, key: " << key << ", err: " << ex;
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
s_proxyPusherMap.erase(key);
}
cb(ex, key);
});
//pushing was closed actively
pusher->setOnClose([key, url](const SockException &ex) {
WarnL << "Push " << url << " failed, key: " << key << ", err: " << ex;
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
s_proxyPusherMap.erase(key);
});
pusher->publish(url);
};
//dynamically add an rtsp/rtmp push proxy
//test url http://127.0.0.1/index/api/addStreamPusherProxy?schema=rtmp&vhost=__defaultVhost__&app=proxy&stream=0&dst_url=rtmp://127.0.0.1/live/obs
api_regist("/index/api/addStreamPusherProxy", [](API_ARGS_MAP_ASYNC) {
@ -1058,8 +1076,7 @@ void installWebApi() {
api_regist("/index/api/delStreamPusherProxy", [](API_ARGS_MAP) {
CHECK_SECRET();
CHECK_ARGS("key");
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
val["data"]["flag"] = s_proxyPusherMap.erase(allArgs["key"]) == 1;
val["data"]["flag"] = s_pusher_proxy.erase(allArgs["key"]) == 1;
});
//dynamically add an rtsp/rtmp pull proxy
@ -1069,7 +1086,7 @@ void installWebApi() {
CHECK_ARGS("vhost","app","stream","url");
mINI args;
for (auto &pr : allArgs.getArgs()) {
for (auto &pr : allArgs.args) {
args.emplace(pr.first, pr.second);
}
@ -1100,8 +1117,7 @@ void installWebApi() {
api_regist("/index/api/delStreamProxy",[](API_ARGS_MAP){
CHECK_SECRET();
CHECK_ARGS("key");
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
val["data"]["flag"] = s_proxyMap.erase(allArgs["key"]) == 1;
val["data"]["flag"] = s_player_proxy.erase(allArgs["key"]) == 1;
});
static auto addFFmpegSource = [](const string &ffmpeg_cmd_key,
@ -1112,25 +1128,21 @@ void installWebApi() {
bool enable_mp4,
const function<void(const SockException &ex, const string &key)> &cb) {
auto key = MD5(dst_url).hexdigest();
lock_guard<decltype(s_ffmpegMapMtx)> lck(s_ffmpegMapMtx);
if (s_ffmpegMap.find(key) != s_ffmpegMap.end()) {
if (s_ffmpeg_src.find(key)) {
//已经在拉流了
cb(SockException(Err_success), key);
return;
}
FFmpegSource::Ptr ffmpeg = std::make_shared<FFmpegSource>();
s_ffmpegMap[key] = ffmpeg;
auto ffmpeg = s_ffmpeg_src.make(key);
ffmpeg->setOnClose([key]() {
lock_guard<decltype(s_ffmpegMapMtx)> lck(s_ffmpegMapMtx);
s_ffmpegMap.erase(key);
s_ffmpeg_src.erase(key);
});
ffmpeg->setupRecordFlag(enable_hls, enable_mp4);
ffmpeg->play(ffmpeg_cmd_key, src_url, dst_url, timeout_ms, [cb, key](const SockException &ex) {
if (ex) {
lock_guard<decltype(s_ffmpegMapMtx)> lck(s_ffmpegMapMtx);
s_ffmpegMap.erase(key);
s_ffmpeg_src.erase(key);
}
cb(ex, key);
});
@ -1164,15 +1176,14 @@ void installWebApi() {
api_regist("/index/api/delFFmpegSource",[](API_ARGS_MAP){
CHECK_SECRET();
CHECK_ARGS("key");
lock_guard<decltype(s_ffmpegMapMtx)> lck(s_ffmpegMapMtx);
val["data"]["flag"] = s_ffmpegMap.erase(allArgs["key"]) == 1;
val["data"]["flag"] = s_ffmpeg_src.erase(allArgs["key"]) == 1;
});
//new http api for downloading the executable file
//test url http://127.0.0.1/index/api/downloadBin
api_regist("/index/api/downloadBin",[](API_ARGS_MAP_ASYNC){
CHECK_SECRET();
invoker.responseFile(allArgs.getParser().getHeader(),StrCaseMap(),exePath());
invoker.responseFile(allArgs.parser.getHeader(), StrCaseMap(), exePath());
});
#if defined(ENABLE_RTPPROXY)
@ -1198,12 +1209,17 @@ void installWebApi() {
//compatibility with old requests: the new version removes enable_tcp and adds tcp_mode
tcp_mode = 1;
}
auto only_track = allArgs["only_track"].as<int>();
if (allArgs["only_audio"].as<bool>()) {
// compatibility with old requests: the new version removes only_audio and adds only_track
only_track = 1;
}
std::string local_ip = "::";
if (!allArgs["local_ip"].empty()) {
local_ip = allArgs["local_ip"];
}
auto port = openRtpServer(allArgs["port"], stream_id, tcp_mode, local_ip, allArgs["re_use_port"].as<bool>(),
allArgs["ssrc"].as<uint32_t>(), allArgs["only_audio"].as<bool>());
allArgs["ssrc"].as<uint32_t>(), only_track);
if (port == 0) {
throw InvalidArgsException("该stream_id已存在");
}
@ -1220,11 +1236,16 @@ void installWebApi() {
// compatibility with old requests: the new version removes enable_tcp and adds tcp_mode
tcp_mode = 1;
}
auto only_track = allArgs["only_track"].as<int>();
if (allArgs["only_audio"].as<bool>()) {
// compatibility with old requests: the new version removes only_audio and adds only_track
only_track = 1;
}
std::string local_ip = "::";
if (!allArgs["local_ip"].empty()) {
local_ip = allArgs["local_ip"];
}
auto port = openRtpServer(allArgs["port"], stream_id, tcp_mode, local_ip, true, 0, allArgs["only_audio"].as<bool>(),true);
auto port = openRtpServer(allArgs["port"], stream_id, tcp_mode, local_ip, true, 0, only_track,true);
if (port == 0) {
throw InvalidArgsException("该stream_id已存在");
}
@ -1235,22 +1256,27 @@ void installWebApi() {
api_regist("/index/api/connectRtpServer", [](API_ARGS_MAP_ASYNC) {
CHECK_SECRET();
CHECK_ARGS("stream_id", "dst_url", "dst_port");
connectRtpServer(
allArgs["stream_id"], allArgs["dst_url"], allArgs["dst_port"],
[val, headerOut, invoker](const SockException &ex) mutable {
if (ex) {
val["code"] = API::OtherFailed;
val["msg"] = ex.what();
}
invoker(200, headerOut, val.toStyledString());
});
auto cb = [val, headerOut, invoker](const SockException &ex) mutable {
if (ex) {
val["code"] = API::OtherFailed;
val["msg"] = ex.what();
}
invoker(200, headerOut, val.toStyledString());
};
auto server = s_rtp_server.find(allArgs["stream_id"]);
if (!server) {
cb(SockException(Err_other, "未找到rtp服务"));
return;
}
server->connectToServer(allArgs["dst_url"], allArgs["dst_port"], cb);
});
api_regist("/index/api/closeRtpServer",[](API_ARGS_MAP){
CHECK_SECRET();
CHECK_ARGS("stream_id");
if(!closeRtpServer(allArgs["stream_id"])){
if(s_rtp_server.erase(allArgs["stream_id"]) == 0){
val["hit"] = 0;
return;
}
@ -1261,19 +1287,18 @@ void installWebApi() {
CHECK_SECRET();
CHECK_ARGS("stream_id", "ssrc");
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
auto it = s_rtpServerMap.find(allArgs["stream_id"]);
if (it == s_rtpServerMap.end()) {
auto server = s_rtp_server.find(allArgs["stream_id"]);
if (!server) {
throw ApiRetException("RtpServer not found by stream_id", API::NotFound);
}
it->second->updateSSRC(allArgs["ssrc"]);
server->updateSSRC(allArgs["ssrc"]);
});
api_regist("/index/api/listRtpServer",[](API_ARGS_MAP){
CHECK_SECRET();
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
for (auto &pr : s_rtpServerMap) {
std::lock_guard<std::recursive_mutex> lck(s_rtp_server._mtx);
for (auto &pr : s_rtp_server._map) {
Value obj;
obj["stream_id"] = pr.first;
obj["port"] = pr.second->getPort();
@ -1289,7 +1314,11 @@ void installWebApi() {
if (!src) {
throw ApiRetException("can not find the source stream", API::NotFound);
}
auto type = allArgs["type"].as<int>();
if (!allArgs["use_ps"].empty()) {
// compatibility with the older use_ps parameter
type = allArgs["use_ps"].as<int>();
}
MediaSourceEvent::SendRtpArgs args;
args.passive = false;
args.dst_url = allArgs["dst_url"];
@ -1299,11 +1328,11 @@ void installWebApi() {
args.is_udp = allArgs["is_udp"];
args.src_port = allArgs["src_port"];
args.pt = allArgs["pt"].empty() ? 96 : allArgs["pt"].as<int>();
args.use_ps = allArgs["use_ps"].empty() ? true : allArgs["use_ps"].as<bool>();
args.type = (MediaSourceEvent::SendRtpArgs::Type)type;
args.only_audio = allArgs["only_audio"].as<bool>();
args.udp_rtcp_timeout = allArgs["udp_rtcp_timeout"];
args.recv_stream_id = allArgs["recv_stream_id"];
TraceL << "startSendRtp, pt " << int(args.pt) << " ps " << args.use_ps << " audio " << args.only_audio;
TraceL << "startSendRtp, pt " << int(args.pt) << " rtp type " << type << " audio " << args.only_audio;
src->getOwnerPoller()->async([=]() mutable {
src->startSendRtp(args, [val, headerOut, invoker](uint16_t local_port, const SockException &ex) mutable {
@ -1325,6 +1354,11 @@ void installWebApi() {
if (!src) {
throw ApiRetException("can not find the source stream", API::NotFound);
}
auto type = allArgs["type"].as<int>();
if (!allArgs["use_ps"].empty()) {
// compatibility with the older use_ps parameter
type = allArgs["use_ps"].as<int>();
}
MediaSourceEvent::SendRtpArgs args;
args.passive = true;
@ -1332,12 +1366,12 @@ void installWebApi() {
args.is_udp = false;
args.src_port = allArgs["src_port"];
args.pt = allArgs["pt"].empty() ? 96 : allArgs["pt"].as<int>();
args.use_ps = allArgs["use_ps"].empty() ? true : allArgs["use_ps"].as<bool>();
args.type = (MediaSourceEvent::SendRtpArgs::Type)type;
args.only_audio = allArgs["only_audio"].as<bool>();
args.recv_stream_id = allArgs["recv_stream_id"];
//timeout for the passive tcp server while waiting for a connection
args.tcp_passive_close_delay_ms = allArgs["close_delay_ms"];
TraceL << "startSendRtpPassive, pt " << int(args.pt) << " ps " << args.use_ps << " audio " << args.only_audio;
TraceL << "startSendRtpPassive, pt " << int(args.pt) << " rtp type " << type << " audio " << args.only_audio;
src->getOwnerPoller()->async([=]() mutable {
src->startSendRtp(args, [val, headerOut, invoker](uint16_t local_port, const SockException &ex) mutable {
@ -1499,18 +1533,11 @@ void installWebApi() {
api_regist("/index/api/getProxyPusherInfo", [](API_ARGS_MAP_ASYNC) {
CHECK_SECRET();
CHECK_ARGS("key");
decltype(s_proxyPusherMap.end()) it;
{
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
it = s_proxyPusherMap.find(allArgs["key"]);
}
if (it == s_proxyPusherMap.end()) {
auto pusher = s_pusher_proxy.find(allArgs["key"]);
if (!pusher) {
throw ApiRetException("can not find pusher", API::NotFound);
}
auto pusher = it->second;
val["data"]["status"] = pusher->getStatus();
val["data"]["liveSecs"] = pusher->getLiveSecs();
val["data"]["rePublishCount"] = pusher->getRePublishCount();
@ -1520,18 +1547,11 @@ void installWebApi() {
api_regist("/index/api/getProxyInfo", [](API_ARGS_MAP_ASYNC) {
CHECK_SECRET();
CHECK_ARGS("key");
decltype(s_proxyMap.end()) it;
{
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
it = s_proxyMap.find(allArgs["key"]);
}
if (it == s_proxyMap.end()) {
auto proxy = s_player_proxy.find(allArgs["key"]);
if (!proxy) {
throw ApiRetException("can not find the proxy", API::NotFound);
}
auto proxy = it->second;
val["data"]["status"] = proxy->getStatus();
val["data"]["liveSecs"] = proxy->getLiveSecs();
val["data"]["rePullCount"] = proxy->getRePullCount();
@ -1670,7 +1690,7 @@ void installWebApi() {
//the snapshot exists and has not expired, so return it
res_old_snap = true;
responseSnap(path, allArgs.getParser().getHeader(), invoker);
responseSnap(path, allArgs.parser.getHeader(), invoker);
//stop the iteration
return false;
});
@ -1701,7 +1721,7 @@ void installWebApi() {
File::delete_file(new_snap);
rename(new_snap_tmp.data(), new_snap.data());
}
responseSnap(new_snap, allArgs.getParser().getHeader(), invoker, err_msg);
responseSnap(new_snap, allArgs.parser.getHeader(), invoker, err_msg);
});
});
@ -1716,7 +1736,7 @@ void installWebApi() {
#ifdef ENABLE_WEBRTC
class WebRtcArgsImp : public WebRtcArgs {
public:
WebRtcArgsImp(const HttpAllArgs<string> &args, std::string session_id)
WebRtcArgsImp(const ArgsString &args, std::string session_id)
: _args(args)
, _session_id(std::move(session_id)) {}
~WebRtcArgsImp() override = default;
@ -1734,40 +1754,26 @@ void installWebApi() {
CHECK_ARGS("app", "stream");
return StrPrinter << "rtc://" << _args["Host"] << "/" << _args["app"] << "/"
<< _args["stream"] << "?" << _args.getParser().params() + "&session=" + _session_id;
<< _args["stream"] << "?" << _args.parser.params() + "&session=" + _session_id;
}
private:
HttpAllArgs<string> _args;
ArgsString _args;
std::string _session_id;
};
api_regist("/index/api/webrtc",[](API_ARGS_STRING_ASYNC){
CHECK_ARGS("type");
auto type = allArgs["type"];
auto offer = allArgs.getArgs();
auto offer = allArgs.args;
CHECK(!offer.empty(), "http body(webrtc offer sdp) is empty");
std::string host = allArgs.getParser()["Host"];
std::string localIp = host.substr(0, host.find(':'));
auto isVaildIP = [](std::string ip)-> bool {
int a,b,c,d;
return sscanf(ip.c_str(),"%d.%d.%d.%d", &a, &b, &c, &d) == 4;
};
if (!isVaildIP(localIp) || localIp=="127.0.0.1") {
localIp = "";
}
auto &session = static_cast<Session&>(sender);
auto args = std::make_shared<WebRtcArgsImp>(allArgs, sender.getIdentifier());
WebRtcPluginManager::Instance().getAnswerSdp(static_cast<Session&>(sender), type, *args, [invoker, val, offer, headerOut, localIp](const WebRtcInterface &exchanger) mutable {
//set the response content type
headerOut["Content-Type"] = HttpFileManager::getContentType(".json");
//allow cross-origin requests
headerOut["Access-Control-Allow-Origin"] = "*";
WebRtcPluginManager::Instance().negotiateSdp(session, type, *args, [invoker, val, offer, headerOut](const WebRtcInterface &exchanger) mutable {
auto &handler = const_cast<WebRtcInterface &>(exchanger);
try {
setLocalIp(exchanger,localIp);
val["sdp"] = exchangeSdp(exchanger, offer);
val["sdp"] = handler.getAnswerSdp(offer);
val["id"] = exchanger.getIdentifier();
val["type"] = "answer";
invoker(200, headerOut, val.toStyledString());
@ -1781,26 +1787,24 @@ void installWebApi() {
static constexpr char delete_webrtc_url [] = "/index/api/delete_webrtc";
static auto whip_whep_func = [](const char *type, API_ARGS_STRING_ASYNC) {
auto offer = allArgs.getArgs();
auto offer = allArgs.args;
CHECK(!offer.empty(), "http body(webrtc offer sdp) is empty");
auto &session = static_cast<Session&>(sender);
auto location = std::string("http") + (session.overSsl() ? "s" : "") + "://" + allArgs["host"] + delete_webrtc_url;
auto location = std::string(session.overSsl() ? "https://" : "http://") + allArgs["host"] + delete_webrtc_url;
auto args = std::make_shared<WebRtcArgsImp>(allArgs, sender.getIdentifier());
WebRtcPluginManager::Instance().getAnswerSdp(session, type, *args,
[invoker, offer, headerOut, location](const WebRtcInterface &exchanger) mutable {
// allow cross-origin access
headerOut["Access-Control-Allow-Origin"] = "*";
try {
// set the response content type
headerOut["Content-Type"] = "application/sdp";
headerOut["Location"] = location + "?id=" + exchanger.getIdentifier() + "&token=" + exchanger.deleteRandStr();
invoker(201, headerOut, exchangeSdp(exchanger, offer));
} catch (std::exception &ex) {
headerOut["Content-Type"] = "text/plain";
invoker(406, headerOut, ex.what());
}
});
WebRtcPluginManager::Instance().negotiateSdp(session, type, *args, [invoker, offer, headerOut, location](const WebRtcInterface &exchanger) mutable {
auto &handler = const_cast<WebRtcInterface &>(exchanger);
try {
// set the response content type
headerOut["Content-Type"] = "application/sdp";
headerOut["Location"] = location + "?id=" + exchanger.getIdentifier() + "&token=" + exchanger.deleteRandStr();
invoker(201, headerOut, handler.getAnswerSdp(offer));
} catch (std::exception &ex) {
headerOut["Content-Type"] = "text/plain";
invoker(406, headerOut, ex.what());
}
});
};
api_regist("/index/api/whip", [](API_ARGS_STRING_ASYNC) { whip_whep_func("push", API_ARGS_VALUE, invoker); });
@ -1808,7 +1812,7 @@ void installWebApi() {
api_regist(delete_webrtc_url, [](API_ARGS_MAP_ASYNC) {
CHECK_ARGS("id", "token");
CHECK(allArgs.getParser().method() == "DELETE", "http method is not DELETE: " + allArgs.getParser().method());
CHECK(allArgs.parser.method() == "DELETE", "http method is not DELETE: " + allArgs.parser.method());
auto obj = WebRtcTransportManager::Instance().getItem(allArgs["id"]);
if (!obj) {
invoker(404, headerOut, "id not found");
@ -1858,7 +1862,7 @@ void installWebApi() {
std::set<std::string> ret;
auto vec = toolkit::split(str, ";");
for (auto &item : vec) {
auto root = File::absolutePath(item, "", true);
auto root = File::absolutePath("", item, true);
ret.emplace(std::move(root));
}
return ret;
@ -1894,44 +1898,50 @@ void installWebApi() {
if (!save_name.empty()) {
res_header.emplace("Content-Disposition", "attachment;filename=\"" + save_name + "\"");
}
invoker.responseFile(allArgs.getParser().getHeader(), res_header, allArgs["file_path"]);
invoker.responseFile(allArgs.parser.getHeader(), res_header, allArgs["file_path"]);
}
};
bool flag = NOTICE_EMIT(BroadcastHttpAccessArgs, Broadcast::kBroadcastHttpAccess, allArgs.getParser(), file_path, false, file_invoker, sender);
bool flag = NOTICE_EMIT(BroadcastHttpAccessArgs, Broadcast::kBroadcastHttpAccess, allArgs.parser, file_path, false, file_invoker, sender);
if (!flag) {
// no listener for the file-download auth event, download is not allowed
invoker(401, StrCaseMap {}, "None http access event listener");
}
});
#if defined(ENABLE_X264) && defined(ENABLE_FFMPEG)
VideoStackManager::Instance().loadBgImg("novideo.yuv");
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastStreamNoneReader, [](BroadcastStreamNoneReaderArgs) {
auto id = sender.getMediaTuple().stream;
VideoStackManager::Instance().stopVideoStack(id);
});
api_regist("/index/api/stack/start", [](API_ARGS_JSON_ASYNC) {
CHECK_SECRET();
auto ret = VideoStackManager::Instance().startVideoStack(allArgs.args);
val["code"] = ret;
val["msg"] = ret ? "failed" : "success";
invoker(200, headerOut, val.toStyledString());
});
api_regist("/index/api/stack/stop", [](API_ARGS_MAP_ASYNC) {
CHECK_SECRET();
CHECK_ARGS("id");
auto ret = VideoStackManager::Instance().stopVideoStack(allArgs["id"]);
val["code"] = ret;
val["msg"] = ret ? "failed" : "success";
invoker(200, headerOut, val.toStyledString());
});
#endif
}
void unInstallWebApi(){
{
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
auto proxyMap(std::move(s_proxyMap));
proxyMap.clear();
}
{
lock_guard<recursive_mutex> lck(s_ffmpegMapMtx);
auto ffmpegMap(std::move(s_ffmpegMap));
ffmpegMap.clear();
}
{
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
auto proxyPusherMap(std::move(s_proxyPusherMap));
proxyPusherMap.clear();
}
{
s_player_proxy.clear();
s_ffmpeg_src.clear();
s_pusher_proxy.clear();
#if defined(ENABLE_RTPPROXY)
RtpSelector::Instance().clear();
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
auto rtpServerMap(std::move(s_rtpServerMap));
rtpServerMap.clear();
s_rtp_server.clear();
#endif
}
NoticeCenter::Instance().delListener(&web_api_tag);
}


@ -115,72 +115,41 @@ std::string getValue(const mediakit::Parser &parser, Args &args, const First &fi
template<typename Args>
class HttpAllArgs {
mediakit::Parser* _parser = nullptr;
Args* _args = nullptr;
public:
HttpAllArgs(const mediakit::Parser &parser, Args &args) {
_get_args = [&args]() {
return (void *) &args;
};
_get_parser = [&parser]() -> const mediakit::Parser & {
return parser;
};
_get_value = [](HttpAllArgs &that, const std::string &key) {
return getValue(that.getParser(), that.getArgs(), key);
};
_clone = [&](HttpAllArgs &that) {
that._get_args = [args]() {
return (void *) &args;
};
that._get_parser = [parser]() -> const mediakit::Parser & {
return parser;
};
that._get_value = [](HttpAllArgs &that, const std::string &key) {
return getValue(that.getParser(), that.getArgs(), key);
};
that._cache_able = true;
};
}
const mediakit::Parser& parser;
Args& args;
HttpAllArgs(const HttpAllArgs &that) {
if (that._cache_able) {
_get_args = that._get_args;
_get_parser = that._get_parser;
_get_value = that._get_value;
_cache_able = true;
} else {
that._clone(*this);
HttpAllArgs(const mediakit::Parser &p, Args &a): parser(p), args(a) {}
HttpAllArgs(const HttpAllArgs &that): _parser(new mediakit::Parser(that.parser)),
_args(new Args(that.args)),
parser(*_parser), args(*_args) {}
~HttpAllArgs() {
if (_parser) {
delete _parser;
}
if (_args) {
delete _args;
}
}
template<typename Key>
toolkit::variant operator[](const Key &key) const {
return (toolkit::variant)_get_value(*(HttpAllArgs*)this, key);
return (toolkit::variant)getValue(parser, args, key);
}
const mediakit::Parser &getParser() const {
return _get_parser();
}
Args &getArgs() {
return *((Args *) _get_args());
}
const Args &getArgs() const {
return *((Args *) _get_args());
}
private:
bool _cache_able = false;
std::function<void *() > _get_args;
std::function<const mediakit::Parser &() > _get_parser;
std::function<std::string(HttpAllArgs &that, const std::string &key)> _get_value;
std::function<void(HttpAllArgs &that) > _clone;
};
#define API_ARGS_MAP toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const HttpAllArgs<ApiArgsType> &allArgs, Json::Value &val
using ArgsMap = HttpAllArgs<ApiArgsType>;
using ArgsJson = HttpAllArgs<Json::Value>;
using ArgsString = HttpAllArgs<std::string>;
#define API_ARGS_MAP toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const ArgsMap &allArgs, Json::Value &val
#define API_ARGS_MAP_ASYNC API_ARGS_MAP, const mediakit::HttpSession::HttpResponseInvoker &invoker
#define API_ARGS_JSON toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const HttpAllArgs<Json::Value> &allArgs, Json::Value &val
#define API_ARGS_JSON toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const ArgsJson &allArgs, Json::Value &val
#define API_ARGS_JSON_ASYNC API_ARGS_JSON, const mediakit::HttpSession::HttpResponseInvoker &invoker
#define API_ARGS_STRING toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const HttpAllArgs<std::string> &allArgs, Json::Value &val
#define API_ARGS_STRING toolkit::SockInfo &sender, mediakit::HttpSession::KeyValue &headerOut, const ArgsString &allArgs, Json::Value &val
#define API_ARGS_STRING_ASYNC API_ARGS_STRING, const mediakit::HttpSession::HttpResponseInvoker &invoker
#define API_ARGS_VALUE sender, headerOut, allArgs, val
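A minimal usage sketch of the simplified wrapper (the endpoint name and argument below are hypothetical): the public parser/args references replace the old getParser()/getArgs() accessors, while operator[] still resolves keys through getValue() across url arguments, headers and body.

api_regist("/index/api/demo", [](API_ARGS_MAP) {
    CHECK_SECRET();
    std::string demo_arg = allArgs["demo_arg"];        // looked up via getValue()
    val["data"]["demo_arg"] = demo_arg;
    val["data"]["params"] = allArgs.parser.params();   // direct access to the Parser reference
});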
@ -233,9 +202,7 @@ void installWebApi();
void unInstallWebApi();
#if defined(ENABLE_RTPPROXY)
uint16_t openRtpServer(uint16_t local_port, const std::string &stream_id, int tcp_mode, const std::string &local_ip, bool re_use_port, uint32_t ssrc, bool only_audio, bool multiplex=false);
void connectRtpServer(const std::string &stream_id, const std::string &dst_url, uint16_t dst_port, const std::function<void(const toolkit::SockException &ex)> &cb);
bool closeRtpServer(const std::string &stream_id);
uint16_t openRtpServer(uint16_t local_port, const std::string &stream_id, int tcp_mode, const std::string &local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex=false);
#endif
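A hedged call sketch for the updated signature (all values illustrative; tcp_mode 1 is assumed to be passive mode): only_track replaces the old only_audio flag, with 0 = accept all tracks, 1 = audio only, 2 = video only, matching RtpProcess::OnlyTrack further below.

// let the system pick a free port, passive tcp mode, accept the video track only
auto port = openRtpServer(0, "stream_demo", /*tcp_mode*/ 1, "::",
                          /*re_use_port*/ true, /*ssrc*/ 0, /*only_track*/ 2);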
Json::Value makeMediaSourceJson(mediakit::MediaSource &media);


@ -38,7 +38,7 @@
#endif
#if defined(ENABLE_VERSION)
#include "version.h"
#include "ZLMVersion.h"
#endif
#if !defined(_WIN32)
@ -392,8 +392,8 @@ int start_main(int argc,char *argv[]) {
#endif//defined(ENABLE_WEBRTC)
#if defined(ENABLE_SRT)
// srt udp server
if(srtPort) { srtSrv->start<SRT::SrtSession>(srtPort); }
// srt udp server
if (srtPort) { srtSrv->start<SRT::SrtSession>(srtPort); }
#endif//defined(ENABLE_SRT)
} catch (std::exception &ex) {


@ -133,7 +133,7 @@ void MediaSink::checkTrackIfReady() {
}
GET_CONFIG(uint32_t, kMaxAddTrackMS, General::kWaitAddTrackMS);
if (_track_map.size() == 1 && _ticker.elapsedTime() > kMaxAddTrackMS) {
if (_track_map.size() == 1 && (_ticker.elapsedTime() > kMaxAddTrackMS || !_enable_audio)) {
// if there is only one track, after it is added we wait at most a short while (another track may still be added)
emitAllTrackReady();
return;
@ -187,6 +187,8 @@ void MediaSink::emitAllTrackReady() {
pr.second.for_each([&](const Frame::Ptr &frame) { MediaSink::inputFrame(frame); });
}
_frame_unread.clear();
} else {
throw toolkit::SockException(toolkit::Err_shutdown, "no vaild track data");
}
}


@ -92,10 +92,11 @@ public:
class SendRtpArgs {
public:
enum Type { kRtpRAW = 0, kRtpPS = 1, kRtpTS = 2 };
// whether to send rtp over udp
bool is_udp = true;
// whether rtp carries ps or es
bool use_ps = true;
// rtp payload format (raw es / ps / ts)
Type type = kRtpPS;
//when sending an es stream, whether to send only the pure audio stream
bool only_audio = false;
//tcp passive mode
@ -135,6 +136,15 @@ private:
toolkit::Timer::Ptr _async_close_timer;
};
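A hedged sketch of filling the reworked struct when starting an rtp sender (values illustrative; the ssrc/pt members are taken from their use in RtpSender.cpp further below):

MediaSourceEvent::SendRtpArgs args;
args.type = MediaSourceEvent::SendRtpArgs::kRtpTS;   // raw es / ps / ts replaces the old use_ps flag
args.is_udp = true;
args.ssrc = "1234";                                  // parsed with atoi() by RtpSender
args.pt = 96;                                        // illustrative payload type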
template <typename MAP, typename KEY, typename TYPE>
static void getArgsValue(const MAP &allArgs, const KEY &key, TYPE &value) {
auto val = ((MAP &)allArgs)[key];
if (!val.empty()) {
value = (TYPE)val;
}
}
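Now that the helper sits at namespace scope it can be reused outside ProtocolOption; a minimal sketch (enable_hls is one of the existing protocol options) — the value is only overwritten when the request actually carries a non-empty argument:

bool enable_hls = true;                           // default taken from config
getArgsValue(allArgs, "enable_hls", enable_hls);  // left untouched if the request omits the arg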
class ProtocolOption {
public:
ProtocolOption();
@ -242,15 +252,6 @@ public:
GET_OPT_VALUE(stream_replace);
GET_OPT_VALUE(max_track);
}
private:
template <typename MAP, typename KEY, typename TYPE>
static void getArgsValue(const MAP &allArgs, const KEY &key, TYPE &value) {
auto val = ((MAP &)allArgs)[key];
if (!val.empty()) {
value = (TYPE)val;
}
}
};
//this object intercepts the MediaSourceEvent events of interest


@ -44,6 +44,7 @@ public:
}
void resetTimer(const EventPoller::Ptr &poller) {
std::lock_guard<std::recursive_mutex> lck(_mtx);
std::weak_ptr<FramePacedSender> weak_self = shared_from_this();
_timer = std::make_shared<Timer>(_paced_sender_ms / 1000.0f, [weak_self]() {
if (auto strong_self = weak_self.lock()) {
@ -55,6 +56,7 @@ public:
}
bool inputFrame(const Frame::Ptr &frame) override {
std::lock_guard<std::recursive_mutex> lck(_mtx);
if (!_timer) {
setCurrentStamp(frame->dts());
resetTimer(EventPoller::getCurrentPoller());
@ -66,6 +68,7 @@ public:
private:
void onTick() {
std::lock_guard<std::recursive_mutex> lck(_mtx);
auto dst = _cache.empty() ? 0 : _cache.back().first;
while (!_cache.empty()) {
auto &front = _cache.front();
@ -110,6 +113,7 @@ private:
OnFrame _cb;
Ticker _ticker;
Timer::Ptr _timer;
std::recursive_mutex _mtx;
std::list<std::pair<uint64_t, Frame::Ptr>> _cache;
};
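The added _mtx serializes inputFrame(), onTick() and resetTimer(), while the explicit weak_self capture keeps the Timer callback from extending the sender's lifetime; a minimal sketch of that pattern, assuming the usual ZLToolKit Timer(seconds, callback, poller) constructor:

std::weak_ptr<FramePacedSender> weak_self = shared_from_this();
_timer = std::make_shared<Timer>(_paced_sender_ms / 1000.0f, [weak_self]() {
    auto strong_self = weak_self.lock();
    if (!strong_self) {
        return false;   // object already destroyed, stop the timer
    }
    strong_self->onTick();
    return true;        // keep repeating
}, poller);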
@ -593,15 +597,17 @@ void MultiMediaSourceMuxer::resetTracks() {
}
}
bool MultiMediaSourceMuxer::onTrackFrame(const Frame::Ptr &frame) {
bool MultiMediaSourceMuxer::onTrackFrame(const Frame::Ptr &frame_in) {
auto frame = frame_in;
if (_option.modify_stamp != ProtocolOption::kModifyStampOff) {
// do not use the original absolute timestamps
const_cast<Frame::Ptr&>(frame) = std::make_shared<FrameStamp>(frame, _stamps[frame->getIndex()], _option.modify_stamp);
frame = std::make_shared<FrameStamp>(frame, _stamps[frame->getIndex()], _option.modify_stamp);
}
return _paced_sender ? _paced_sender->inputFrame(frame) : onTrackFrame_l(frame);
}
bool MultiMediaSourceMuxer::onTrackFrame_l(const Frame::Ptr &frame) {
bool MultiMediaSourceMuxer::onTrackFrame_l(const Frame::Ptr &frame_in) {
auto frame = frame_in;
bool ret = false;
if (_rtmp) {
ret = _rtmp->inputFrame(frame) ? true : ret;
@ -629,7 +635,7 @@ bool MultiMediaSourceMuxer::onTrackFrame_l(const Frame::Ptr &frame) {
}
if (_ring) {
// direct forwarding may cross threads and leave data buffered in the pipe, so a CacheAbleFrame is required here
const_cast<Frame::Ptr &>(frame) = Frame::getCacheAbleFrame(frame);
frame = Frame::getCacheAbleFrame(frame);
if (frame->getTrackType() == TrackVideo) {
// for video, the first config frame or key frame marks the start of a gop
auto video_key_pos = frame->keyFrame() || frame->configFrame();


@ -294,8 +294,8 @@ void RtspUrl::setup(bool is_ssl, const string &url, const string &user, const st
splitUrl(ip, ip, port);
_url = std::move(url);
_user = strCoding::UrlDecodeComponent(std::move(user));
_passwd = strCoding::UrlDecodeComponent(std::move(passwd));
_user = strCoding::UrlDecodeComponent(user);
_passwd = strCoding::UrlDecodeComponent(passwd);
_host = std::move(ip);
_port = port;
_is_ssl = is_ssl;


@ -30,7 +30,7 @@ struct StrCaseCompare {
class StrCaseMap : public std::multimap<std::string, std::string, StrCaseCompare> {
public:
using Super = multimap<std::string, std::string, StrCaseCompare>;
using Super = std::multimap<std::string, std::string, StrCaseCompare>;
std::string &operator[](const std::string &k) {
auto it = find(k);


@ -58,6 +58,12 @@ const string kBroadcastStreamNoneReader = "kBroadcastStreamNoneReader";
const string kBroadcastHttpBeforeAccess = "kBroadcastHttpBeforeAccess";
const string kBroadcastSendRtpStopped = "kBroadcastSendRtpStopped";
const string kBroadcastRtpServerTimeout = "kBroadcastRtpServerTimeout";
const string kBroadcastRtcSctpConnecting = "kBroadcastRtcSctpConnecting";
const string kBroadcastRtcSctpConnected = "kBroadcastRtcSctpConnected";
const string kBroadcastRtcSctpFailed = "kBroadcastRtcSctpFailed";
const string kBroadcastRtcSctpClosed = "kBroadcastRtcSctpClosed";
const string kBroadcastRtcSctpSend = "kBroadcastRtcSctpSend";
const string kBroadcastRtcSctpReceived = "kBroadcastRtcSctpReceived";
} // namespace Broadcast
@ -291,6 +297,7 @@ const string kSampleMS = RECORD_FIELD "sampleMS";
const string kFileBufSize = RECORD_FIELD "fileBufSize";
const string kFastStart = RECORD_FIELD "fastStart";
const string kFileRepeat = RECORD_FIELD "fileRepeat";
const string kEnableFmp4 = RECORD_FIELD "enableFmp4";
static onceToken token([]() {
mINI::Instance()[kAppName] = "record";
@ -298,6 +305,7 @@ static onceToken token([]() {
mINI::Instance()[kFileBufSize] = 64 * 1024;
mINI::Instance()[kFastStart] = false;
mINI::Instance()[kFileRepeat] = false;
mINI::Instance()[kEnableFmp4] = false;
});
} // namespace Record
@ -338,6 +346,8 @@ const string kH265PT = RTP_PROXY_FIELD "h265_pt";
const string kPSPT = RTP_PROXY_FIELD "ps_pt";
const string kOpusPT = RTP_PROXY_FIELD "opus_pt";
const string kGopCache = RTP_PROXY_FIELD "gop_cache";
const string kRtpG711DurMs = RTP_PROXY_FIELD "rtp_g711_dur_ms";
const string kUdpRecvSocketBuffer = RTP_PROXY_FIELD "udp_recv_socket_buffer";
static onceToken token([]() {
mINI::Instance()[kDumpDir] = "";
@ -348,6 +358,8 @@ static onceToken token([]() {
mINI::Instance()[kPSPT] = 96;
mINI::Instance()[kOpusPT] = 100;
mINI::Instance()[kGopCache] = 1;
mINI::Instance()[kRtpG711DurMs] = 100;
mINI::Instance()[kUdpRecvSocketBuffer] = 4 * 1024 * 1024;
});
} // namespace RtpProxy
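A hedged sketch of reading the new switches at runtime; GET_CONFIG picks up the defaults installed by the onceToken blocks above, and the keys map to the record/rtp_proxy sections of config.ini:

GET_CONFIG(bool, record_enable_fmp4, Record::kEnableFmp4);        // default false
GET_CONFIG(uint32_t, g711_dur_ms, RtpProxy::kRtpG711DurMs);       // default 100 ms
GET_CONFIG(int, udp_recv_buffer, RtpProxy::kUdpRecvSocketBuffer); // default 4 MB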


@ -109,6 +109,21 @@ extern const std::string kBroadcastReloadConfig;
extern const std::string kBroadcastRtpServerTimeout;
#define BroadcastRtpServerTimeoutArgs uint16_t &local_port, const string &stream_id,int &tcp_mode, bool &re_use_port, uint32_t &ssrc
// rtc transport sctp connection state
extern const std::string kBroadcastRtcSctpConnecting;
extern const std::string kBroadcastRtcSctpConnected;
extern const std::string kBroadcastRtcSctpFailed;
extern const std::string kBroadcastRtcSctpClosed;
#define BroadcastRtcSctpConnectArgs WebRtcTransport& sender
// rtc transport sctp data sent
extern const std::string kBroadcastRtcSctpSend;
#define BroadcastRtcSctpSendArgs WebRtcTransport& sender, const uint8_t *&data, size_t& len
// rtc transport sctp data received
extern const std::string kBroadcastRtcSctpReceived;
#define BroadcastRtcSctpReceivedArgs WebRtcTransport& sender, uint16_t &streamId, uint32_t &ppid, const uint8_t *&msg, size_t &len
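These broadcasts are consumed like any other NoticeCenter event; a hedged listener sketch (the log text is illustrative):

NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastRtcSctpConnected,
    [](BroadcastRtcSctpConnectArgs) {
        InfoL << "sctp connected on transport: " << sender.getIdentifier();
    });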
#define ReloadConfigTag ((void *)(0xFF))
#define RELOAD_KEY(arg, key) \
do { \
@ -339,6 +354,8 @@ extern const std::string kFileBufSize;
extern const std::string kFastStart;
// whether to loop the mp4 file from the beginning
extern const std::string kFileRepeat;
// whether mp4 recordings use the fmp4 format
extern const std::string kEnableFmp4;
} // namespace Record
////////////HLS相关配置///////////
@ -382,6 +399,11 @@ extern const std::string kPSPT;
extern const std::string kOpusPT;
// whether RtpSender pre-enables the gop cache to speed up first-frame delivery when cascading, enabled by default
extern const std::string kGopCache;
//audio duration of each g711 rtp packet sent for GB28181, default 100 ms, valid range 20~180 ms (per GB28181-2016 C.2.4)
//preferably a multiple of 20; the program rounds it down to a multiple of 20 automatically
extern const std::string kRtpG711DurMs;
// udp recv socket buffer size
extern const std::string kUdpRecvSocketBuffer;
} // namespace RtpProxy
/**


@ -14,7 +14,7 @@
using namespace toolkit;
#if defined(ENABLE_VERSION)
#include "version.h"
#include "ZLMVersion.h"
#endif
extern "C" {


@ -36,6 +36,16 @@
#define CHECK(exp, ...) ::mediakit::Assert_ThrowCpp(!(exp), #exp, __FUNCTION__, __FILE__, __LINE__, ##__VA_ARGS__)
#endif // CHECK
#ifndef CHECK_RET
#define CHECK_RET(...) \
try { \
CHECK(__VA_ARGS__); \
} catch (AssertFailedException & ex) { \
WarnL << ex.what(); \
return; \
}
#endif
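A minimal usage sketch (hypothetical callback): unlike CHECK, which throws, CHECK_RET logs a warning and returns from the enclosing void function, which suits event callbacks that must not let exceptions escape.

void onStreamChanged(const std::string &stream_id) {
    CHECK_RET(!stream_id.empty(), "stream_id must not be empty");
    // normal handling continues only when the check passed
}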
#ifndef MAX
#define MAX(a, b) ((a) > (b) ? (a) : (b))
#endif // MAX


@ -53,22 +53,6 @@ char HexStrToBin(const char *str) {
return (high << 4) | low;
}
string strCoding::UrlEncode(const string &str) {
string out;
size_t len = str.size();
for (size_t i = 0; i < len; ++i) {
char ch = str[i];
if (isalnum((uint8_t) ch)) {
out.push_back(ch);
} else {
char buf[4];
sprintf(buf, "%%%X%X", (uint8_t) ch >> 4, (uint8_t) ch & 0x0F);
out.append(buf);
}
}
return out;
}
string strCoding::UrlEncodePath(const string &str) {
const char *dont_escape = "!#&'*+:=?@/._-$,;~()";
string out;
@ -79,7 +63,7 @@ string strCoding::UrlEncodePath(const string &str) {
out.push_back(ch);
} else {
char buf[4];
sprintf(buf, "%%%X%X", (uint8_t) ch >> 4, (uint8_t) ch & 0x0F);
snprintf(buf, 4, "%%%X%X", (uint8_t) ch >> 4, (uint8_t) ch & 0x0F);
out.append(buf);
}
}
@ -96,39 +80,13 @@ string strCoding::UrlEncodeComponent(const string &str) {
out.push_back(ch);
} else {
char buf[4];
sprintf(buf, "%%%X%X", (uint8_t) ch >> 4, (uint8_t) ch & 0x0F);
snprintf(buf, 4, "%%%X%X", (uint8_t) ch >> 4, (uint8_t) ch & 0x0F);
out.append(buf);
}
}
return out;
}
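The sprintf-to-snprintf switch is purely defensive hardening; the formatting is unchanged. A tiny worked example of the "%XX" expansion both encoders share, using a space byte (0x20):

char buf[4];
uint8_t ch = ' ';                                          // 0x20
snprintf(buf, sizeof(buf), "%%%X%X", ch >> 4, ch & 0x0F);  // buf == "%20"
// snprintf can never overrun the 4-byte buffer, unlike the old sprintf call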
string strCoding::UrlDecode(const string &str) {
string output;
size_t i = 0, len = str.length();
while (i < len) {
if (str[i] == '%') {
if (i + 3 > len) {
// '%' must be followed by two more bytes before it can be unescaped
output.append(str, i, len - i);
break;
}
char ch = HexStrToBin(&(str[i + 1]));
if (ch == -1) {
// the two bytes after '%' are not hex, unescaping failed, append the 3 raw characters
output.append(str, i, 3);
} else {
output += ch;
}
i += 3;
} else {
output += str[i];
++i;
}
}
return output;
}
string strCoding::UrlDecodePath(const string &str) {
const char *dont_unescape = "#$&+,/:;=?@";
string output;
@ -185,27 +143,6 @@ std::string strCoding::UrlDecodeComponent(const std::string &str) {
return output;
}
#if 0
#include "Util/onceToken.h"
static toolkit::onceToken token([]() {
auto str0 = strCoding::UrlDecode(
"rtsp%3A%2F%2Fadmin%3AJm13317934%25jm%40111.47.84.69%3A554%2FStreaming%2FChannels%2F101%3Ftransportmode%3Dunicast%26amp%3Bprofile%3DProfile_1");
auto str1 = strCoding::UrlDecode("%j1"); // test: the two bytes after % are not hex
auto str2 = strCoding::UrlDecode("%a"); // test: not enough bytes after %
auto str3 = strCoding::UrlDecode("%"); // test: only a %
auto str4 = strCoding::UrlDecode("%%%"); // test: multiple %
auto str5 = strCoding::UrlDecode("%%%%40"); // test: normal parsing resumes after several invalid %
auto str6 = strCoding::UrlDecode("Jm13317934%jm"); // test: normal parsing resumes after an invalid %
cout << str0 << endl;
cout << str1 << endl;
cout << str2 << endl;
cout << str3 << endl;
cout << str4 << endl;
cout << str5 << endl;
cout << str6 << endl;
});
#endif
///////////////////////////////windows only///////////////////////////////////
#if defined(_WIN32)
void UnicodeToGB2312(char* pOut, wchar_t uData)


@ -18,10 +18,8 @@ namespace mediakit {
class strCoding {
public:
[[deprecated]] static std::string UrlEncode(const std::string &str); //url utf8 encode, deprecated
static std::string UrlEncodePath(const std::string &str); //utf8-encode a url path
static std::string UrlEncodeComponent(const std::string &str); // utf8-encode a url parameter
[[deprecated]] static std::string UrlDecode(const std::string &str); //url utf8 decode, deprecated
static std::string UrlDecodePath(const std::string &str); //utf8-decode a url path
static std::string UrlDecodeComponent(const std::string &str); // utf8-decode a url parameter
#if defined(_WIN32)


@ -65,18 +65,18 @@ void HttpRequestSplitter::input(const char *data,size_t len) {
_content_len = onRecvHeader(header_ptr, header_size);
}
if(_remain_data_size <= 0){
//no remaining data, clear the cache
_remain_data.clear();
return;
}
/*
 * onRecvHeader() may call HttpRequestSplitter::reset(); update the tail reference
 * first so the early returns below do not leave it stale
 */
tail_ref = tail_tmp;
if(_remain_data_size <= 0){
//no remaining data, clear the cache
_remain_data.clear();
return;
}
if(_content_len == 0){
//http header not found yet, cache points at the remaining data
_remain_data.assign(ptr,_remain_data_size);


@ -683,18 +683,6 @@ void HttpSession::sendResponse(int code,
AsyncSender::onSocketFlushed(data);
}
string HttpSession::urlDecode(const string &str) {
auto ret = strCoding::UrlDecode(str);
#ifdef _WIN32
GET_CONFIG(string, charSet, Http::kCharSet);
bool isGb2312 = !strcasecmp(charSet.data(), "gb2312");
if (isGb2312) {
ret = strCoding::UTF8ToGB2312(ret);
}
#endif // _WIN32
return ret;
}
string HttpSession::urlDecodePath(const string &str) {
auto ret = strCoding::UrlDecodePath(str);
#ifdef _WIN32


@ -44,7 +44,6 @@ public:
void onRecv(const toolkit::Buffer::Ptr &) override;
void onError(const toolkit::SockException &err) override;
void onManager() override;
[[deprecated]] static std::string urlDecode(const std::string &str);
static std::string urlDecodePath(const std::string &str);
static std::string urlDecodeComponent(const std::string &str);
void setTimeoutSec(size_t second);


@ -72,7 +72,7 @@ void HlsMakerImp::clearCache(bool immediately, bool eof) {
std::list<std::string> lst;
lst.emplace_back(_path_hls);
lst.emplace_back(_path_hls_delay);
if (!_path_init.empty()) {
if (!_path_init.empty() && eof) {
lst.emplace_back(_path_init);
}
for (auto &pr : _segment_file_paths) {


@ -31,7 +31,8 @@ void MP4Muxer::openMP4(const string &file) {
MP4FileIO::Writer MP4Muxer::createWriter() {
GET_CONFIG(bool, mp4FastStart, Record::kFastStart);
return _mp4_file->createWriter(mp4FastStart ? MOV_FLAG_FASTSTART : 0, false);
GET_CONFIG(bool, recordEnableFmp4, Record::kEnableFmp4);
return _mp4_file->createWriter(mp4FastStart ? MOV_FLAG_FASTSTART : 0, recordEnableFmp4);
}
void MP4Muxer::closeMP4() {


@ -117,11 +117,13 @@ bool MP4Recorder::inputFrame(const Frame::Ptr &frame) {
if (!(_have_video && frame->getTrackType() == TrackAudio)) {
//if there is video and the input is audio, the slicing logic should be skipped
if (_last_dts == 0 || _last_dts > frame->dts()) {
//in rare cases the dts may go backwards
_last_dts = frame->dts();
//with b-frames the dts may go backwards
_last_dts = MAX(frame->dts(), _last_dts);
}
auto duration = 5; // at least one frame (5 ms) by default
if (frame->dts() > 0 && frame->dts() > _last_dts) {
duration = MAX(duration, frame->dts() - _last_dts);
}
auto duration = frame->dts() - _last_dts;
if (!_muxer || ((duration > _max_second * 1000) && (!_have_video || (_have_video && frame->keyFrame())))) {
//conditions:
// 1. _muxer is empty


@ -29,7 +29,13 @@ public:
getRtmpRing()->setDelegate(_media_src);
}
~RtmpMediaSourceMuxer() override { RtmpMuxer::flush(); }
~RtmpMediaSourceMuxer() override {
try {
RtmpMuxer::flush();
} catch (std::exception &ex) {
WarnL << ex.what();
}
}
void setListener(const std::weak_ptr<MediaSourceEvent> &listener){
setDelegate(listener);


@ -19,9 +19,17 @@ using namespace toolkit;
namespace mediakit{
PSEncoderImp::PSEncoderImp(uint32_t ssrc, uint8_t payload_type) : MpegMuxer(true) {
GET_CONFIG(uint32_t,video_mtu,Rtp::kVideoMtuSize);
PSEncoderImp::PSEncoderImp(uint32_t ssrc, uint8_t payload_type, bool ps_or_ts) : MpegMuxer(ps_or_ts) {
GET_CONFIG(uint32_t, s_video_mtu, Rtp::kVideoMtuSize);
_rtp_encoder = std::make_shared<CommonRtpEncoder>();
auto video_mtu = s_video_mtu;
if (!ps_or_ts) {
// make sure the ts rtp payload length is a multiple of 188
video_mtu = RtpPacket::kRtpHeaderSize + (s_video_mtu - (s_video_mtu % 188));
if (video_mtu > s_video_mtu) {
video_mtu -= 188;
}
}
_rtp_encoder->setRtpInfo(ssrc, video_mtu, 90000, payload_type);
auto ring = std::make_shared<RtpRing::RingType>();
ring->setDelegate(std::make_shared<RingDelegateHelper>([this](RtpPacket::Ptr rtp, bool is_key) { onRTP(std::move(rtp), is_key); }));
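Worked through with assumed defaults (video mtu 1400 bytes, RtpPacket::kRtpHeaderSize 12 bytes), the alignment above makes every TS-over-RTP payload an exact multiple of the 188-byte TS packet size:

size_t s_video_mtu = 1400;                                   // assumed config default
size_t video_mtu = 12 + (s_video_mtu - (s_video_mtu % 188)); // 12 + 1316 = 1328 <= 1400
// payload per rtp packet: 1328 - 12 = 1316 = 7 * 188 bytes of TS data;
// the "-= 188" branch only triggers when the mtu is within 12 bytes of a multiple of 188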


@ -16,11 +16,19 @@
#include "Record/MPEG.h"
#include "Common/MediaSink.h"
namespace mediakit{
namespace mediakit {
class CommonRtpEncoder;
class PSEncoderImp : public MpegMuxer{
class PSEncoderImp : public MpegMuxer {
public:
PSEncoderImp(uint32_t ssrc, uint8_t payload_type = 96);
/**
* ps or ts rtp encoder
* @param ssrc the rtp ssrc
* @param payload_type the rtp payload type
* @param ps_or_ts true: ps, false: ts
*/
PSEncoderImp(uint32_t ssrc, uint8_t payload_type = 96, bool ps_or_ts = true);
~PSEncoderImp() override;
protected:


@ -34,6 +34,12 @@ bool RawEncoderImp::addTrack(const Track::Ptr &track) {
auto ring = std::make_shared<RtpRing::RingType>();
ring->setDelegate(std::make_shared<RingDelegateHelper>([this](RtpPacket::Ptr rtp, bool is_key) { onRTP(std::move(rtp), true); }));
_rtp_encoder->setRtpRing(std::move(ring));
if (track->getCodecId() == CodecG711A || track->getCodecId() == CodecG711U) {
GET_CONFIG(uint32_t, dur_ms, RtpProxy::kRtpG711DurMs);
Any param;
param.set<uint32_t>(dur_ms);
_rtp_encoder->setOpt(RtpCodec::RTP_ENCODER_PKT_DUR_MS, param);
}
return true;
}


@ -40,7 +40,9 @@ private:
class RtpCachePS : public RtpCache, public PSEncoderImp {
public:
RtpCachePS(onFlushed cb, uint32_t ssrc, uint8_t payload_type = 96) : RtpCache(std::move(cb)), PSEncoderImp(ssrc, payload_type) {};
RtpCachePS(onFlushed cb, uint32_t ssrc, uint8_t payload_type = 96, bool ps_or_ts = true) :
RtpCache(std::move(cb)), PSEncoderImp(ssrc, ps_or_ts ? payload_type : Rtsp::PT_MP2T, ps_or_ts) {};
void flush() override;
protected:
@ -56,6 +58,7 @@ protected:
void onRTP(toolkit::Buffer::Ptr rtp, bool is_key = false) override;
};
}//namespace mediakit
} //namespace mediakit
#endif//ENABLE_RTPPROXY
#endif //ZLMEDIAKIT_RTPCACHE_H


@ -199,8 +199,8 @@ void RtpProcess::setStopCheckRtp(bool is_check){
}
}
void RtpProcess::setOnlyAudio(bool only_audio){
_only_audio = only_audio;
void RtpProcess::setOnlyTrack(OnlyTrack only_track) {
_only_track = only_track;
}
void RtpProcess::onDetach() {
@ -259,8 +259,10 @@ void RtpProcess::emitOnPublish() {
if (!option.stream_replace.empty()) {
RtpSelector::Instance().addStreamReplace(strong_self->_media_info.stream, option.stream_replace);
}
if (strong_self->_only_audio) {
strong_self->_muxer->setOnlyAudio();
switch (strong_self->_only_track) {
case kOnlyAudio: strong_self->_muxer->setOnlyAudio(); break;
case kOnlyVideo: strong_self->_muxer->enableAudio(false); break;
default: break;
}
strong_self->_muxer->setMediaListener(strong_self);
strong_self->doCachedFunc();


@ -24,6 +24,7 @@ public:
friend class RtpProcessHelper;
RtpProcess(const std::string &stream_id);
~RtpProcess();
enum OnlyTrack { kAll = 0, kOnlyAudio = 1, kOnlyVideo = 2 };
/**
* rtp
@ -58,10 +59,10 @@ public:
void setStopCheckRtp(bool is_check=false);
/**
* select which track(s) to accept (all / audio only / video only)
* must be called before inputRtp()
*/
void setOnlyAudio(bool only_audio);
void setOnlyTrack(OnlyTrack only_track);
/**
* flush the output buffers
@ -93,7 +94,7 @@ private:
void doCachedFunc();
private:
bool _only_audio = false;
OnlyTrack _only_track = kAll;
std::string _auth_err;
uint64_t _dts = 0;
uint64_t _total_bytes = 0;


@ -40,10 +40,11 @@ void RtpSender::startSend(const MediaSourceEvent::SendRtpArgs &args, const funct
if (!_interface) {
//do not recreate the object when reconnecting
auto lam = [this](std::shared_ptr<List<Buffer::Ptr>> list) { onFlushRtpList(std::move(list)); };
if (args.use_ps) {
_interface = std::make_shared<RtpCachePS>(lam, atoi(args.ssrc.data()), args.pt);
} else {
_interface = std::make_shared<RtpCacheRaw>(lam, atoi(args.ssrc.data()), args.pt, args.only_audio);
switch (args.type) {
case MediaSourceEvent::SendRtpArgs::kRtpPS: _interface = std::make_shared<RtpCachePS>(lam, atoi(args.ssrc.data()), args.pt, true); break;
case MediaSourceEvent::SendRtpArgs::kRtpTS: _interface = std::make_shared<RtpCachePS>(lam, atoi(args.ssrc.data()), args.pt, false); break;
case MediaSourceEvent::SendRtpArgs::kRtpRAW: _interface = std::make_shared<RtpCacheRaw>(lam, atoi(args.ssrc.data()), args.pt, args.only_audio); break;
default: CHECK(0, "invalid rtp type:" + to_string(args.type)); break;
}
}


@ -42,12 +42,12 @@ public:
}
}
void setRtpServerInfo(uint16_t local_port,RtpServer::TcpMode mode,bool re_use_port,uint32_t ssrc, bool only_audio) {
void setRtpServerInfo(uint16_t local_port, RtpServer::TcpMode mode, bool re_use_port, uint32_t ssrc, int only_track) {
_local_port = local_port;
_tcp_mode = mode;
_re_use_port = re_use_port;
_ssrc = ssrc;
_only_audio = only_audio;
_only_track = only_track;
}
void setOnDetach(function<void()> cb) {
@ -61,7 +61,7 @@ public:
void onRecvRtp(const Socket::Ptr &sock, const Buffer::Ptr &buf, struct sockaddr *addr) {
if (!_process) {
_process = RtpSelector::Instance().getProcess(_stream_id, true);
_process->setOnlyAudio(_only_audio);
_process->setOnlyTrack((RtpProcess::OnlyTrack)_only_track);
_process->setOnDetach(std::move(_on_detach));
cancelDelayTask();
}
@ -142,7 +142,7 @@ private:
private:
bool _re_use_port = false;
bool _only_audio = false;
int _only_track = 0;
uint16_t _local_port = 0;
uint32_t _ssrc = 0;
RtpServer::TcpMode _tcp_mode = RtpServer::NONE;
@ -156,7 +156,7 @@ private:
EventPoller::DelayTask::Ptr _delay_task;
};
void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_mode, const char *local_ip, bool re_use_port, uint32_t ssrc, bool only_audio, bool multiplex) {
void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_mode, const char *local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex) {
//create the udp server
Socket::Ptr rtp_socket = Socket::createSocket(nullptr, true);
Socket::Ptr rtcp_socket = Socket::createSocket(nullptr, true);
@ -174,7 +174,8 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
}
//set the udp socket receive buffer
SockUtil::setRecvBuf(rtp_socket->rawFD(), 4 * 1024 * 1024);
GET_CONFIG(int, udpRecvSocketBuffer, RtpProxy::kUdpRecvSocketBuffer);
SockUtil::setRecvBuf(rtp_socket->rawFD(), udpRecvSocketBuffer);
TcpServer::Ptr tcp_server;
_tcp_mode = tcp_mode;
@ -183,7 +184,7 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
tcp_server = std::make_shared<TcpServer>(rtp_socket->getPoller());
(*tcp_server)[RtpSession::kStreamID] = stream_id;
(*tcp_server)[RtpSession::kSSRC] = ssrc;
(*tcp_server)[RtpSession::kOnlyAudio] = only_audio;
(*tcp_server)[RtpSession::kOnlyTrack] = only_track;
if (tcp_mode == PASSIVE) {
tcp_server->start<RtpSession>(local_port, local_ip);
} else if (stream_id.empty()) {
@ -200,7 +201,7 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
//a stream id was specified, so one port serves one stream (even with multiple ssrc streams, binding the rtp source filters out streams whose ip/port do not match)
helper = std::make_shared<RtcpHelper>(std::move(rtcp_socket), stream_id);
helper->startRtcp();
helper->setRtpServerInfo(local_port, tcp_mode, re_use_port, ssrc, only_audio);
helper->setRtpServerInfo(local_port, tcp_mode, re_use_port, ssrc, only_track);
bool bind_peer_addr = false;
auto ssrc_ptr = std::make_shared<uint32_t>(ssrc);
_ssrc = ssrc_ptr;
@ -222,7 +223,8 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
} else {
//single port, multi-threaded reception of multiple streams, distinguished by ssrc
udp_server = std::make_shared<UdpServer>(rtp_socket->getPoller());
(*udp_server)[RtpSession::kOnlyAudio] = only_audio;
(*udp_server)[RtpSession::kOnlyTrack] = only_track;
(*udp_server)[RtpSession::kUdpRecvBuffer] = udpRecvSocketBuffer;
udp_server->start<RtpSession>(local_port, local_ip);
rtp_socket = nullptr;
}


@ -44,7 +44,7 @@ public:
* @param multiplex
*/
void start(uint16_t local_port, const std::string &stream_id = "", TcpMode tcp_mode = PASSIVE,
const char *local_ip = "::", bool re_use_port = true, uint32_t ssrc = 0, bool only_audio = false, bool multiplex = false);
const char *local_ip = "::", bool re_use_port = true, uint32_t ssrc = 0, int only_track = 0, bool multiplex = false);
/**
* tcp service (tcp active mode)
@ -81,7 +81,7 @@ protected:
std::shared_ptr<RtcpHelper> _rtcp_helper;
std::function<void()> _on_cleanup;
bool _only_audio = false;
int _only_track = 0;
//used for tcp active mode
TcpMode _tcp_mode = NONE;
};


@ -23,7 +23,8 @@ namespace mediakit{
const string RtpSession::kStreamID = "stream_id";
const string RtpSession::kSSRC = "ssrc";
const string RtpSession::kOnlyAudio = "only_audio";
const string RtpSession::kOnlyTrack = "only_track";
const string RtpSession::kUdpRecvBuffer = "udp_recv_socket_buffer";
void RtpSession::attachServer(const Server &server) {
setParams(const_cast<Server &>(server));
@ -32,7 +33,13 @@ void RtpSession::attachServer(const Server &server) {
void RtpSession::setParams(mINI &ini) {
_stream_id = ini[kStreamID];
_ssrc = ini[kSSRC];
_only_audio = ini[kOnlyAudio];
_only_track = ini[kOnlyTrack];
int udp_socket_buffer = ini[kUdpRecvBuffer];
if (_is_udp) {
// set the udp socket receive buffer
SockUtil::setRecvBuf(getSock()->rawFD(),
(udp_socket_buffer > 0) ? udp_socket_buffer : (4 * 1024 * 1024));
}
}
RtpSession::RtpSession(const Socket::Ptr &sock)
@ -40,10 +47,6 @@ RtpSession::RtpSession(const Socket::Ptr &sock)
socklen_t addr_len = sizeof(_addr);
getpeername(sock->rawFD(), (struct sockaddr *)&_addr, &addr_len);
_is_udp = sock->sockType() == SockNum::Sock_UDP;
if (_is_udp) {
// set the udp socket receive buffer
SockUtil::setRecvBuf(getSock()->rawFD(), 4 * 1024 * 1024);
}
}
RtpSession::~RtpSession() = default;
@ -122,7 +125,7 @@ void RtpSession::onRtpPacket(const char *data, size_t len) {
_delay_close = true;
return;
}
_process->setOnlyAudio(_only_audio);
_process->setOnlyTrack((RtpProcess::OnlyTrack)_only_track);
_process->setDelegate(static_pointer_cast<RtpSession>(shared_from_this()));
}
try {


@ -24,7 +24,8 @@ class RtpSession : public toolkit::Session, public RtpSplitter, public MediaSour
public:
static const std::string kStreamID;
static const std::string kSSRC;
static const std::string kOnlyAudio;
static const std::string kOnlyTrack;
static const std::string kUdpRecvBuffer;
RtpSession(const toolkit::Socket::Ptr &sock);
~RtpSession() override;
@ -51,7 +52,7 @@ private:
bool _is_udp = false;
bool _search_rtp = false;
bool _search_rtp_finished = false;
bool _only_audio = false;
int _only_track = 0;
uint32_t _ssrc = 0;
toolkit::Ticker _ticker;
std::string _stream_id;


@ -93,6 +93,17 @@ public:
RtpInfo &getRtpInfo() { return *_rtp_info; }
enum {
RTP_ENCODER_PKT_DUR_MS = 1 // mainly for the g711 rtp packer: duration of each packet; option_value is int*, option_len is 4
};
/**
* @brief set options on the rtp packer/unpacker (mainly used for g711 rtp), usage similar to setsockopt
*
* @param opt
* @param param
*/
virtual void setOpt(int opt, const toolkit::Any &param) {};
private:
std::unique_ptr<RtpInfo> _rtp_info;
};
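A hedged usage sketch mirroring RawEncoderImp above: ask a G711 rtp encoder (the encoder variable is assumed) to pack 100 ms of audio per rtp packet.

toolkit::Any param;
param.set<uint32_t>(100);                                  // packet duration in ms
encoder->setOpt(RtpCodec::RTP_ENCODER_PKT_DUR_MS, param);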


@ -352,12 +352,20 @@ public:
}
void makeSockPair(std::pair<Socket::Ptr, Socket::Ptr> &pair, const string &local_ip, bool re_use_port, bool is_udp) {
auto &sock0 = pair.first;
auto &sock1 = pair.second;
auto sock_pair = getPortPair();
if (!sock_pair) {
throw runtime_error("none reserved port in pool");
}
makeSockPair_l(sock_pair, pair, local_ip, re_use_port, is_udp);
// make sure the port pair can be opened in both udp and tcp mode
auto new_pair = std::make_pair(Socket::createSocket(), Socket::createSocket());
makeSockPair_l(sock_pair, new_pair, local_ip, re_use_port, !is_udp);
}
void makeSockPair_l(const std::shared_ptr<uint16_t> &sock_pair, std::pair<Socket::Ptr, Socket::Ptr> &pair, const string &local_ip, bool re_use_port, bool is_udp) {
auto &sock0 = pair.first;
auto &sock1 = pair.second;
if (is_udp) {
if (!sock0->bindUdpSock(2 * *sock_pair, local_ip.data(), re_use_port)) {
// 分配端口失败


@ -16,41 +16,17 @@ SrtSession::SrtSession(const Socket::Ptr &sock)
// TraceL<<"after addr len "<<addr_len<<" family "<<_peer_addr.ss_family;
}
EventPoller::Ptr SrtSession::queryPoller(const Buffer::Ptr &buffer) {
uint8_t *data = (uint8_t *)buffer->data();
size_t size = buffer->size();
if (DataPacket::isDataPacket(data, size)) {
uint32_t socket_id = DataPacket::getSocketID(data, size);
auto trans = SrtTransportManager::Instance().getItem(std::to_string(socket_id));
return trans ? trans->getPoller() : nullptr;
}
if (HandshakePacket::isHandshakePacket(data, size)) {
auto type = HandshakePacket::getHandshakeType(data, size);
if (type == HandshakePacket::HS_TYPE_INDUCTION) {
// handshake stage one
return nullptr;
} else if (type == HandshakePacket::HS_TYPE_CONCLUSION) {
// handshake stage two
uint32_t sync_cookie = HandshakePacket::getSynCookie(data, size);
auto trans = SrtTransportManager::Instance().getHandshakeItem(std::to_string(sync_cookie));
return trans ? trans->getPoller() : nullptr;
} else {
WarnL << " not reach there";
}
} else {
uint32_t socket_id = ControlPacket::getSocketID(data, size);
auto trans = SrtTransportManager::Instance().getItem(std::to_string(socket_id));
return trans ? trans->getPoller() : nullptr;
}
return nullptr;
}
void SrtSession::attachServer(const toolkit::Server &server) {
SockUtil::setRecvBuf(getSock()->rawFD(), 1024 * 1024);
}
extern SrtTransport::Ptr querySrtTransport(uint8_t *data, size_t size, const EventPoller::Ptr& poller);
EventPoller::Ptr SrtSession::queryPoller(const Buffer::Ptr &buffer) {
auto transport = querySrtTransport((uint8_t *)buffer->data(), buffer->size(), nullptr);
return transport ? transport->getPoller() : nullptr;
}
void SrtSession::onRecv(const Buffer::Ptr &buffer) {
uint8_t *data = (uint8_t *)buffer->data();
size_t size = buffer->size();
@ -58,45 +34,7 @@ void SrtSession::onRecv(const Buffer::Ptr &buffer) {
if (_find_transport) {
//only look up the transport once
_find_transport = false;
if (DataPacket::isDataPacket(data, size)) {
uint32_t socket_id = DataPacket::getSocketID(data, size);
auto trans = SrtTransportManager::Instance().getItem(std::to_string(socket_id));
if (trans) {
_transport = std::move(trans);
} else {
WarnL << " data packet not find transport ";
}
}
if (HandshakePacket::isHandshakePacket(data, size)) {
auto type = HandshakePacket::getHandshakeType(data, size);
if (type == HandshakePacket::HS_TYPE_INDUCTION) {
// handshake stage one
_transport = std::make_shared<SrtTransportImp>(getPoller());
} else if (type == HandshakePacket::HS_TYPE_CONCLUSION) {
// handshake stage two
uint32_t sync_cookie = HandshakePacket::getSynCookie(data, size);
auto trans = SrtTransportManager::Instance().getHandshakeItem(std::to_string(sync_cookie));
if (trans) {
_transport = std::move(trans);
} else {
WarnL << " hanshake packet not find transport ";
}
} else {
WarnL << " not reach there";
}
} else {
uint32_t socket_id = ControlPacket::getSocketID(data, size);
auto trans = SrtTransportManager::Instance().getItem(std::to_string(socket_id));
if (trans) {
_transport = std::move(trans);
} else {
WarnL << " not find transport";
}
}
_transport = querySrtTransport(data, size, getPoller());
if (_transport) {
_transport->setSession(static_pointer_cast<Session>(shared_from_this()));
}


@ -61,7 +61,7 @@ void SrtTransport::switchToOtherTransport(uint8_t *buf, int len, uint32_t socket
BufferRaw::Ptr tmp = BufferRaw::create();
struct sockaddr_storage tmp_addr = *addr;
tmp->assign((char *)buf, len);
auto trans = SrtTransportManager::Instance().getItem(std::to_string(socketid));
auto trans = SrtTransportManager::Instance().getItem(socketid);
if (trans) {
trans->getPoller()->async([tmp, tmp_addr, trans] {
trans->inputSockData((uint8_t *)tmp->data(), tmp->size(), (struct sockaddr_storage *)&tmp_addr);
@ -700,30 +700,30 @@ void SrtTransport::sendPacket(Buffer::Ptr pkt, bool flush) {
}
}
std::string SrtTransport::getIdentifier() {
std::string SrtTransport::getIdentifier() const {
return _selected_session ? _selected_session->getIdentifier() : "";
}
void SrtTransport::registerSelfHandshake() {
SrtTransportManager::Instance().addHandshakeItem(std::to_string(_sync_cookie), shared_from_this());
SrtTransportManager::Instance().addHandshakeItem(_sync_cookie, shared_from_this());
}
void SrtTransport::unregisterSelfHandshake() {
if (_sync_cookie == 0) {
return;
}
SrtTransportManager::Instance().removeHandshakeItem(std::to_string(_sync_cookie));
SrtTransportManager::Instance().removeHandshakeItem(_sync_cookie);
}
void SrtTransport::registerSelf() {
if (_socket_id == 0) {
return;
}
SrtTransportManager::Instance().addItem(std::to_string(_socket_id), shared_from_this());
SrtTransportManager::Instance().addItem(_socket_id, shared_from_this());
}
void SrtTransport::unregisterSelf() {
SrtTransportManager::Instance().removeItem(std::to_string(_socket_id));
SrtTransportManager::Instance().removeItem(_socket_id);
}
void SrtTransport::onShutdown(const SockException &ex) {
@ -739,7 +739,7 @@ void SrtTransport::onShutdown(const SockException &ex) {
}
}
size_t SrtTransport::getPayloadSize() {
size_t SrtTransport::getPayloadSize() const {
size_t ret = (_mtu - 28 - 16) / 188 * 188;
return ret;
}
@ -792,15 +792,13 @@ SrtTransportManager &SrtTransportManager::Instance() {
return s_instance;
}
void SrtTransportManager::addItem(const std::string &key, const SrtTransport::Ptr &ptr) {
void SrtTransportManager::addItem(const uint32_t key, const SrtTransport::Ptr &ptr) {
std::lock_guard<std::mutex> lck(_mtx);
_map[key] = ptr;
}
SrtTransport::Ptr SrtTransportManager::getItem(const std::string &key) {
if (key.empty()) {
return nullptr;
}
SrtTransport::Ptr SrtTransportManager::getItem(const uint32_t key) {
assert(key > 0);
std::lock_guard<std::mutex> lck(_mtx);
auto it = _map.find(key);
if (it == _map.end()) {
@ -809,25 +807,23 @@ SrtTransport::Ptr SrtTransportManager::getItem(const std::string &key) {
return it->second.lock();
}
void SrtTransportManager::removeItem(const std::string &key) {
void SrtTransportManager::removeItem(const uint32_t key) {
std::lock_guard<std::mutex> lck(_mtx);
_map.erase(key);
}
void SrtTransportManager::addHandshakeItem(const std::string &key, const SrtTransport::Ptr &ptr) {
void SrtTransportManager::addHandshakeItem(const uint32_t key, const SrtTransport::Ptr &ptr) {
std::lock_guard<std::mutex> lck(_handshake_mtx);
_handshake_map[key] = ptr;
}
void SrtTransportManager::removeHandshakeItem(const std::string &key) {
void SrtTransportManager::removeHandshakeItem(const uint32_t key) {
std::lock_guard<std::mutex> lck(_handshake_mtx);
_handshake_map.erase(key);
}
SrtTransport::Ptr SrtTransportManager::getHandshakeItem(const std::string &key) {
if (key.empty()) {
return nullptr;
}
SrtTransport::Ptr SrtTransportManager::getHandshakeItem(const uint32_t key) {
assert(key > 0);
std::lock_guard<std::mutex> lck(_handshake_mtx);
auto it = _handshake_map.find(key);
if (it == _handshake_map.end()) {


@ -45,7 +45,7 @@ public:
virtual void inputSockData(uint8_t *buf, int len, struct sockaddr_storage *addr);
virtual void onSendTSData(const Buffer::Ptr &buffer, bool flush);
std::string getIdentifier();
std::string getIdentifier() const;
void unregisterSelf();
void unregisterSelfHandshake();
@ -89,7 +89,7 @@ private:
void sendShutDown();
void sendMsgDropReq(uint32_t first, uint32_t last);
size_t getPayloadSize();
size_t getPayloadSize() const;
void createTimerForCheckAlive();
@ -164,23 +164,23 @@ private:
class SrtTransportManager {
public:
static SrtTransportManager &Instance();
SrtTransport::Ptr getItem(const std::string &key);
void addItem(const std::string &key, const SrtTransport::Ptr &ptr);
void removeItem(const std::string &key);
SrtTransport::Ptr getItem(const uint32_t key);
void addItem(const uint32_t key, const SrtTransport::Ptr &ptr);
void removeItem(const uint32_t key);
void addHandshakeItem(const std::string &key, const SrtTransport::Ptr &ptr);
void removeHandshakeItem(const std::string &key);
SrtTransport::Ptr getHandshakeItem(const std::string &key);
void addHandshakeItem(const uint32_t key, const SrtTransport::Ptr &ptr);
void removeHandshakeItem(const uint32_t key);
SrtTransport::Ptr getHandshakeItem(const uint32_t key);
private:
SrtTransportManager() = default;
private:
std::mutex _mtx;
std::unordered_map<std::string, std::weak_ptr<SrtTransport>> _map;
std::unordered_map<uint32_t , std::weak_ptr<SrtTransport>> _map;
std::mutex _handshake_mtx;
std::unordered_map<std::string, std::weak_ptr<SrtTransport>> _handshake_map;
std::unordered_map<uint32_t, std::weak_ptr<SrtTransport>> _handshake_map;
};
} // namespace SRT
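With the keys now the raw 32-bit SRT socket ids, per-packet lookups avoid the old std::to_string allocations; a hedged sketch of the dispatch path (data, size and addr are assumed to come from the udp receive callback):

if (DataPacket::isDataPacket(data, size)) {
    uint32_t socket_id = DataPacket::getSocketID(data, size);
    if (auto trans = SrtTransportManager::Instance().getItem(socket_id)) {
        trans->inputSockData(data, (int)size, &addr);   // addr: peer sockaddr_storage
    }
}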


@ -24,6 +24,32 @@ SrtTransportImp::~SrtTransportImp() {
}
}
SrtTransport::Ptr querySrtTransport(uint8_t *data, size_t size, const EventPoller::Ptr& poller) {
if (DataPacket::isDataPacket(data, size)) {
uint32_t socket_id = DataPacket::getSocketID(data, size);
return SrtTransportManager::Instance().getItem(socket_id);
}
if (HandshakePacket::isHandshakePacket(data, size)) {
auto type = HandshakePacket::getHandshakeType(data, size);
if (type == HandshakePacket::HS_TYPE_INDUCTION) {
// handshake stage one
return poller ? std::make_shared<SrtTransportImp>(poller) : nullptr;
}
if (type == HandshakePacket::HS_TYPE_CONCLUSION) {
// handshake stage two
uint32_t sync_cookie = HandshakePacket::getSynCookie(data, size);
return SrtTransportManager::Instance().getHandshakeItem(sync_cookie);
}
}
uint32_t socket_id = ControlPacket::getSocketID(data, size);
return SrtTransportManager::Instance().getItem(socket_id);
}
void SrtTransportImp::onHandShakeFinished(std::string &streamid, struct sockaddr_storage *addr) {
SrtTransport::onHandShakeFinished(streamid,addr);
// TODO parse stream id like this zlmediakit.com/live/test?token=1213444&type=push


@ -741,8 +741,7 @@ namespace RTC
if (!IsRunning())
{
MS_ERROR("cannot process data while not running");
MS_WARN_TAG(nullptr,"cannot process data while not running");
return;
}

File diff suppressed because it is too large


@ -22,97 +22,87 @@
namespace mediakit {
//https://datatracker.ietf.org/doc/rfc4566/?include_text=1
//https://blog.csdn.net/aggresss/article/details/109850434
//https://aggresss.blog.csdn.net/article/details/106436703
//Session description
// v= (protocol version)
// o= (originator and session identifier)
// s= (session name)
// i=* (session information)
// u=* (URI of description)
// e=* (email address)
// p=* (phone number)
// c=* (connection information -- not required if included in
// all media)
// b=* (zero or more bandwidth information lines)
// One or more time descriptions ("t=" and "r=" lines; see below)
// z=* (time zone adjustments)
// k=* (encryption key)
// a=* (zero or more session attribute lines)
// Zero or more media descriptions
// https://datatracker.ietf.org/doc/rfc4566/?include_text=1
// https://blog.csdn.net/aggresss/article/details/109850434
// https://aggresss.blog.csdn.net/article/details/106436703
// Session description
// v= (protocol version)
// o= (originator and session identifier)
// s= (session name)
// i=* (session information)
// u=* (URI of description)
// e=* (email address)
// p=* (phone number)
// c=* (connection information -- not required if included in
// all media)
// b=* (zero or more bandwidth information lines)
// One or more time descriptions ("t=" and "r=" lines; see below)
// z=* (time zone adjustments)
// k=* (encryption key)
// a=* (zero or more session attribute lines)
// Zero or more media descriptions
//
// Time description
// t= (time the session is active)
// r=* (zero or more repeat times)
// Time description
// t= (time the session is active)
// r=* (zero or more repeat times)
//
// Media description, if present
// m= (media name and transport address)
// i=* (media title)
// c=* (connection information -- optional if included at
// session level)
// b=* (zero or more bandwidth information lines)
// k=* (encryption key)
// a=* (zero or more media attribute lines)
// Media description, if present
// m= (media name and transport address)
// i=* (media title)
// c=* (connection information -- optional if included at
// session level)
// b=* (zero or more bandwidth information lines)
// k=* (encryption key)
// a=* (zero or more media attribute lines)
enum class RtpDirection {
invalid = -1,
//send only
// send only
sendonly,
//receive only
// receive only
recvonly,
//send and receive
// send and receive
sendrecv,
//no data sent
// no data sent
inactive
};
enum class DtlsRole {
invalid = -1,
//client
// client
active,
//server
// server
passive,
//can act as either client or server
// can act as either client or server
actpass,
};
enum class SdpType {
invalid = -1,
offer,
answer
};
enum class SdpType { invalid = -1, offer, answer };
DtlsRole getDtlsRole(const std::string &str);
const char* getDtlsRoleString(DtlsRole role);
const char *getDtlsRoleString(DtlsRole role);
RtpDirection getRtpDirection(const std::string &str);
const char* getRtpDirectionString(RtpDirection val);
const char *getRtpDirectionString(RtpDirection val);
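A small round-trip sketch for the helpers declared above (the string values follow directly from the SDP keywords in the enums):

auto dir  = getRtpDirection("recvonly");    // RtpDirection::recvonly
auto name = getRtpDirectionString(dir);     // "recvonly"
auto role = getDtlsRole("actpass");         // DtlsRole::actpass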
class SdpItem {
public:
using Ptr = std::shared_ptr<SdpItem>;
virtual ~SdpItem() = default;
virtual void parse(const std::string &str) {
value = str;
}
virtual std::string toString() const {
return value;
}
virtual const char* getKey() const = 0;
virtual void parse(const std::string &str) { value = str; }
virtual std::string toString() const { return value; }
virtual const char *getKey() const = 0;
void reset() {
value.clear();
}
void reset() { value.clear(); }
protected:
mutable std::string value;
};
template <char KEY>
class SdpString : public SdpItem{
class SdpString : public SdpItem {
public:
SdpString() = default;
SdpString(std::string val) {value = std::move(val);}
SdpString(std::string val) { value = std::move(val); }
// *=*
const char* getKey() const override { static std::string key(1, KEY); return key.data();}
};
@ -126,34 +116,34 @@ public:
this->value = std::move(val);
}
const char* getKey() const override { return key.data();}
const char *getKey() const override { return key.data(); }
};
class SdpTime : public SdpItem{
class SdpTime : public SdpItem {
public:
//5.9. Timing ("t=")
// t=<start-time> <stop-time>
uint64_t start {0};
uint64_t stop {0};
// 5.9. Timing ("t=")
// t=<start-time> <stop-time>
uint64_t start { 0 };
uint64_t stop { 0 };
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "t";}
const char *getKey() const override { return "t"; }
};
class SdpOrigin : public SdpItem{
class SdpOrigin : public SdpItem {
public:
// 5.2. Origin ("o=")
// o=jdoe 2890844526 2890842807 IN IP4 10.47.16.5
// o=<username> <sess-id> <sess-version> <nettype> <addrtype> <unicast-address>
std::string username {"-"};
std::string username { "-" };
std::string session_id;
std::string session_version;
std::string nettype {"IN"};
std::string addrtype {"IP4"};
std::string address {"0.0.0.0"};
std::string nettype { "IN" };
std::string addrtype { "IP4" };
std::string address { "0.0.0.0" };
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "o";}
const char *getKey() const override { return "o"; }
bool empty() const {
return username.empty() || session_id.empty() || session_version.empty()
|| nettype.empty() || addrtype.empty() || address.empty();
@ -165,28 +155,28 @@ public:
// 5.7. Connection Data ("c=")
// c=IN IP4 224.2.17.12/127
// c=<nettype> <addrtype> <connection-address>
std::string nettype {"IN"};
std::string addrtype {"IP4"};
std::string address {"0.0.0.0"};
std::string nettype { "IN" };
std::string addrtype { "IP4" };
std::string address { "0.0.0.0" };
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "c";}
bool empty() const {return address.empty();}
const char *getKey() const override { return "c"; }
bool empty() const { return address.empty(); }
};
class SdpBandwidth : public SdpItem {
public:
//5.8. Bandwidth ("b=")
//b=<bwtype>:<bandwidth>
// 5.8. Bandwidth ("b=")
// b=<bwtype>:<bandwidth>
//AS、CT
std::string bwtype {"AS"};
uint32_t bandwidth {0};
// AS、CT
std::string bwtype { "AS" };
uint32_t bandwidth { 0 };
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "b";}
bool empty() const {return bandwidth == 0;}
const char *getKey() const override { return "b"; }
bool empty() const { return bandwidth == 0; }
};
class SdpMedia : public SdpItem {
@ -195,287 +185,284 @@ public:
// m=<media> <port> <proto> <fmt> ...
TrackType type;
uint16_t port;
//RTP/AVP: RTP for video/audio, see RFC 3551
//RTP/SAVP: SRTP for video/audio, see RFC 3711
//RTP/AVPF: RTP for video/audio with RTCP-based feedback, see RFC 4585
//RTP/SAVPF: SRTP for video/audio with RTCP-based feedback, see RFC 5124
// RTP/AVP: RTP for video/audio, see RFC 3551
// RTP/SAVP: SRTP for video/audio, see RFC 3711
// RTP/AVPF: RTP for video/audio with RTCP-based feedback, see RFC 4585
// RTP/SAVPF: SRTP for video/audio with RTCP-based feedback, see RFC 5124
std::string proto;
std::vector<std::string> fmts;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "m";}
const char *getKey() const override { return "m"; }
};
class SdpAttr : public SdpItem{
class SdpAttr : public SdpItem {
public:
using Ptr = std::shared_ptr<SdpAttr>;
//5.13. Attributes ("a=")
//a=<attribute>
//a=<attribute>:<value>
// 5.13. Attributes ("a=")
// a=<attribute>
// a=<attribute>:<value>
SdpItem::Ptr detail;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "a";}
const char *getKey() const override { return "a"; }
};
class SdpAttrGroup : public SdpItem{
class SdpAttrGroup : public SdpItem {
public:
//a=group:BUNDLE line with all the 'mid' identifiers part of the
// BUNDLE group is included at the session-level.
//a=group:LS session level attribute MUST be included wth the 'mid'
// identifiers that are part of the same lip sync group.
std::string type {"BUNDLE"};
// a=group:BUNDLE line with all the 'mid' identifiers part of the
// BUNDLE group is included at the session-level.
// a=group:LS session level attribute MUST be included wth the 'mid'
// identifiers that are part of the same lip sync group.
std::string type { "BUNDLE" };
std::vector<std::string> mids;
void parse(const std::string &str) override ;
std::string toString() const override ;
const char* getKey() const override { return "group";}
void parse(const std::string &str) override;
std::string toString() const override;
const char *getKey() const override { return "group"; }
};
class SdpAttrMsidSemantic : public SdpItem {
public:
//https://tools.ietf.org/html/draft-alvestrand-rtcweb-msid-02#section-3
//3. The Msid-Semantic Attribute
// https://tools.ietf.org/html/draft-alvestrand-rtcweb-msid-02#section-3
// 3. The Msid-Semantic Attribute
//
// In order to fully reproduce the semantics of the SDP and SSRC
// grouping frameworks, a session-level attribute is defined for
// signalling the semantics associated with an msid grouping.
// In order to fully reproduce the semantics of the SDP and SSRC
// grouping frameworks, a session-level attribute is defined for
// signalling the semantics associated with an msid grouping.
//
// This OPTIONAL attribute gives the message ID and its group semantic.
// a=msid-semantic: examplefoo LS
// This OPTIONAL attribute gives the message ID and its group semantic.
// a=msid-semantic: examplefoo LS
//
//
// The ABNF of msid-semantic is:
// The ABNF of msid-semantic is:
//
// msid-semantic-attr = "msid-semantic:" " " msid token
// token = <as defined in RFC 4566>
// msid-semantic-attr = "msid-semantic:" " " msid token
// token = <as defined in RFC 4566>
//
// The semantic field may hold values from the IANA registries
// "Semantics for the "ssrc-group" SDP Attribute" and "Semantics for the
// "group" SDP Attribute".
//a=msid-semantic: WMS 616cfbb1-33a3-4d8c-8275-a199d6005549
std::string msid{"WMS"};
// The semantic field may hold values from the IANA registries
// "Semantics for the "ssrc-group" SDP Attribute" and "Semantics for the
// "group" SDP Attribute".
// a=msid-semantic: WMS 616cfbb1-33a3-4d8c-8275-a199d6005549
std::string msid { "WMS" };
std::string token;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "msid-semantic";}
bool empty() const {
return msid.empty();
}
const char *getKey() const override { return "msid-semantic"; }
bool empty() const { return msid.empty(); }
};
class SdpAttrRtcp : public SdpItem {
public:
// a=rtcp:9 IN IP4 0.0.0.0
uint16_t port{0};
std::string nettype {"IN"};
std::string addrtype {"IP4"};
std::string address {"0.0.0.0"};
void parse(const std::string &str) override;;
uint16_t port { 0 };
std::string nettype { "IN" };
std::string addrtype { "IP4" };
std::string address { "0.0.0.0" };
    void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "rtcp";}
bool empty() const {
return address.empty() || !port;
}
const char *getKey() const override { return "rtcp"; }
bool empty() const { return address.empty() || !port; }
};
class SdpAttrIceUfrag : public SdpItem {
public:
SdpAttrIceUfrag() = default;
SdpAttrIceUfrag(std::string str) {value = std::move(str);}
//a=ice-ufrag:sXJ3
const char* getKey() const override { return "ice-ufrag";}
SdpAttrIceUfrag(std::string str) { value = std::move(str); }
// a=ice-ufrag:sXJ3
const char *getKey() const override { return "ice-ufrag"; }
};
class SdpAttrIcePwd : public SdpItem {
public:
SdpAttrIcePwd() = default;
SdpAttrIcePwd(std::string str) {value = std::move(str);}
//a=ice-pwd:yEclOTrLg1gEubBFefOqtmyV
const char* getKey() const override { return "ice-pwd";}
SdpAttrIcePwd(std::string str) { value = std::move(str); }
// a=ice-pwd:yEclOTrLg1gEubBFefOqtmyV
const char *getKey() const override { return "ice-pwd"; }
};
class SdpAttrIceOption : public SdpItem {
public:
//a=ice-options:trickle
bool trickle{false};
bool renomination{false};
// a=ice-options:trickle
bool trickle { false };
bool renomination { false };
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "ice-options";}
const char *getKey() const override { return "ice-options"; }
};
class SdpAttrFingerprint : public SdpItem {
public:
//a=fingerprint:sha-256 22:14:B5:AF:66:12:C7:C7:8D:EF:4B:DE:40:25:ED:5D:8F:17:54:DD:88:33:C0:13:2E:FD:1A:FA:7E:7A:1B:79
// a=fingerprint:sha-256 22:14:B5:AF:66:12:C7:C7:8D:EF:4B:DE:40:25:ED:5D:8F:17:54:DD:88:33:C0:13:2E:FD:1A:FA:7E:7A:1B:79
std::string algorithm;
std::string hash;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "fingerprint";}
const char *getKey() const override { return "fingerprint"; }
bool empty() const { return algorithm.empty() || hash.empty(); }
};
class SdpAttrSetup : public SdpItem {
public:
//a=setup:actpass
// a=setup:actpass
SdpAttrSetup() = default;
SdpAttrSetup(DtlsRole r) { role = r; }
DtlsRole role{DtlsRole::actpass};
DtlsRole role { DtlsRole::actpass };
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "setup";}
const char *getKey() const override { return "setup"; }
};
class SdpAttrMid : public SdpItem {
public:
SdpAttrMid() = default;
SdpAttrMid(std::string val) { value = std::move(val); }
//a=mid:audio
const char* getKey() const override { return "mid";}
// a=mid:audio
const char *getKey() const override { return "mid"; }
};
class SdpAttrExtmap : public SdpItem {
public:
//https://aggresss.blog.csdn.net/article/details/106436703
//a=extmap:1[/sendonly] urn:ietf:params:rtp-hdrext:ssrc-audio-level
// https://aggresss.blog.csdn.net/article/details/106436703
// a=extmap:1[/sendonly] urn:ietf:params:rtp-hdrext:ssrc-audio-level
uint8_t id;
RtpDirection direction{RtpDirection::invalid};
RtpDirection direction { RtpDirection::invalid };
std::string ext;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "extmap";}
const char *getKey() const override { return "extmap"; }
};
class SdpAttrRtpMap : public SdpItem {
public:
//a=rtpmap:111 opus/48000/2
// a=rtpmap:111 opus/48000/2
uint8_t pt;
std::string codec;
uint32_t sample_rate;
uint32_t channel {0};
uint32_t channel { 0 };
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "rtpmap";}
const char *getKey() const override { return "rtpmap"; }
};
class SdpAttrRtcpFb : public SdpItem {
public:
    // a=rtcp-fb:98 nack pli
    // a=rtcp-fb:120 nack: supports NACK retransmission (Negative-Acknowledgment).
    // a=rtcp-fb:120 nack pli: supports NACK keyframe retransmission, i.e. PLI (Picture Loss Indication).
    // a=rtcp-fb:120 ccm fir: supports codec-level keyframe requests, CCM (Codec Control Message) / FIR (Full Intra Request);
    // usually has the same effect as nack pli, but nack pli is the keyframe request used during retransmission.
    // a=rtcp-fb:120 goog-remb: supports REMB (Receiver Estimated Maximum Bitrate).
    // a=rtcp-fb:120 transport-cc: supports TCC (Transport Congestion Control).
    // (A small lookup sketch follows this class.)
uint8_t pt;
std::string rtcp_type;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "rtcp-fb";}
const char *getKey() const override { return "rtcp-fb"; }
};
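The feedback types documented above are usually collected per payload type and queried later (compare RtcCodecPlan::rtcp_fb further down in this header); a std-only sketch of that lookup, not the project's code:

#include <iostream>
#include <set>
#include <string>

int main() {
    // rtcp-fb values gathered for payload type 120
    std::set<std::string> rtcp_fb = { "nack", "nack pli", "ccm fir", "transport-cc" };
    bool support_pli = rtcp_fb.count("nack pli") > 0; // keyframe retransmission available?
    std::cout << "PLI supported: " << std::boolalpha << support_pli << std::endl;
    return 0;
}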
class SdpAttrFmtp : public SdpItem {
public:
//fmtp:96 level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=42e01f
// fmtp:96 level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=42e01f
uint8_t pt;
std::map<std::string/*key*/, std::string/*value*/, StrCaseCompare> fmtp;
std::map<std::string /*key*/, std::string /*value*/, StrCaseCompare> fmtp;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "fmtp";}
const char *getKey() const override { return "fmtp"; }
};
class SdpAttrSSRC : public SdpItem {
public:
    // a=ssrc:3245185839 cname:Cx4i/VTR51etgjT7
    // a=ssrc:3245185839 msid:cb373bff-0fea-4edb-bc39-e49bb8e8e3b9 0cf7e597-36a2-4480-9796-69bf0955eef5
    // a=ssrc:3245185839 mslabel:cb373bff-0fea-4edb-bc39-e49bb8e8e3b9
    // a=ssrc:3245185839 label:0cf7e597-36a2-4480-9796-69bf0955eef5
    // a=ssrc:<ssrc-id> <attribute>
    // a=ssrc:<ssrc-id> <attribute>:<value>
    // cname is mandatory; msid/mslabel/label are WebRTC (really Google) inventions, see https://tools.ietf.org/html/draft-ietf-mmusic-msid-17
    // Understanding how the three relate requires three concepts: RTP stream / MediaStreamTrack / MediaStream.
    // One a=ssrc describes one RTP stream;
    // one MediaStreamTrack usually contains one or more RTP streams, e.g. a video MediaStreamTrack typically carries two RTP streams, one for regular transmission and one for NACK retransmission;
    // one MediaStream usually contains one or more MediaStreamTracks, e.g. in a simulcast scenario a MediaStream typically carries three MediaStreamTracks of different encoding quality.
    // Firefox does not use this multi-line style; in SDP generated by Firefox an a=ssrc usually has a single line, for example:
    // a=ssrc:3245185839 cname:Cx4i/VTR51etgjT7
    // (A small grouping sketch follows this class.)
uint32_t ssrc;
std::string attribute;
std::string attribute_value;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "ssrc";}
const char *getKey() const override { return "ssrc"; }
};
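To make the Chrome-style multi-line a=ssrc description concrete, a standalone sketch (illustrative only) that folds several attribute lines for the same ssrc-id into one record, similar in spirit to RtcSSRC defined later in this header:

#include <cstdint>
#include <iostream>
#include <map>
#include <string>

struct SsrcInfo {
    std::string cname, msid, mslabel, label;
};

int main() {
    std::map<uint32_t, SsrcInfo> ssrcs;
    // Four a=ssrc lines carrying different attributes for the same ssrc-id
    ssrcs[3245185839u].cname   = "Cx4i/VTR51etgjT7";
    ssrcs[3245185839u].msid    = "cb373bff-0fea-4edb-bc39-e49bb8e8e3b9 0cf7e597-36a2-4480-9796-69bf0955eef5";
    ssrcs[3245185839u].mslabel = "cb373bff-0fea-4edb-bc39-e49bb8e8e3b9";
    ssrcs[3245185839u].label   = "0cf7e597-36a2-4480-9796-69bf0955eef5";
    std::cout << "tracked ssrc records: " << ssrcs.size() << std::endl; // 1
    return 0;
}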
class SdpAttrSSRCGroup : public SdpItem {
public:
    // a=ssrc-group is defined in RFC 5576 (https://tools.ietf.org/html/rfc5576) and describes the relationship between several ssrcs; two forms are common (see the sketch after this class):
    // a=ssrc-group:FID 2430709021 3715850271
    //   FID (Flow Identification) was originally used for FEC association; in WebRTC it usually links a regular RTP stream with its retransmission (RTX) RTP stream.
    // a=ssrc-group:SIM 360918977 360918978 360918980
    //   Used by Chrome's proprietary SDP-munging style of simulcast to link three MediaStreamTracks of increasing encoding quality.
    std::string type{"FID"};
std::string type { "FID" };
std::vector<uint32_t> ssrcs;
bool isFID() const { return type == "FID"; }
bool isSIM() const { return type == "SIM"; }
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "ssrc-group";}
const char *getKey() const override { return "ssrc-group"; }
};
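And the FID pairing mentioned above in sketch form (not project code): the first ssrc is the regular RTP stream, the second its retransmission (RTX) stream:

#include <cstdint>
#include <iostream>
#include <vector>

int main() {
    // "a=ssrc-group:FID 2430709021 3715850271"
    std::vector<uint32_t> ssrcs = { 2430709021u, 3715850271u };
    uint32_t media_ssrc = ssrcs[0]; // regular RTP stream
    uint32_t rtx_ssrc   = ssrcs[1]; // RTX retransmission stream
    std::cout << "media=" << media_ssrc << " rtx=" << rtx_ssrc << std::endl;
    return 0;
}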
class SdpAttrSctpMap : public SdpItem {
public:
//https://tools.ietf.org/html/draft-ietf-mmusic-sctp-sdp-05
//a=sctpmap:5000 webrtc-datachannel 1024
//a=sctpmap: sctpmap-number media-subtypes [streams]
// https://tools.ietf.org/html/draft-ietf-mmusic-sctp-sdp-05
// a=sctpmap:5000 webrtc-datachannel 1024
// a=sctpmap: sctpmap-number media-subtypes [streams]
uint16_t port = 0;
std::string subtypes;
uint32_t streams = 0;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "sctpmap";}
const char *getKey() const override { return "sctpmap"; }
bool empty() const { return port == 0 && subtypes.empty() && streams == 0; }
};
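A quick sketch (not SdpAttrSctpMap::parse) of how the three fields of the sctpmap value line up with port, subtypes and streams above:

#include <cstdint>
#include <iostream>
#include <sstream>
#include <string>

int main() {
    // Value part of "a=sctpmap:5000 webrtc-datachannel 1024"
    std::istringstream ss("5000 webrtc-datachannel 1024");
    uint16_t port = 0;
    std::string subtypes;
    uint32_t streams = 0;
    ss >> port >> subtypes >> streams;
    std::cout << port << " " << subtypes << " " << streams << std::endl;
    return 0;
}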
class SdpAttrCandidate : public SdpItem {
public:
using Ptr = std::shared_ptr<SdpAttrCandidate>;
//https://tools.ietf.org/html/rfc5245
//15.1. "candidate" Attribute
//a=candidate:4 1 udp 2 192.168.1.7 58107 typ host
//a=candidate:<foundation> <component-id> <transport> <priority> <address> <port> typ <cand-type>
// https://tools.ietf.org/html/rfc5245
// 15.1. "candidate" Attribute
// a=candidate:4 1 udp 2 192.168.1.7 58107 typ host
// a=candidate:<foundation> <component-id> <transport> <priority> <address> <port> typ <cand-type>
std::string foundation;
    // Type of the transported media: 1 means RTP, 2 means RTCP.
uint32_t component;
std::string transport {"udp"};
std::string transport { "udp" };
uint32_t priority;
std::string address;
uint16_t port;
std::string type;
std::vector<std::pair<std::string, std::string> > arr;
std::vector<std::pair<std::string, std::string>> arr;
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "candidate";}
const char *getKey() const override { return "candidate"; }
};
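For the candidate attribute, a minimal formatting sketch (illustrative, not SdpAttrCandidate::toString) that reproduces the documented example line from the listed fields:

#include <cstdint>
#include <iostream>
#include <sstream>
#include <string>

int main() {
    // Target: "candidate:4 1 udp 2 192.168.1.7 58107 typ host"
    std::string foundation = "4", transport = "udp", address = "192.168.1.7", type = "host";
    uint32_t component = 1, priority = 2;
    uint16_t port = 58107;
    std::ostringstream ss;
    ss << "candidate:" << foundation << ' ' << component << ' ' << transport << ' '
       << priority << ' ' << address << ' ' << port << " typ " << type;
    std::cout << ss.str() << std::endl;
    return 0;
}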
class SdpAttrMsid : public SdpItem{
class SdpAttrMsid : public SdpItem {
public:
const char* getKey() const override { return "msid";}
const char *getKey() const override { return "msid"; }
};
class SdpAttrExtmapAllowMixed : public SdpItem{
class SdpAttrExtmapAllowMixed : public SdpItem {
public:
const char* getKey() const override { return "extmap-allow-mixed";}
const char *getKey() const override { return "extmap-allow-mixed"; }
};
class SdpAttrSimulcast : public SdpItem{
class SdpAttrSimulcast : public SdpItem {
public:
//https://www.meetecho.com/blog/simulcast-janus-ssrc/
//https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-14
const char* getKey() const override { return "simulcast";}
// https://www.meetecho.com/blog/simulcast-janus-ssrc/
// https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-14
const char *getKey() const override { return "simulcast"; }
void parse(const std::string &str) override;
std::string toString() const override;
bool empty() const { return rids.empty(); }
@ -483,11 +470,11 @@ public:
std::vector<std::string> rids;
};
class SdpAttrRid : public SdpItem{
class SdpAttrRid : public SdpItem {
public:
void parse(const std::string &str) override;
std::string toString() const override;
const char* getKey() const override { return "rid";}
const char *getKey() const override { return "rid"; }
std::string direction;
std::string rid;
};
@ -507,8 +494,8 @@ public:
RtpDirection getDirection() const;
template<typename cls>
cls getItemClass(char key, const char *attr_key = nullptr) const{
template <typename cls>
cls getItemClass(char key, const char *attr_key = nullptr) const {
auto item = std::dynamic_pointer_cast<cls>(getItem(key, attr_key));
if (!item) {
return cls();
@ -516,7 +503,7 @@ public:
return *item;
}
std::string getStringItem(char key, const char *attr_key = nullptr) const{
std::string getStringItem(char key, const char *attr_key = nullptr) const {
auto item = getItem(key, attr_key);
if (!item) {
return "";
@ -526,7 +513,7 @@ public:
SdpItem::Ptr getItem(char key, const char *attr_key = nullptr) const;
template<typename cls>
template <typename cls>
std::vector<cls> getAllItem(char key_c, const char *attr_key = nullptr) const {
std::vector<cls> ret;
std::string key(1, key_c);
@ -555,7 +542,7 @@ private:
std::vector<SdpItem::Ptr> items;
};
class RtcSessionSdp : public RtcSdpBase{
class RtcSessionSdp : public RtcSdpBase {
public:
using Ptr = std::shared_ptr<RtcSessionSdp>;
int getVersion() const;
@ -580,45 +567,45 @@ public:
//////////////////////////////////////////////////////////////////
//SSRC related information
class RtcSSRC{
class RtcSSRC {
public:
uint32_t ssrc {0};
uint32_t rtx_ssrc {0};
uint32_t ssrc { 0 };
uint32_t rtx_ssrc { 0 };
std::string cname;
std::string msid;
std::string mslabel;
std::string label;
bool empty() const {return ssrc == 0 && cname.empty();}
bool empty() const { return ssrc == 0 && cname.empty(); }
};
//RTC transport codec plan
class RtcCodecPlan{
class RtcCodecPlan {
public:
using Ptr = std::shared_ptr<RtcCodecPlan>;
uint8_t pt;
std::string codec;
uint32_t sample_rate;
    // Valid only for audio
uint32_t channel = 0;
    // RTCP feedback
std::set<std::string> rtcp_fb;
std::map<std::string/*key*/, std::string/*value*/, StrCaseCompare> fmtp;
std::map<std::string /*key*/, std::string /*value*/, StrCaseCompare> fmtp;
std::string getFmtp(const char *key) const;
};
//RTC media description
class RtcMedia{
class RtcMedia {
public:
TrackType type{TrackType::TrackInvalid};
TrackType type { TrackType::TrackInvalid };
std::string mid;
uint16_t port{0};
uint16_t port { 0 };
SdpConnection addr;
SdpBandwidth bandwidth;
std::string proto;
RtpDirection direction{RtpDirection::invalid};
RtpDirection direction { RtpDirection::invalid };
std::vector<RtcCodecPlan> plan;
//////// rtp ////////
@ -629,20 +616,20 @@ public:
std::vector<std::string> rtp_rids;
//////// rtcp ////////
bool rtcp_mux{false};
bool rtcp_rsize{false};
bool rtcp_mux { false };
bool rtcp_rsize { false };
SdpAttrRtcp rtcp_addr;
//////// ice ////////
bool ice_trickle{false};
bool ice_lite{false};
bool ice_renomination{false};
bool ice_trickle { false };
bool ice_lite { false };
bool ice_renomination { false };
std::string ice_ufrag;
std::string ice_pwd;
std::vector<SdpAttrCandidate> candidate;
//////// dtls ////////
DtlsRole role{DtlsRole::invalid};
DtlsRole role { DtlsRole::invalid };
SdpAttrFingerprint fingerprint;
//////// extmap ////////
@ -650,7 +637,7 @@ public:
//////// sctp ////////////
SdpAttrSctpMap sctpmap;
uint32_t sctp_port{0};
uint32_t sctp_port { 0 };
void checkValid() const;
const RtcCodecPlan *getPlan(uint8_t pt) const;
@ -679,7 +666,7 @@ public:
void checkValid() const;
std::string toString() const;
std::string toRtspSdp() const;
const RtcMedia *getMedia(TrackType type) const;
const RtcMedia *getMedia(TrackType type) const;
bool supportRtcpFb(const std::string &name, TrackType type = TrackType::TrackVideo) const;
bool supportSimulcast() const;
bool isOnlyDatachannel() const;
@ -705,7 +692,7 @@ public:
std::string ice_ufrag;
std::string ice_pwd;
RtpDirection direction{RtpDirection::invalid};
RtpDirection direction { RtpDirection::invalid };
SdpAttrFingerprint fingerprint;
std::set<std::string> rtcp_fb;
@ -752,6 +739,6 @@ private:
~SdpConst() = delete;
};
}// namespace mediakit
} // namespace mediakit
#endif //ZLMEDIAKIT_SDP_H
#endif // ZLMEDIAKIT_SDP_H

View File

@ -27,7 +27,6 @@ protected:
void onRtp(const char *buf, size_t len, uint64_t stamp_ms) override;
void onRtcp(const char *buf, size_t len) override;
void onRecvRtp(MediaTrack &track, const std::string &rid, RtpPacket::Ptr rtp) override {};
void onBeforeEncryptRtp(const char *buf, int &len, void *ctx) override {};
void onBeforeEncryptRtcp(const char *buf, int &len, void *ctx) override {};

View File

@ -17,9 +17,8 @@ namespace mediakit {
WebRtcPlayer::Ptr WebRtcPlayer::create(const EventPoller::Ptr &poller,
const RtspMediaSource::Ptr &src,
const MediaInfo &info,
bool preferred_tcp) {
WebRtcPlayer::Ptr ret(new WebRtcPlayer(poller, src, info, preferred_tcp), [](WebRtcPlayer *ptr) {
const MediaInfo &info) {
WebRtcPlayer::Ptr ret(new WebRtcPlayer(poller, src, info), [](WebRtcPlayer *ptr) {
ptr->onDestory();
delete ptr;
});
@ -29,8 +28,7 @@ WebRtcPlayer::Ptr WebRtcPlayer::create(const EventPoller::Ptr &poller,
WebRtcPlayer::WebRtcPlayer(const EventPoller::Ptr &poller,
const RtspMediaSource::Ptr &src,
const MediaInfo &info,
bool preferred_tcp) : WebRtcTransportImp(poller,preferred_tcp) {
const MediaInfo &info) : WebRtcTransportImp(poller) {
_media_info = info;
_play_src = src;
CHECK(src);

View File

@ -19,7 +19,7 @@ namespace mediakit {
class WebRtcPlayer : public WebRtcTransportImp {
public:
using Ptr = std::shared_ptr<WebRtcPlayer>;
static Ptr create(const EventPoller::Ptr &poller, const RtspMediaSource::Ptr &src, const MediaInfo &info, bool preferred_tcp = false);
static Ptr create(const EventPoller::Ptr &poller, const RtspMediaSource::Ptr &src, const MediaInfo &info);
MediaInfo getMediaInfo() { return _media_info; }
protected:
@ -27,10 +27,9 @@ protected:
void onStartWebRTC() override;
void onDestory() override;
void onRtcConfigure(RtcConfigure &configure) const override;
void onRecvRtp(MediaTrack &track, const std::string &rid, RtpPacket::Ptr rtp) override {};
private:
WebRtcPlayer(const EventPoller::Ptr &poller, const RtspMediaSource::Ptr &src, const MediaInfo &info, bool preferred_tcp);
WebRtcPlayer(const EventPoller::Ptr &poller, const RtspMediaSource::Ptr &src, const MediaInfo &info);
private:
    //Media-related metadata

View File

@ -20,9 +20,8 @@ WebRtcPusher::Ptr WebRtcPusher::create(const EventPoller::Ptr &poller,
const RtspMediaSource::Ptr &src,
const std::shared_ptr<void> &ownership,
const MediaInfo &info,
const ProtocolOption &option,
bool preferred_tcp) {
WebRtcPusher::Ptr ret(new WebRtcPusher(poller, src, ownership, info, option,preferred_tcp), [](WebRtcPusher *ptr) {
const ProtocolOption &option) {
WebRtcPusher::Ptr ret(new WebRtcPusher(poller, src, ownership, info, option), [](WebRtcPusher *ptr) {
ptr->onDestory();
delete ptr;
});
@ -34,8 +33,7 @@ WebRtcPusher::WebRtcPusher(const EventPoller::Ptr &poller,
const RtspMediaSource::Ptr &src,
const std::shared_ptr<void> &ownership,
const MediaInfo &info,
const ProtocolOption &option,
bool preferred_tcp) : WebRtcTransportImp(poller,preferred_tcp) {
const ProtocolOption &option) : WebRtcTransportImp(poller) {
_media_info = info;
_push_src = src;
_push_src_ownership = ownership;

View File

@ -20,8 +20,7 @@ class WebRtcPusher : public WebRtcTransportImp, public MediaSourceEvent {
public:
using Ptr = std::shared_ptr<WebRtcPusher>;
static Ptr create(const EventPoller::Ptr &poller, const RtspMediaSource::Ptr &src,
const std::shared_ptr<void> &ownership, const MediaInfo &info, const ProtocolOption &option, bool preferred_tcp = false);
const std::shared_ptr<void> &ownership, const MediaInfo &info, const ProtocolOption &option);
protected:
///////WebRtcTransportImp override///////
@ -53,7 +52,7 @@ protected:
private:
WebRtcPusher(const EventPoller::Ptr &poller, const RtspMediaSource::Ptr &src,
const std::shared_ptr<void> &ownership, const MediaInfo &info, const ProtocolOption &option, bool preferred_tcp);
const std::shared_ptr<void> &ownership, const MediaInfo &info, const ProtocolOption &option);
private:
bool _simulcast = false;

View File

@ -31,7 +31,6 @@
#define RTP_CNAME "zlmediakit-rtp"
#define RTP_LABEL "zlmediakit-label"
#define RTP_MSLABEL "zlmediakit-mslabel"
#define RTP_MSID RTP_MSLABEL " " RTP_LABEL
using namespace std;
@ -55,6 +54,9 @@ const string kStartBitrate = RTC_FIELD "start_bitrate";
const string kMaxBitrate = RTC_FIELD "max_bitrate";
const string kMinBitrate = RTC_FIELD "min_bitrate";
// Data channel settings
const string kDataChannelEcho = RTC_FIELD "datachannel_echo";
static onceToken token([]() {
mINI::Instance()[kTimeOutSec] = 15;
mINI::Instance()[kExternIP] = "";
@ -65,6 +67,8 @@ static onceToken token([]() {
mINI::Instance()[kStartBitrate] = 0;
mINI::Instance()[kMaxBitrate] = 0;
mINI::Instance()[kMinBitrate] = 0;
mINI::Instance()[kDataChannelEcho] = true;
});
} // namespace RTC
@ -250,22 +254,47 @@ void WebRtcTransport::OnDtlsTransportApplicationDataReceived(
#ifdef ENABLE_SCTP
void WebRtcTransport::OnSctpAssociationConnecting(RTC::SctpAssociation *sctpAssociation) {
TraceL << getIdentifier();
try {
NOTICE_EMIT(BroadcastRtcSctpConnectArgs, Broadcast::kBroadcastRtcSctpConnecting, *this);
} catch (std::exception &ex) {
WarnL << "Exception occurred: " << ex.what();
}
}
void WebRtcTransport::OnSctpAssociationConnected(RTC::SctpAssociation *sctpAssociation) {
InfoL << getIdentifier();
try {
NOTICE_EMIT(BroadcastRtcSctpConnectArgs, Broadcast::kBroadcastRtcSctpConnected, *this);
} catch (std::exception &ex) {
WarnL << "Exception occurred: " << ex.what();
}
}
void WebRtcTransport::OnSctpAssociationFailed(RTC::SctpAssociation *sctpAssociation) {
WarnL << getIdentifier();
try {
NOTICE_EMIT(BroadcastRtcSctpConnectArgs, Broadcast::kBroadcastRtcSctpFailed, *this);
} catch (std::exception &ex) {
WarnL << "Exception occurred: " << ex.what();
}
}
void WebRtcTransport::OnSctpAssociationClosed(RTC::SctpAssociation *sctpAssociation) {
InfoL << getIdentifier();
try {
NOTICE_EMIT(BroadcastRtcSctpConnectArgs, Broadcast::kBroadcastRtcSctpClosed, *this);
} catch (std::exception &ex) {
WarnL << "Exception occurred: " << ex.what();
}
}
void WebRtcTransport::OnSctpAssociationSendData(
RTC::SctpAssociation *sctpAssociation, const uint8_t *data, size_t len) {
try {
NOTICE_EMIT(BroadcastRtcSctpSendArgs, Broadcast::kBroadcastRtcSctpSend, *this, data, len);
} catch (std::exception &ex) {
WarnL << "Exception occurred: " << ex.what();
}
_dtls_transport->SendApplicationData(data, len);
}
@ -274,8 +303,18 @@ void WebRtcTransport::OnSctpAssociationMessageReceived(
InfoL << getIdentifier() << " " << streamId << " " << ppid << " " << len << " " << string((char *)msg, len);
RTC::SctpStreamParameters params;
params.streamId = streamId;
    // Echo the data back
_sctp->SendSctpMessage(params, ppid, msg, len);
GET_CONFIG(bool, datachannel_echo, Rtc::kDataChannelEcho);
if (datachannel_echo) {
        // Echo the data back to the sender
_sctp->SendSctpMessage(params, ppid, msg, len);
}
try {
NOTICE_EMIT(BroadcastRtcSctpReceivedArgs, Broadcast::kBroadcastRtcSctpReceived, *this, streamId, ppid, msg, len);
} catch (std::exception &ex) {
WarnL << "Exception occurred: " << ex.what();
}
}
#endif
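The echo above is controlled by the new datachannel_echo switch registered earlier in this file; below is a hedged sketch of disabling it programmatically before the config file is applied. Both the header path and the literal key "rtc.datachannel_echo" are assumptions (RTC_FIELD plus the suffix registered above), not something introduced by this commit:

#include "Util/mini.h" // assumed header path for toolkit::mINI, already used by the onceToken above

static void disable_datachannel_echo() {
    // Assumed full key: RTC_FIELD plus "datachannel_echo"
    toolkit::mINI::Instance()["rtc.datachannel_echo"] = false;
}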
@ -339,6 +378,12 @@ void WebRtcTransport::setRemoteDtlsFingerprint(const RtcSession &remote) {
}
void WebRtcTransport::onRtcConfigure(RtcConfigure &configure) const {
SdpAttrFingerprint fingerprint;
fingerprint.algorithm = _offer_sdp->media[0].fingerprint.algorithm;
fingerprint.hash = getFingerprint(fingerprint.algorithm, _dtls_transport);
configure.setDefaultSetting(
_ice_server->GetUsernameFragment(), _ice_server->GetPassword(), RtpDirection::sendrecv, fingerprint);
    // When REMB is enabled, disable TWCC, because REMB has no effect once TWCC is enabled.
GET_CONFIG(size_t, remb_bit_rate, Rtc::kRembBitRate);
configure.enableTWCC(!remb_bit_rate);
@ -368,12 +413,7 @@ std::string WebRtcTransport::getAnswerSdp(const string &offer) {
setRemoteDtlsFingerprint(*_offer_sdp);
    //// SDP configuration ////
SdpAttrFingerprint fingerprint;
fingerprint.algorithm = _offer_sdp->media[0].fingerprint.algorithm;
fingerprint.hash = getFingerprint(fingerprint.algorithm, _dtls_transport);
RtcConfigure configure;
configure.setDefaultSetting(
_ice_server->GetUsernameFragment(), _ice_server->GetPassword(), RtpDirection::sendrecv, fingerprint);
onRtcConfigure(configure);
    //// Generate the answer SDP ////
@ -392,10 +432,6 @@ static bool isDtls(char *buf) {
return ((*buf > 19) && (*buf < 64));
}
static string getPeerAddress(RTC::TransportTuple *tuple) {
return tuple->get_peer_ip();
}
void WebRtcTransport::inputSockData(char *buf, int len, RTC::TransportTuple *tuple) {
if (RTC::StunPacket::IsStun((const uint8_t *)buf, len)) {
std::unique_ptr<RTC::StunPacket> packet(RTC::StunPacket::Parse((const uint8_t *)buf, len));
@ -412,7 +448,7 @@ void WebRtcTransport::inputSockData(char *buf, int len, RTC::TransportTuple *tup
}
if (isRtp(buf, len)) {
if (!_srtp_session_recv) {
WarnL << "received rtp packet when dtls not completed from:" << getPeerAddress(tuple);
WarnL << "received rtp packet when dtls not completed from:" << tuple->get_peer_ip();
return;
}
if (_srtp_session_recv->DecryptSrtp((uint8_t *)buf, &len)) {
@ -422,7 +458,7 @@ void WebRtcTransport::inputSockData(char *buf, int len, RTC::TransportTuple *tup
}
if (isRtcp(buf, len)) {
if (!_srtp_session_recv) {
WarnL << "received rtcp packet when dtls not completed from:" << getPeerAddress(tuple);
WarnL << "received rtcp packet when dtls not completed from:" << tuple->get_peer_ip();
return;
}
if (_srtp_session_recv->DecryptSrtcp((uint8_t *)buf, &len)) {
@ -494,8 +530,7 @@ void WebRtcTransportImp::OnDtlsTransportApplicationDataReceived(const RTC::DtlsT
#endif
}
WebRtcTransportImp::WebRtcTransportImp(const EventPoller::Ptr &poller,bool preferred_tcp)
: WebRtcTransport(poller), _preferred_tcp(preferred_tcp) {
WebRtcTransportImp::WebRtcTransportImp(const EventPoller::Ptr &poller) : WebRtcTransport(poller) {
InfoL << getIdentifier();
}
@ -635,7 +670,7 @@ void WebRtcTransportImp::onCheckAnswer(RtcSession &sdp) {
});
for (auto &m : sdp.media) {
m.addr.reset();
m.addr.address = extern_ips.empty() ? _localIp.empty() ? SockUtil::get_local_ip() : _localIp : extern_ips[0];
m.addr.address = extern_ips.empty() ? _local_ip.empty() ? SockUtil::get_local_ip() : _local_ip : extern_ips[0];
m.rtcp_addr.reset();
m.rtcp_addr.address = m.addr.address;
@ -667,9 +702,9 @@ void WebRtcTransportImp::onCheckAnswer(RtcSession &sdp) {
        // The SSRC we send can be chosen arbitrarily, because outgoing RTP will be rewritten to this value.
ssrc.ssrc = m.type + RTP_SSRC_OFFSET;
ssrc.cname = RTP_CNAME;
ssrc.label = RTP_LABEL;
ssrc.label = std::string(RTP_LABEL) + '-' + m.mid;
ssrc.mslabel = RTP_MSLABEL;
ssrc.msid = RTP_MSID;
ssrc.msid = ssrc.mslabel + ' ' + ssrc.label;
if (m.getRelatedRtxPlan(m.plan[0].pt)) {
// rtx ssrc
@ -730,7 +765,7 @@ void WebRtcTransportImp::onRtcConfigure(RtcConfigure &configure) const {
return ret;
});
if (extern_ips.empty()) {
std::string local_ip = _localIp.empty() ? SockUtil::get_local_ip() : _localIp;
std::string local_ip = _local_ip.empty() ? SockUtil::get_local_ip() : _local_ip;
if (local_udp_port) { configure.addCandidate(*makeIceCandidate(local_ip, local_udp_port, 120, "udp")); }
if (local_tcp_port) { configure.addCandidate(*makeIceCandidate(local_ip, local_tcp_port, _preferred_tcp ? 125 : 115, "tcp")); }
} else {
@ -744,12 +779,16 @@ void WebRtcTransportImp::onRtcConfigure(RtcConfigure &configure) const {
}
}
void WebRtcTransportImp::setIceCandidate(vector<SdpAttrCandidate> cands) {
_cands = std::move(cands);
void WebRtcTransportImp::setPreferredTcp(bool flag) {
_preferred_tcp = flag;
}
void WebRtcTransportImp::setLocalIp(const std::string &localIp) {
_localIp = localIp;
void WebRtcTransportImp::setLocalIp(std::string local_ip) {
_local_ip = std::move(local_ip);
}
void WebRtcTransportImp::setIceCandidate(vector<SdpAttrCandidate> cands) {
_cands = std::move(cands);
}
///////////////////////////////////////////////////////////////////
@ -1239,21 +1278,14 @@ void WebRtcPluginManager::registerPlugin(const string &type, Plugin cb) {
_map_creator[type] = std::move(cb);
}
std::string exchangeSdp(const WebRtcInterface &exchanger, const std::string& offer) {
return const_cast<WebRtcInterface &>(exchanger).getAnswerSdp(offer);
}
void setLocalIp(const WebRtcInterface& exchanger, const std::string& localIp) {
return const_cast<WebRtcInterface &>(exchanger).setLocalIp(localIp);
}
void WebRtcPluginManager::setListener(Listener cb) {
lock_guard<mutex> lck(_mtx_creator);
_listener = std::move(cb);
}
void WebRtcPluginManager::getAnswerSdp(Session &sender, const string &type, const WebRtcArgs &args, const onCreateRtc &cb_in) {
onCreateRtc cb;
void WebRtcPluginManager::negotiateSdp(Session &sender, const string &type, const WebRtcArgs &args, const onCreateWebRtc &cb_in) {
onCreateWebRtc cb;
lock_guard<mutex> lck(_mtx_creator);
if (_listener) {
auto listener = _listener;
@ -1269,21 +1301,19 @@ void WebRtcPluginManager::getAnswerSdp(Session &sender, const string &type, cons
auto it = _map_creator.find(type);
if (it == _map_creator.end()) {
        cb(WebRtcException(SockException(Err_other, "the type is not supported")));
        cb_in(WebRtcException(SockException(Err_other, "the type is not supported")));
return;
}
it->second(sender, args, cb);
}
void echo_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginManager::onCreateRtc &cb) {
void echo_plugin(Session &sender, const WebRtcArgs &args, const onCreateWebRtc &cb) {
cb(*WebRtcEchoTest::create(EventPollerPool::Instance().getPoller()));
}
void push_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginManager::onCreateRtc &cb) {
void push_plugin(Session &sender, const WebRtcArgs &args, const onCreateWebRtc &cb) {
MediaInfo info(args["url"]);
bool preferred_tcp = args["preferred_tcp"];
Broadcast::PublishAuthInvoker invoker = [cb, info, preferred_tcp](const string &err, const ProtocolOption &option) mutable {
Broadcast::PublishAuthInvoker invoker = [cb, info](const string &err, const ProtocolOption &option) mutable {
if (!err.empty()) {
cb(WebRtcException(SockException(Err_other, err)));
return;
@ -1322,7 +1352,7 @@ void push_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginMana
push_src_ownership = push_src->getOwnership();
push_src->setProtocolOption(option);
}
auto rtc = WebRtcPusher::create(EventPollerPool::Instance().getPoller(), push_src, push_src_ownership, info, option, preferred_tcp);
auto rtc = WebRtcPusher::create(EventPollerPool::Instance().getPoller(), push_src, push_src_ownership, info, option);
push_src->setListener(rtc);
cb(*rtc);
};
@ -1335,12 +1365,10 @@ void push_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginMana
}
}
void play_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginManager::onCreateRtc &cb) {
void play_plugin(Session &sender, const WebRtcArgs &args, const onCreateWebRtc &cb) {
MediaInfo info(args["url"]);
bool preferred_tcp = args["preferred_tcp"];
auto session_ptr = static_pointer_cast<Session>(sender.shared_from_this());
Broadcast::AuthInvoker invoker = [cb, info, session_ptr, preferred_tcp](const string &err) mutable {
Broadcast::AuthInvoker invoker = [cb, info, session_ptr](const string &err) mutable {
if (!err.empty()) {
cb(WebRtcException(SockException(Err_other, err)));
return;
@ -1356,7 +1384,7 @@ void play_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginMana
}
            // Restore the schema to "rtc" so that hooks can tell which playback protocol is in use.
info.schema = "rtc";
auto rtc = WebRtcPlayer::create(EventPollerPool::Instance().getPoller(), src, info, preferred_tcp);
auto rtc = WebRtcPlayer::create(EventPollerPool::Instance().getPoller(), src, info);
cb(*rtc);
});
};
@ -1369,39 +1397,63 @@ void play_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginMana
}
}
static void set_webrtc_cands(const WebRtcArgs &args, const WebRtcInterface &rtc) {
vector<SdpAttrCandidate> cands;
static void setWebRtcArgs(const WebRtcArgs &args, WebRtcInterface &rtc) {
{
auto cand_str = trim(args["cand_udp"]);
auto ip_port = toolkit::split(cand_str, ":");
if (ip_port.size() == 2) {
static auto is_vaild_ip = [](const std::string &ip) -> bool {
int a, b, c, d;
return sscanf(ip.c_str(), "%d.%d.%d.%d", &a, &b, &c, &d) == 4;
};
std::string host = args["Host"];
if (!host.empty()) {
auto local_ip = host.substr(0, host.find(':'));
if (!is_vaild_ip(local_ip) || local_ip == "127.0.0.1") {
local_ip = "";
}
rtc.setLocalIp(std::move(local_ip));
}
}
bool preferred_tcp = args["preferred_tcp"];
{
rtc.setPreferredTcp(preferred_tcp);
}
{
vector<SdpAttrCandidate> cands;
{
auto cand_str = trim(args["cand_udp"]);
auto ip_port = toolkit::split(cand_str, ":");
if (ip_port.size() == 2) {
                // UDP preferred
auto ice_cand = makeIceCandidate(ip_port[0], atoi(ip_port[1].data()), preferred_tcp ? 100 : 120, "udp");
cands.emplace_back(std::move(*ice_cand));
}
}
{
auto cand_str = trim(args["cand_tcp"]);
auto ip_port = toolkit::split(cand_str, ":");
if (ip_port.size() == 2) {
                // TCP mode
auto ice_cand = makeIceCandidate(ip_port[0], atoi(ip_port[1].data()), preferred_tcp ? 120 : 100, "tcp");
cands.emplace_back(std::move(*ice_cand));
}
}
if (!cands.empty()) {
            // UDP preferred
auto ice_cand = makeIceCandidate(ip_port[0], atoi(ip_port[1].data()), 120, "udp");
cands.emplace_back(std::move(*ice_cand));
rtc.setIceCandidate(std::move(cands));
}
}
{
auto cand_str = trim(args["cand_tcp"]);
auto ip_port = toolkit::split(cand_str, ":");
if (ip_port.size() == 2) {
            // TCP mode
auto ice_cand = makeIceCandidate(ip_port[0], atoi(ip_port[1].data()), 100, "tcp");
cands.emplace_back(std::move(*ice_cand));
}
}
if (!cands.empty()) {
        // UDP preferred
const_cast<WebRtcInterface &>(rtc).setIceCandidate(std::move(cands));
}
}
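To summarize what setWebRtcArgs chooses above, a small standalone sketch (not part of the project) of the effective candidate priority table, where preferred_tcp simply flips which transport gets the higher value:

#include <cstdint>
#include <iostream>

// Mirrors the constants used above: the preferred transport gets 120, the other 100.
static uint32_t candidate_priority(bool preferred_tcp, bool is_tcp) {
    if (is_tcp) {
        return preferred_tcp ? 120 : 100;
    }
    return preferred_tcp ? 100 : 120;
}

int main() {
    std::cout << "udp, default:         " << candidate_priority(false, false) << std::endl; // 120
    std::cout << "tcp, preferred_tcp=1: " << candidate_priority(true, true) << std::endl;   // 120
    return 0;
}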
static onceToken s_rtc_auto_register([]() {
#if !defined (NDEBUG)
    // Enable the echo plugin only in debug mode
WebRtcPluginManager::Instance().registerPlugin("echo", echo_plugin);
#endif
WebRtcPluginManager::Instance().registerPlugin("push", push_plugin);
WebRtcPluginManager::Instance().registerPlugin("play", play_plugin);
WebRtcPluginManager::Instance().setListener([](Session &sender, const std::string &type, const WebRtcArgs &args, const WebRtcInterface &rtc) {
set_webrtc_cands(args, rtc);
setWebRtcArgs(args, const_cast<WebRtcInterface&>(rtc));
});
});
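Building on registerPlugin above, a hypothetical sketch of wiring in an extra negotiation type. The "mirror" name is made up and the body simply reuses the echo transport; it assumes the snippet lives in the same translation unit as echo_plugin so the existing includes apply:

// Hypothetical plugin: a new type "mirror" that reuses the echo transport.
void mirror_plugin(Session &sender, const WebRtcArgs &args, const onCreateWebRtc &cb) {
    cb(*WebRtcEchoTest::create(EventPollerPool::Instance().getPoller()));
}

static onceToken s_mirror_plugin_register([]() {
    WebRtcPluginManager::Instance().registerPlugin("mirror", mirror_plugin);
});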

View File

@ -42,13 +42,10 @@ public:
virtual const std::string& getIdentifier() const = 0;
virtual const std::string& deleteRandStr() const { static std::string s_null; return s_null; }
virtual void setIceCandidate(std::vector<SdpAttrCandidate> cands) {}
virtual void setLocalIp(const std::string &localIp) {}
virtual void setLocalIp(std::string localIp) {}
virtual void setPreferredTcp(bool flag) {}
};
std::string exchangeSdp(const WebRtcInterface &exchanger, const std::string& offer);
void setLocalIp(const WebRtcInterface &exchanger, const std::string &localIp);
class WebRtcException : public WebRtcInterface {
public:
WebRtcException(const SockException &ex) : _ex(ex) {};
@ -88,7 +85,7 @@ public:
* @param offer offer sdp
* @return answer sdp
*/
std::string getAnswerSdp(const std::string &offer) override;
std::string getAnswerSdp(const std::string &offer) override final;
/**
* id
@ -252,14 +249,16 @@ public:
void onSendRtp(const RtpPacket::Ptr &rtp, bool flush, bool rtx = false);
void createRtpChannel(const std::string &rid, uint32_t ssrc, MediaTrack &track);
void setIceCandidate(std::vector<SdpAttrCandidate> cands) override;
void removeTuple(RTC::TransportTuple* tuple);
void safeShutdown(const SockException &ex);
void setLocalIp(const std::string &localIp) override;
void setPreferredTcp(bool flag) override;
void setLocalIp(std::string local_ip) override;
void setIceCandidate(std::vector<SdpAttrCandidate> cands) override;
protected:
void OnIceServerSelectedTuple(const RTC::IceServer *iceServer, RTC::TransportTuple *tuple) override;
WebRtcTransportImp(const EventPoller::Ptr &poller,bool preferred_tcp = false);
WebRtcTransportImp(const EventPoller::Ptr &poller);
void OnDtlsTransportApplicationDataReceived(const RTC::DtlsTransport *dtlsTransport, const uint8_t *data, size_t len) override;
void onStartWebRTC() override;
void onSendSockData(Buffer::Ptr buf, bool flush = true, RTC::TransportTuple *tuple = nullptr) override;
@ -273,7 +272,7 @@ protected:
void onCreate() override;
void onDestory() override;
void onShutdown(const SockException &ex) override;
virtual void onRecvRtp(MediaTrack &track, const std::string &rid, RtpPacket::Ptr rtp) = 0;
virtual void onRecvRtp(MediaTrack &track, const std::string &rid, RtpPacket::Ptr rtp) {}
void updateTicker();
float getLossRate(TrackType type);
void onRtcpBye() override;
@ -289,7 +288,7 @@ private:
void onCheckAnswer(RtcSession &sdp);
private:
bool _preferred_tcp;
bool _preferred_tcp = false;
uint16_t _rtx_seq[2] = {0, 0};
    //Total traffic consumed
uint64_t _bytes_usage = 0;
@ -310,8 +309,8 @@ private:
    //Look up related info by the payload type (pt) of received RTP
std::unordered_map<uint8_t/*pt*/, std::unique_ptr<WrappedMediaTrack>> _pt_to_track;
std::vector<SdpAttrCandidate> _cands;
    //Host IP of the original access
std::string _localIp;
    //Host IP used in the HTTP request
std::string _local_ip;
};
class WebRtcTransportManager {
@ -333,21 +332,20 @@ private:
class WebRtcArgs : public std::enable_shared_from_this<WebRtcArgs> {
public:
virtual ~WebRtcArgs() = default;
virtual variant operator[](const std::string &key) const = 0;
};
using onCreateWebRtc = std::function<void(const WebRtcInterface &rtc)>;
class WebRtcPluginManager {
public:
using onCreateRtc = std::function<void(const WebRtcInterface &rtc)>;
using Plugin = std::function<void(Session &sender, const WebRtcArgs &args, const onCreateRtc &cb)>;
using Plugin = std::function<void(Session &sender, const WebRtcArgs &args, const onCreateWebRtc &cb)>;
using Listener = std::function<void(Session &sender, const std::string &type, const WebRtcArgs &args, const WebRtcInterface &rtc)>;
static WebRtcPluginManager &Instance();
void registerPlugin(const std::string &type, Plugin cb);
void getAnswerSdp(Session &sender, const std::string &type, const WebRtcArgs &args, const onCreateRtc &cb);
void setListener(Listener cb);
void negotiateSdp(Session &sender, const std::string &type, const WebRtcArgs &args, const onCreateWebRtc &cb);
private:
WebRtcPluginManager() = default;

View File

@ -115,17 +115,10 @@
document.getElementsByName("method").forEach((el,idx) => {
el.checked = el.value === type;
el.onclick = function(e) {
let url = new URL(document.getElementById('streamUrl').value);
const url = new URL(document.getElementById('streamUrl').value);
url.searchParams.set("type",el.value);
document.getElementById('streamUrl').value = url.toString();
if(el.value == "play"){
recvOnly = true;
}else if(el.value == "echo"){
recvOnly = false;
}else{
recvOnly = false;
}
recvOnly = 'play' === el.value;
};
});
@ -145,6 +138,25 @@
let h = parseInt(res.pop());
let w = parseInt(res.pop());
const url = new URL(document.getElementById('streamUrl').value);
const newUrl = new URL(window.location.href);
let count = 0;
if (url.searchParams.has('app')) {
newUrl.searchParams.set('app', url.searchParams.get('app'));
count++;
}
if (url.searchParams.has('stream')) {
newUrl.searchParams.set('stream', url.searchParams.get('stream'));
count++;
}
if (url.searchParams.has('type')) {
newUrl.searchParams.set('type', url.searchParams.get('type'));
count++;
}
if (count > 0) {
window.history.pushState(null, null, newUrl);
}
player = new ZLMRTCClient.Endpoint(
{
element: document.getElementById('video'),// video 标签