Add zlmediakit.

luocai
2024-03-13 18:01:36 +08:00
parent f34bfbc22f
commit 80b76f410e
260 changed files with 98240 additions and 23 deletions

@@ -1,17 +1,33 @@
add_executable(PassengerStatistics main.cpp
DetectAlgorithm.h DetectAlgorithm.cpp
ImageUtilities.h ImageUtilities.cpp
Live555RtspPusher.h Live555RtspPusher.cpp
RtspServer.h RtspServer.cpp
VideoInput.h VideoInput.cpp
)
target_include_directories(PassengerStatistics
PRIVATE ${CMAKE_SOURCE_DIR}/3rdparty/libopencv/include
PRIVATE ${CMAKE_SOURCE_DIR}/3rdparty/rw_mpp/include
PRIVATE ${CMAKE_SOURCE_DIR}/3rdparty/ds_pedestrian_mot_hisi/include
PRIVATE ${ZLMediaKit_INCLUDE_DIR}
)
target_link_directories(PassengerStatistics
PRIVATE ${CMAKE_SOURCE_DIR}/3rdparty/libopencv/libs
PRIVATE ${CMAKE_SOURCE_DIR}/3rdparty/rw_mpp/lib
PRIVATE ${CMAKE_SOURCE_DIR}/3rdparty/ds_pedestrian_mot_hisi/libs
PRIVATE ${ZLMediaKit_LIBRARY_DIRS}
PRIVATE ${OPENSSL_LIBRARY_DIRS}
)
target_link_libraries(PassengerStatistics
PRIVATE Universal
PRIVATE ${OPENSSL_LIBRARIES}
PRIVATE rw_mpp
PRIVATE ds_pedestrian_mot_Hi3516DV500
PRIVATE mk_api
PRIVATE opencv_core
PRIVATE opencv_imgcodecs
PRIVATE opencv_imgproc
)

Main/DetectAlgorithm.cpp (new file, +50 lines)

@@ -0,0 +1,50 @@
#include "DetectAlgorithm.h"
#include "BoostLog.h"
#include "ImageUtilities.h"
#include <ds_pedestrian_mot_hisi.h>
#include <filesystem>
#include <cstdlib> // for system()
DetectAlgorithm::DetectAlgorithm() {
}
DetectAlgorithm::~DetectAlgorithm() {
if (m_handle != nullptr) {
ds_pedestrian_hisi_release(&m_handle);
}
}
void DetectAlgorithm::detect(const uint8_t *nv21ImageData, uint64_t frameIndex) {
if (m_handle == nullptr) { // initialization must be performed here
initialize();
}
if (m_handle == nullptr) { // initialization failed, skip this frame
return;
}
ImageUtilities::NV21ToBGR24(nv21ImageData, m_rgbImageBuffer.data(), DetectWidth, DetectHeight);
std::vector<PedestrianRect> pedestrians;
std::vector<io_TrackData> tracks;
ds_pedestrian_det_hisi(m_handle, m_rgbImageBuffer.data(), pedestrians);
ds_pedestrian_track_hisi(m_handle, m_rgbImageBuffer.data(), frameIndex, pedestrians, tracks);
LOG(info) << "frame: " << frameIndex << ", pedestrians: " << pedestrians.size() << ", tracks: " << tracks.size();
}
void DetectAlgorithm::initialize() {
constexpr auto licensePath = "/kdata/net.lic";
bool licenseExisted = std::filesystem::exists(licensePath);
if (licenseExisted && std::filesystem::file_size(licensePath) <= 0) {
LOG(warning) << "license " << licensePath << " content is empty, remove it.";
std::filesystem::remove(licensePath);
licenseExisted = false;
}
ds_pedestrian_hisi_set_lic_path("/kdata");
int status = ds_pedestrian_hisi_init(&m_handle, "/system/models/ds_mot_m0_2000.bin",
"/system/models/ds_mot_m1_2000.bin", DetectWidth, DetectHeight, 3);
if (status != 0) {
LOG(error) << "ds_pedestrian_hisi_init() failed, status: " << status;
m_handle = nullptr;
} else {
LOG(info) << "detect algorithm initialization successfully.";
}
if (!licenseExisted && std::filesystem::exists(licensePath)) {
system("sync");
}
}
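
A minimal usage sketch (an assumption, not part of the commit): it feeds synthetic DetectWidth x DetectHeight NV21 frames to DetectAlgorithm. It only makes sense on the target board where the ds_pedestrian SDK, models, and license are present; in the real pipeline VideoInput supplies scaled camera frames instead of a zeroed buffer.

#include "DetectAlgorithm.h"
#include <cstdint>
#include <vector>

int main() {
    DetectAlgorithm detector;
    // NV21 uses 1.5 bytes per pixel: a full Y plane plus an interleaved VU plane at half height.
    std::vector<uint8_t> nv21(DetectAlgorithm::DetectWidth * DetectAlgorithm::DetectHeight * 3 / 2, 0);
    for (uint64_t frameIndex = 1; frameIndex <= 10; ++frameIndex) {
        detector.detect(nv21.data(), frameIndex); // the first call lazily initializes the SDK
    }
    return 0;
}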

Main/DetectAlgorithm.h (new file, +21 lines)

@@ -0,0 +1,21 @@
#ifndef __DETECTALGORITHM_H__
#define __DETECTALGORITHM_H__
#include <array>
#include <cstdint>
class DetectAlgorithm {
public:
constexpr static uint32_t DetectWidth = 576;
constexpr static uint32_t DetectHeight = 320;
DetectAlgorithm();
~DetectAlgorithm();
void detect(const uint8_t *nv21ImageData, uint64_t frameIndex);
void initialize();
private:
void *m_handle = nullptr;
std::array<uint8_t, DetectWidth * DetectHeight * 3> m_rgbImageBuffer;
};
#endif // __DETECTALGORITHM_H__

Main/ImageUtilities.cpp (new file, +10 lines)

@@ -0,0 +1,10 @@
#include "ImageUtilities.h"
#include <opencv2/opencv.hpp>
namespace ImageUtilities {
void NV21ToBGR24(const uint8_t *nv21Data, uint8_t *bgr24Data, int width, int height) {
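// NV21 layout: a full-resolution Y plane followed by an interleaved VU plane at half height,
// so OpenCV sees the input as a single-channel Mat with height * 3 / 2 rows.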
cv::Mat nv21Image(height + height / 2, width, CV_8UC1, const_cast<unsigned char *>(nv21Data));
cv::Mat bgrImage(height, width, CV_8UC3, bgr24Data);
cv::cvtColor(nv21Image, bgrImage, cv::COLOR_YUV2BGR_NV21);
}
} // namespace ImageUtilities

Main/ImageUtilities.h (new file, +9 lines)

@@ -0,0 +1,9 @@
#ifndef __IMAGEUTILITIES_H__
#define __IMAGEUTILITIES_H__
#include <cstdint>
namespace ImageUtilities {
void NV21ToBGR24(const uint8_t *nv21Data, uint8_t *bgr24Data, int width, int height);
}
#endif // __IMAGEUTILITIES_H__

Main/RtspServer.cpp (new file, +62 lines)

@@ -0,0 +1,62 @@
#include "RtspServer.h"
#include <chrono>
#include <cstring>
#include <mk_common.h>
#include <mk_h264_splitter.h>
#include <mk_media.h>
#include <thread>
constexpr auto iniConfig = R"(
[rtsp]
lowLatency=1
)";
class RtspServerPrivate {
public:
mk_media media;
mk_h264_splitter splitter;
};
static void on_h264_frame(void *user_data, mk_h264_splitter splitter, const char *data, int size) {
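// Pace the pushed stream at ~25 fps: block 40 ms per split frame and advance dts/pts by 40 ms.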
using namespace std::chrono_literals;
std::this_thread::sleep_for(40ms);
static int dts = 0;
mk_frame frame = mk_frame_create(MKCodecH264, dts, dts, data, size, NULL, NULL);
dts += 40;
mk_media_input_frame((mk_media)user_data, frame);
mk_frame_unref(frame);
}
RtspServer::RtspServer() : m_d(new RtspServerPrivate()) {
mk_config config;
std::memset(&config, 0, sizeof(mk_config));
config.ini = iniConfig;
config.ini_is_path = 0;
config.log_mask = LOG_CONSOLE;
config.ssl_is_path = 1;
mk_env_init(&config);
mk_rtsp_server_start(554, 0);
m_d->media = mk_media_create("__defaultVhost__", "live", "video", 0, 0, 0);
codec_args v_args = {0};
mk_track v_track = mk_track_create(MKCodecH264, &v_args);
mk_media_init_track(m_d->media, v_track);
mk_media_init_complete(m_d->media);
mk_track_unref(v_track);
m_d->splitter = mk_h264_splitter_create(on_h264_frame, m_d->media, 0);
}
RtspServer::~RtspServer() {
mk_h264_splitter_release(m_d->splitter);
mk_media_release(m_d->media);
mk_stop_all_server();
delete m_d;
}
void RtspServer::push(const uint8_t *data, uint32_t size) {
mk_h264_splitter_input_data(m_d->splitter, reinterpret_cast<const char *>(data), size);
}
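
For reference, a standalone feeder sketch (hypothetical, not part of the commit): it reuses the /data/sdcard/HM1.264 path from main.cpp and assumes that file is a raw Annex-B H.264 elementary stream; mk_h264_splitter re-splits the arbitrary 64 KiB chunks into frames.

#include "RtspServer.h"
#include <cstdint>
#include <fstream>
#include <vector>

int main() {
    RtspServer server; // starts the RTSP server on port 554 and registers app "live", stream "video"
    std::ifstream ifs("/data/sdcard/HM1.264", std::ifstream::binary);
    std::vector<char> buffer(64 * 1024);
    while (ifs.read(buffer.data(), buffer.size()) || ifs.gcount() > 0) {
        // push() blocks roughly 40 ms per split frame because of the pacing in on_h264_frame.
        server.push(reinterpret_cast<const uint8_t *>(buffer.data()), static_cast<uint32_t>(ifs.gcount()));
    }
    return 0; // while feeding, the stream should be reachable at rtsp://<device-ip>/live/video
}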

Main/RtspServer.h (new file, +18 lines)

@@ -0,0 +1,18 @@
#ifndef __RTSPSERVER_H__
#define __RTSPSERVER_H__
#include <cstdint>
class RtspServerPrivate;
class RtspServer {
public:
RtspServer();
~RtspServer();
void push(const uint8_t *data, uint32_t size);
private:
RtspServerPrivate *m_d = nullptr;
};
#endif // __RTSPSERVER_H__

@@ -1,15 +1,33 @@
#include "VideoInput.h"
#include "BoostLog.h"
#include "DetectAlgorithm.h"
#include "rw_mpp_api.h"
#include <chrono>
#include <cstring>
#include <memory>
VideoInput::VideoInput(int32_t width, int32_t height) : m_width(width), m_height(height) {
class VideoInputPrivate {
public:
void processFrame(S_mpp_img &image);
void processImage(S_mpp_img &image, S_mpp_img &detectImage, uint64_t frameIndex);
int32_t encodeChannel = -1;
int32_t scaleChannel = -1;
S_vdec_config decoderConfig;
std::shared_ptr<DetectAlgorithm> m_detector;
};
VideoInput::VideoInput(int32_t width, int32_t height) : m_d(new VideoInputPrivate()), m_width(width), m_height(height) {
int status = rw_mpp__vdec_init();
if (status != 0) {
LOG(error) << "rw_mpp__vdec_init() failed, status: " << status;
}
m_d->m_detector = std::make_shared<DetectAlgorithm>();
}
VideoInput::~VideoInput() {
if (isStarted()) {
stop();
}
rw_mpp__vdec_finalize();
}
bool VideoInput::start() {
@@ -43,9 +61,20 @@ void VideoInput::stop() {
m_thread.join();
}
rw_mpp__video_stop(m_vid);
rw_mpp__venc_stop(m_encodeChannel);
if (m_decodeChannel >= 0) {
rw_mpp__vdec_stop(m_decodeChannel);
m_decodeChannel = -1;
}
if (m_d->scaleChannel >= 0) {
rw_mpp__vscale_stop(m_d->scaleChannel);
m_d->scaleChannel = -1;
}
if (m_d->encodeChannel >= 0) {
rw_mpp__venc_stop(m_d->encodeChannel);
m_d->encodeChannel = -1;
}
m_vid = -1;
m_encodeChannel = -1;
}
bool VideoInput::isStarted() const {
@@ -53,7 +82,7 @@ bool VideoInput::isStarted() const {
}
bool VideoInput::startEncode() {
m_encodeChannel = 0;
m_d->encodeChannel = 0;
S_venc_config config;
memset(&config, 0, sizeof(S_venc_config));
config.codec = CODEC_H264;
@@ -71,12 +100,42 @@ bool VideoInput::startEncode() {
vbr.max_bitrate = 1024;
vbr.stats_time = 1;
config.rc_param = &vbr;
int status = rw_mpp__venc_start(m_encodeChannel, &config, -1);
int status = rw_mpp__venc_start(m_d->encodeChannel, &config, -1);
if (status != 0) {
LOG(error) << "rw_mpp__venc_start() failed, status: " << status;
} else {
m_encodeThread = std::thread(&VideoInput::encodeRun, this);
}
m_d->scaleChannel = 0;
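// Scaler config is presumably {src_width, src_height, dst_width, dst_height}: shrink the full-resolution frame to the detector's 576x320 input.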
S_vscale_cfg scaleConfig = {2960, 1664, DetectAlgorithm::DetectWidth, DetectAlgorithm::DetectHeight};
status = rw_mpp__vscale_start(m_d->scaleChannel, &scaleConfig);
if (status != 0) {
LOG(error) << "rw_mpp__vscale_start() failed, status: " << status;
}
return status == 0;
}
bool VideoInput::startFileInput(const std::string &path, int32_t width, int32_t height) {
m_width = width;
m_height = height;
m_path = path;
m_ifs = std::make_shared<std::ifstream>(path, std::ifstream::binary);
m_exit = false;
m_decodeChannel = 0;
memset(&m_d->decoderConfig, 0, sizeof(S_vdec_config));
m_d->decoderConfig.send_mode = VDEC_SEND__STREAM;
m_d->decoderConfig.codec = CODEC_H264;
m_d->decoderConfig.raw_max_width = 2960;
m_d->decoderConfig.raw_max_height = 1664;
m_d->decoderConfig.width = width;
m_d->decoderConfig.height = height;
int status = rw_mpp__vdec_start(m_decodeChannel, &m_d->decoderConfig);
if (status != 0) {
LOG(error) << "rw_mpp__vdec_start() failed, status: " << status;
} else {
m_thread = std::thread(&VideoInput::fakeRun, this);
}
return status == 0;
}
@@ -84,43 +143,109 @@ void VideoInput::setPacketHandler(const PacketHandler &handler) {
m_handler = handler;
}
void VideoInputPrivate::processImage(S_mpp_img &image, S_mpp_img &detectImage, uint64_t frameIndex) {
m_detector->detect(detectImage.nv21, frameIndex);
}
void VideoInputPrivate::processFrame(S_mpp_img &image) {
static uint64_t frameIndex = 0;
S_mpp_img detectImage;
memset(&detectImage, 0, sizeof(S_mpp_img));
int status = rw_mpp__vscale_exec(scaleChannel, &image, &detectImage);
if (status == 0) {
frameIndex++;
processImage(image, detectImage, frameIndex);
status = rw_mpp__vscale_free(scaleChannel, &detectImage);
} else {
LOG(warning) << "rw_mpp__vscale_exec() failed, status: " << status;
}
if (encodeChannel >= 0) {
rw_mpp__venc_send(encodeChannel, &image);
}
}
void VideoInput::run() {
using namespace std::chrono_literals;
S_mpp_img img;
S_mpp_img image;
while (!m_exit) {
int status = rw_mpp__video_recv(m_vid, &img, 2000);
int status = rw_mpp__video_recv(m_vid, &image, 2000);
if (status != 0) {
LOG(error) << "rw_mpp__video_recv() failed, status: " << status;
std::this_thread::sleep_for(500ms);
continue;
}
// LOG(info) << "camera frame size: " << img.width << "x" << img.height;
if (m_encodeChannel >= 0) {
rw_mpp__venc_send(m_encodeChannel, &img);
}
status = rw_mpp__video_free(m_vid, &img);
m_d->processFrame(image);
status = rw_mpp__video_free(m_vid, &image);
if (status != 0) {
LOG(error) << "rw_mpp__video_free() failed, status: " << status;
}
}
}
void VideoInput::fakeRun() {
using namespace std::chrono;
using namespace std::chrono_literals;
S_mpp_vdec_frame frame;
S_mpp_img image;
int bufferSize = m_width * m_height * 3 / 2;
if (bufferSize > 1024 * 64) bufferSize = 1024 * 64;
char *buffer = new char[bufferSize];
std::chrono::system_clock::time_point last;
while (!m_exit) {
while (m_ifs->read(buffer, bufferSize) && !m_exit) {
int readSize = m_ifs->gcount();
frame.data = reinterpret_cast<uint8_t *>(buffer);
frame.len = readSize;
int status = rw_mpp__vdec_send(m_decodeChannel, &frame);
if (status != 0) {
LOG(error) << "rw_mpp__vdec_send() failed, status: " << status;
std::this_thread::sleep_for(10ms);
continue;
}
int receiveStatus = 0;
while (receiveStatus == 0) {
receiveStatus = rw_mpp__vdec_recv(m_decodeChannel, &image, 20);
if (receiveStatus != 0) {
std::this_thread::sleep_for(10ms);
break;
} else {
m_d->processFrame(image);
rw_mpp__vdec_free(m_decodeChannel, &image);
}
auto now = std::chrono::system_clock::now();
auto elapsed = duration_cast<milliseconds>(now - last);
last = now;
// LOG(info) << "process, elapsed: " << elapsed.count();
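// Throttle file playback so decoded frames are processed at most once every 69 ms (~14.5 fps).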
if (elapsed <= milliseconds(69)) {
std::this_thread::sleep_for(69ms - elapsed);
}
}
}
m_ifs = std::make_shared<std::ifstream>(m_path, std::ifstream::binary);
rw_mpp__vdec_send_end(0);
}
delete[] buffer;
}
void VideoInput::encodeRun() {
using namespace std::chrono_literals;
S_mpp_venc_frame frame;
while (!m_exit) {
int status = rw_mpp__venc_recv(m_encodeChannel, &frame, 1000);
int status = rw_mpp__venc_recv(m_d->encodeChannel, &frame, 1000);
if (status != 0) {
std::this_thread::sleep_for(500ms);
LOG(error) << "rw_mpp__venc_recv() failed, status: " << status;
continue;
}
LOG(info) << "encode frame data size: " << frame.len << ", is key frame: " << frame.is_key_frame;
// LOG(info) << "encode frame data size: " << frame.len << ", is key frame: " << frame.is_key_frame;
if (m_handler) {
m_handler(frame.data, frame.len);
}
status = rw_mpp__venc_free(m_encodeChannel, &frame);
status = rw_mpp__venc_free(m_d->encodeChannel, &frame);
if (status != 0) {
LOG(error) << "rw_mpp__venc_free() failed, status: " << status;
}

@@ -1,10 +1,13 @@
#ifndef __VIDEOINPUT_H__
#define __VIDEOINPUT_H__
#include <cstdint>
#include <fstream>
#include <functional>
#include <memory>
#include <string>
#include <thread>
class VideoInputPrivate;
class VideoInput {
public:
using PacketHandler = std::function<void(const uint8_t *, uint32_t)>;
@@ -14,18 +17,24 @@ public:
void stop();
bool isStarted() const;
bool startEncode();
bool startFileInput(const std::string &path, int32_t width, int32_t height);
void setPacketHandler(const PacketHandler &handler);
protected:
void run();
void fakeRun();
void encodeRun();
private:
VideoInputPrivate *m_d = nullptr;
int32_t m_width;
int32_t m_height;
std::string m_path;
std::shared_ptr<std::ifstream> m_ifs;
int32_t m_vid = -1;
int32_t m_encodeChannel = -1;
int32_t m_decodeChannel = -1;
bool m_exit = true;
std::thread m_thread;
std::thread m_encodeThread;

@@ -2,6 +2,7 @@
#include "DateTime.h"
#include "IoContext.h"
#include "Live555RtspPusher.h"
#include "RtspServer.h"
#include "VideoInput.h"
#include "rw_mpp_api.h"
#include <boost/asio/signal_set.hpp>
@@ -17,6 +18,7 @@ int main(int argc, char const *argv[]) {
try {
auto ioContext = Amass::Singleton<IoContext>::instance<Amass::Construct>();
auto pusher = std::make_shared<Live555RtspPusher>(*ioContext->ioContext());
auto rtsp = std::make_shared<RtspServer>();
std::shared_ptr<std::ofstream> ofs;
std::ostringstream oss;
@@ -28,9 +30,11 @@ int main(int argc, char const *argv[]) {
auto video = std::make_shared<VideoInput>(2592, 1536);
video->setPacketHandler([&](const uint8_t *data, uint32_t size) {
ofs->write(reinterpret_cast<const char *>(data), size);
pusher->push(data, size);
// pusher->push(data, size);
rtsp->push(data, size);
});
video->start();
// video->start();
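// Feed a recorded 1280x720 H.264 file instead of the live camera (the camera path above is temporarily disabled).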
video->startFileInput("/data/sdcard/HM1.264", 1280, 720);
video->startEncode();
boost::asio::signal_set signals(*ioContext->ioContext(), SIGINT);
signals.async_wait([&](boost::system::error_code const &, int signal) {