Initial implementation of line drawing and video display.

luocai 2024-08-13 20:06:10 +08:00
parent ea52804915
commit e7e5c89807
16 changed files with 684 additions and 16 deletions

61
AntiClipSettings.rc Normal file

@@ -0,0 +1,61 @@
// Microsoft Visual C++ generated resource script.
//
#define APSTUDIO_READONLY_SYMBOLS
/////////////////////////////////////////////////////////////////////////////
//
// Generated from the TEXTINCLUDE 2 resource.
//
#include "winres.h"
/////////////////////////////////////////////////////////////////////////////
#undef APSTUDIO_READONLY_SYMBOLS
/////////////////////////////////////////////////////////////////////////////
// Chinese (Simplified, PRC) resources
#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_CHS)
LANGUAGE LANG_CHINESE, SUBLANG_CHINESE_SIMPLIFIED
#pragma code_page(936)
#ifdef APSTUDIO_INVOKED
/////////////////////////////////////////////////////////////////////////////
//
// TEXTINCLUDE
//
2 TEXTINCLUDE
BEGIN
"#include ""winres.h""\r\n"
"\0"
END
3 TEXTINCLUDE
BEGIN
"\r\n"
"\0"
END
#endif // APSTUDIO_INVOKED
/////////////////////////////////////////////////////////////////////////////
//
// Icon
//
// Icon with lowest ID value placed first to ensure application icon
// remains consistent on all systems.
IDI_ICON1 ICON "resources\\logo.ico"
#endif // Chinese (Simplified, PRC) resources
/////////////////////////////////////////////////////////////////////////////
#ifndef APSTUDIO_INVOKED
/////////////////////////////////////////////////////////////////////////////
//
// Generated from the TEXTINCLUDE 3 resource.
//
/////////////////////////////////////////////////////////////////////////////
#endif // not APSTUDIO_INVOKED

71
Application.cpp Normal file

@@ -0,0 +1,71 @@
#include "Application.h"
#include "BoostLog.h"
#include "Configuration.h"
#include "DeviceConnection.h"
#include "H264Palyer.h"
#include "VideoFrameProvider.h"
#include <QGuiApplication>
#include <QQmlApplicationEngine>
Application::Application(int &argc, char **argv)
: m_app(std::make_shared<QGuiApplication>(argc, argv)), m_videoFrameProvider(new VideoFrameProvider()),
m_player(std::make_shared<H264Palyer>()) {
m_app->setApplicationName(APPLICATION_NAME);
m_app->setApplicationVersion(QString("v%1_%2 build: %3 %4").arg(APP_VERSION, GIT_COMMIT_ID, __DATE__, __TIME__));
m_player->open();
}
QList<QPointF> Application::currentOpenDoorAreaPoints() const {
return m_currentOpenDoorAreaPoints;
}
void Application::setCurrentOpenDoorAreaPoints(const QList<QPointF> &points) {
if (m_currentOpenDoorAreaPoints != points) {
m_currentOpenDoorAreaPoints = points;
emit currentOpenDoorAreaPointsChanged();
}
}
void Application::onDeviceOpenDoorAreaPoints(const QList<QPointF> &points) {
setCurrentOpenDoorAreaPoints(points);
LOG(info) << "onDeviceOpenDoorAreaPoints: " << points.size();
}
int Application::exec() {
QQmlApplicationEngine engine;
engine.addImageProvider("videoframe", m_videoFrameProvider);
QObject::connect(
&engine, &QQmlApplicationEngine::objectCreationFailed, this, []() { QCoreApplication::exit(-1); },
Qt::QueuedConnection);
engine.loadFromModule("AntiClipSettings", "Main");
return m_app->exec();
}
void Application::open() {
LOG(info) << "Application::start";
m_device = new DeviceConnection();
connect(m_device, &DeviceConnection::currentOpenDoorAreaPointsChanged, this,
&Application::onDeviceOpenDoorAreaPoints);
m_device->connect();
m_device->setH264FrameCallback([this](const char *data, uint32_t size) {
auto image = m_player->decode((const uint8_t *)data, size);
if (image) {
m_videoFrameProvider->setImage(*image);
emit newVideoFrame();
}
});
}
void Application::start() {
m_device->start();
}
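// QML singleton factory: hands the existing C++ instance to the engine and keeps
// ownership on the C++ side so the QML garbage collector never deletes it.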
Application *Application::create(QQmlEngine *qmlEngine, QJSEngine *jsEngine) {
Application *ret = nullptr;
auto app = Amass::Singleton<Application>::instance();
if (app) {
ret = app.get();
QJSEngine::setObjectOwnership(ret, QJSEngine::CppOwnership);
}
return ret;
}

47
Application.h Normal file

@@ -0,0 +1,47 @@
#ifndef APPLICATION_H
#define APPLICATION_H
#include "Singleton.h"
#include <QObject>
#include <QPoint>
#include <QQmlEngine>
class QGuiApplication;
class DeviceConnection;
class VideoFrameProvider;
class H264Palyer;
class Application : public QObject {
Q_OBJECT
QML_NAMED_ELEMENT(App)
QML_SINGLETON
Q_PROPERTY(QList<QPointF> currentOpenDoorAreaPoints READ currentOpenDoorAreaPoints WRITE
setCurrentOpenDoorAreaPoints NOTIFY currentOpenDoorAreaPointsChanged)
friend class Amass::Singleton<Application>;
public:
QList<QPointF> currentOpenDoorAreaPoints() const;
void setCurrentOpenDoorAreaPoints(const QList<QPointF> &points);
int exec();
Q_INVOKABLE void open();
Q_INVOKABLE void start();
static Application *create(QQmlEngine *qmlEngine, QJSEngine *jsEngine);
signals:
void newVideoFrame();
void currentOpenDoorAreaPointsChanged();
protected:
Application(int &argc, char **argv);
void onDeviceOpenDoorAreaPoints(const QList<QPointF> &points);
private:
std::shared_ptr<QGuiApplication> m_app;
VideoFrameProvider *m_videoFrameProvider = nullptr;
std::shared_ptr<H264Palyer> m_player;
DeviceConnection *m_device = nullptr;
QList<QPointF> m_currentOpenDoorAreaPoints;
};
#endif // APPLICATION_H

CMakeLists.txt

@@ -1,22 +1,51 @@
cmake_minimum_required(VERSION 3.16)
project(AntiClipSettings VERSION 0.1 LANGUAGES CXX)
set(APPLICATION_NAME "T009上位机")
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
find_package(Qt6 6.5 REQUIRED COMPONENTS Quick)
set(Projects_ROOT E:/Projects)
set(Libraries_ROOT ${Projects_ROOT}/Libraries)
set(BOOST_ROOT ${Libraries_ROOT}/boost_1_85_0_msvc2022_64bit)
set(Boost_INCLUDE_DIR ${BOOST_ROOT}/include/boost-1_85)
option(Boost_USE_STATIC_LIBS "Use static Boost libraries" OFF)
add_compile_definitions(
BOOST_USE_WINAPI_VERSION=BOOST_WINAPI_VERSION_WIN10
)
set(FFmpeg_ROOT ${Libraries_ROOT}/ffmpeg-7.0.1-full_build-shared)
set(FFmpeg_INCLUDE_DIR ${FFmpeg_ROOT}/include)
set(FFmpeg_LIB_DIR ${FFmpeg_ROOT}/lib)
find_package(Boost REQUIRED COMPONENTS json)
find_package(Qt6 REQUIRED COMPONENTS Qml Quick)
qt_standard_project_setup(REQUIRES 6.5)
execute_process(
COMMAND D:/msys64/usr/bin/git rev-parse --short HEAD
OUTPUT_VARIABLE GIT_COMMIT_ID
OUTPUT_STRIP_TRAILING_WHITESPACE
)
configure_file(Configuration.h.in Configuration.h)
qt_add_executable(AntiClipSettings
AntiClipSettings.rc
main.cpp
Application.h Application.cpp
DeviceConnection.h DeviceConnection.cpp
H264Palyer.h H264Palyer.cpp
VideoFrameProvider.h VideoFrameProvider.cpp
)
qt_add_qml_module(AntiClipSettings
URI AntiClipSettings
VERSION 1.0
QML_FILES
Main.qml
DeviceView.qml
)
# Qt for iOS sets MACOSX_BUNDLE_GUI_IDENTIFIER automatically since Qt 6.1.
@@ -30,8 +59,27 @@ set_target_properties(AntiClipSettings PROPERTIES
WIN32_EXECUTABLE TRUE
)
add_subdirectory(${Projects_ROOT}/Kylin/Universal Universal)
target_include_directories(AntiClipSettings
PRIVATE ${FFmpeg_INCLUDE_DIR}
PRIVATE ${CMAKE_CURRENT_BINARY_DIR}
)
target_link_directories(AntiClipSettings
PRIVATE ${FFmpeg_LIB_DIR}
)
target_link_libraries(AntiClipSettings
PRIVATE Qt6::Qml
PRIVATE Qt6::Quick
PRIVATE Boost::json
PRIVATE avcodec
PRIVATE swscale
PRIVATE avutil
PRIVATE avdevice
PRIVATE avformat
PRIVATE Universal
)
include(GNUInstallDirs)
@@ -40,3 +88,12 @@ install(TARGETS AntiClipSettings
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
)
qt_generate_deploy_qml_app_script(
TARGET AntiClipSettings
OUTPUT_SCRIPT deploy_script
MACOS_BUNDLE_POST_BUILD
NO_UNSUPPORTED_PLATFORM_ERROR
DEPLOY_USER_QML_MODULES_ON_UNSUPPORTED_PLATFORM
)
install(SCRIPT ${deploy_script})

3
Configuration.h.in Normal file

@@ -0,0 +1,3 @@
#define APPLICATION_NAME "@APPLICATION_NAME@"
#define GIT_COMMIT_ID "@GIT_COMMIT_ID@"
#define APP_VERSION "@PROJECT_VERSION@"

113
DeviceConnection.cpp Normal file

@@ -0,0 +1,113 @@
#include "DeviceConnection.h"
#include "BoostLog.h"
#include <QPointF>
#include <QTcpSocket>
#include <WinSock2.h>
#include <boost/json/object.hpp>
#include <boost/json/parse.hpp>
#include <boost/json/serialize.hpp>
DeviceConnection::DeviceConnection(QObject *parent) : QObject{parent} {
}
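// Two TCP connections to the same device (192.168.10.2:8000): one carries JSON
// commands, the other the length-prefixed H.264 stream.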
void DeviceConnection::connect() {
m_commandSocket = new QTcpSocket(this);
m_h264Socket = new QTcpSocket(this);
QObject::connect(m_commandSocket, &QTcpSocket::connected, this, &DeviceConnection::onConnected);
QObject::connect(m_h264Socket, &QTcpSocket::connected, this, &DeviceConnection::onConnected);
QObject::connect(m_h264Socket, &QTcpSocket::readyRead, this, &DeviceConnection::onH264ReadyRead);
QObject::connect(m_commandSocket, &QTcpSocket::readyRead, this, &DeviceConnection::onCommandReadyRead);
m_commandSocket->connectToHost("192.168.10.2", 8000);
m_h264Socket->connectToHost("192.168.10.2", 8000);
}
void DeviceConnection::start() {
boost::json::object request;
request["func"] = "openlivestream_setdata";
request["deviceid"] = "0";
boost::json::object data;
data["value"] = "1";
request["data"] = std::move(data);
auto text = boost::json::serialize(request);
m_h264Socket->write(text.data(), text.size());
}
void DeviceConnection::requestOpenDoorArea() {
boost::json::object request;
request["func"] = "a03opendoor1_getdata";
request["deviceid"] = "0";
boost::json::object data;
request["data"] = std::move(data);
auto text = boost::json::serialize(request);
m_commandSocket->write(text.data(), text.size());
LOG(info) << "requestOpenDoorArea";
}
void DeviceConnection::handleCommand(const std::string_view &replyText) {
auto replyValue = boost::json::parse(replyText);
auto &reply = replyValue.as_object();
auto &function = reply.at("func").as_string();
if (function == "a03opendoor1_getdata") {
auto &data = reply.at("data").as_object();
auto &pointArray = data.at("points").as_array();
QList<QPointF> points;
for (auto &p : pointArray) {
QPointF point;
auto &obj = p.as_object();
point.setX(obj.at("x").as_double());
point.setY(obj.at("y").as_double());
points.push_back(point);
}
emit currentOpenDoorAreaPointsChanged(points);
}
}
void DeviceConnection::onConnected() {
LOG(info) << "onConnected";
auto socket = dynamic_cast<QTcpSocket *>(sender());
if (socket == m_commandSocket) {
requestOpenDoorArea();
}
}
void DeviceConnection::setH264FrameCallback(H264FrameCallback &&callback) {
m_frameCallback = std::move(callback);
}
void DeviceConnection::onH264ReadyRead() {
auto data = m_h264Socket->readAll();
m_h264Buffer.push_back(data);
while (m_h264Buffer.size() >= static_cast<qsizetype>(sizeof(uint32_t))) { // wait for the 4-byte length prefix
auto packageSize = ntohl(*reinterpret_cast<uint32_t *>(m_h264Buffer.data()));
if (m_h264Buffer.size() < (packageSize + sizeof(uint32_t))) break;
// LOG(info) << "onH264ReadyRead " << data.size() << " " << packageSize;
if (m_receivedFirstJsonReply) {
if (m_frameCallback) {
m_frameCallback(m_h264Buffer.data() + sizeof(uint32_t), packageSize);
}
} else {
LOG(info) << "h264 reply: " << m_h264Buffer.data() + sizeof(uint32_t);
m_receivedFirstJsonReply = true;
}
m_h264Buffer.remove(0, packageSize + sizeof(uint32_t));
}
}
void DeviceConnection::onCommandReadyRead() {
auto data = m_commandSocket->readAll();
m_commandBuffer.push_back(data);
while (m_commandBuffer.size() >= static_cast<qsizetype>(sizeof(uint32_t))) { // wait for the 4-byte length prefix
auto packageSize = ntohl(*reinterpret_cast<uint32_t *>(m_commandBuffer.data()));
if (m_commandBuffer.size() < (packageSize + sizeof(uint32_t))) break;
LOG(info) << "command reply: " << m_commandBuffer.data() + sizeof(uint32_t);
handleCommand(std::string_view(m_commandBuffer.data() + sizeof(uint32_t), packageSize));
m_commandBuffer.remove(0, packageSize + sizeof(uint32_t));
}
}
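Both sockets use the framing visible in onH264ReadyRead and onCommandReadyRead: each reply is a 4-byte big-endian length followed by the payload (a JSON object on the command socket, an H.264 packet on the stream socket). A minimal standalone sketch of that parser, with hypothetical names (extractPackets, sink) and plain std types, not part of this commit:

#include <cstdint>
#include <functional>
#include <string>

// Repeatedly peel complete packets off the front of `buffer`; leftover bytes
// stay in the buffer until more data arrives (mirrors the readyRead handlers).
void extractPackets(std::string &buffer, const std::function<void(const char *, uint32_t)> &sink) {
    while (buffer.size() >= 4) {
        const auto *b = reinterpret_cast<const unsigned char *>(buffer.data());
        uint32_t packageSize = (uint32_t(b[0]) << 24) | (uint32_t(b[1]) << 16) |
                               (uint32_t(b[2]) << 8) | uint32_t(b[3]); // big-endian length prefix
        if (buffer.size() < packageSize + 4u) break; // incomplete packet, wait for more data
        sink(buffer.data() + 4, packageSize);        // payload without the prefix
        buffer.erase(0, packageSize + 4);
    }
}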

39
DeviceConnection.h Normal file

@@ -0,0 +1,39 @@
#ifndef DEVICECONNECTION_H
#define DEVICECONNECTION_H
#include <QObject>
#include <functional>
#include <string_view>
class QTcpSocket;
class DeviceConnection : public QObject {
Q_OBJECT
public:
using H264FrameCallback = std::function<void(const char *data, uint32_t size)>;
explicit DeviceConnection(QObject *parent = nullptr);
void setH264FrameCallback(H264FrameCallback &&callback);
void connect();
void start();
void requestOpenDoorArea();
signals:
void currentOpenDoorAreaPointsChanged(const QList<QPointF> &points);
protected:
void onConnected();
void onH264ReadyRead();
void onCommandReadyRead();
void handleCommand(const std::string_view &replyText);
private:
QTcpSocket *m_commandSocket = nullptr;
QTcpSocket *m_h264Socket = nullptr;
bool m_receivedFirstJsonReply = false;
QByteArray m_commandBuffer;
QByteArray m_h264Buffer;
H264FrameCallback m_frameCallback;
};
#endif // DEVICECONNECTION_H

116
DeviceView.qml Normal file

@@ -0,0 +1,116 @@
import QtQuick
import AntiClipSettings
Item {
id: root
property int dargWidth: 10
property var openDoorAreaPoints: []
Image {
id: image
anchors.centerIn: parent
cache: false
fillMode: Image.PreserveAspectFit
source: "image://videoframe/"
property real aspectRatio: 16 / 9
width: Math.min(root.width, root.height * aspectRatio)
height: width / aspectRatio
Canvas {
id: canvas
anchors.fill: parent
onPaint: {
var ctx = canvas.getContext("2d")
ctx.clearRect(0, 0, canvas.width, canvas.height)
if (openDoorAreaPoints.length > 0) {
ctx.strokeStyle = "red"
ctx.lineWidth = 2
ctx.beginPath()
let point = scaledPoint(openDoorAreaPoints[0],
width, height)
ctx.moveTo(point.x, point.y)
for (var i = 1; i < openDoorAreaPoints.length; i++) {
point = scaledPoint(openDoorAreaPoints[i],
width, height)
ctx.lineTo(point.x, point.y)
}
ctx.closePath()
ctx.stroke()
}
}
}
Repeater {
id: repeater
model: openDoorAreaPoints
delegate: Rectangle {
width: dargWidth
height: dargWidth
color: "red"
x: scaledPoint(modelData, canvas.width,
canvas.height).x - width / 2
y: scaledPoint(modelData, canvas.width,
canvas.height).y - height / 2
}
}
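// Dragging a corner handle: press selects the handle under the cursor, moving
// rewrites that point in 640x360 coordinates and repaints the polygon.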
MouseArea {
anchors.fill: parent
property int draggedPointIndex: -1
onPressed: mouse => {
for (var i = 0; i < openDoorAreaPoints.length; i++) {
let point = scaledPoint(openDoorAreaPoints[i],
canvas.width,
canvas.height)
if (isInside(mouse.x, mouse.y, point)) {
draggedPointIndex = i
break
}
}
}
onReleased: {
draggedPointIndex = -1
}
onPositionChanged: mouse => {
if (draggedPointIndex >= 0) {
openDoorAreaPoints[draggedPointIndex] = standardPoint(
Qt.point(mouse.x, mouse.y),
canvas.width, canvas.height)
canvas.requestPaint()
repeater.model = openDoorAreaPoints
}
}
function isInside(x, y, point) {
let edge = dargWidth / 2
return x >= point.x - edge && x <= point.x + edge
&& y >= point.y - edge && y <= point.y + edge
}
}
}
// Map a point from the 640x360 device reference frame to canvas pixel coordinates.
function scaledPoint(point, width, height) {
let x = point.x * width / 640
let y = point.y * height / 360
return Qt.point(x, y)
}
function standardPoint(point, width, height) {
// Map a canvas pixel position back to the 640x360 device reference frame.
let x = point.x * 640 / width
let y = point.y * 360 / height
return Qt.point(x, y)
}
Connections {
target: App
function onNewVideoFrame() {
image.source = ""
image.source = "image://videoframe/"
}
function onCurrentOpenDoorAreaPointsChanged() {
canvas.requestPaint()
}
}
}
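DeviceView relies on the device describing the polygon in a fixed 640x360 reference frame regardless of the rendered canvas size; scaledPoint() and standardPoint() convert back and forth. A small C++ illustration of the same mapping (the 1280x720 canvas size is only an example):

#include <QDebug>
#include <QPointF>

// Device points are defined in a fixed 640x360 reference frame.
QPointF scaledPoint(QPointF devicePoint, qreal canvasWidth, qreal canvasHeight) {
    return {devicePoint.x() * canvasWidth / 640.0, devicePoint.y() * canvasHeight / 360.0};
}

QPointF standardPoint(QPointF canvasPoint, qreal canvasWidth, qreal canvasHeight) {
    return {canvasPoint.x() * 640.0 / canvasWidth, canvasPoint.y() * 360.0 / canvasHeight};
}

int main() {
    QPointF device(320, 180);                        // centre of the reference frame
    QPointF canvas = scaledPoint(device, 1280, 720); // -> (640, 360) on a 1280x720 canvas
    qDebug() << canvas << standardPoint(canvas, 1280, 720); // round-trips to (320, 180)
    return 0;
}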

65
H264Palyer.cpp Normal file

@@ -0,0 +1,65 @@
#include "H264Palyer.h"
#include "BoostLog.h"
extern "C" {
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
}
H264Palyer::H264Palyer() {
}
H264Palyer::~H264Palyer() {
av_packet_free(&m_packet); // the FFmpeg free helpers accept null pointers
av_frame_free(&m_frame);
avcodec_free_context(&m_codecContext); // closes and frees the decoder context
}
void H264Palyer::open() {
auto codec = avcodec_find_decoder(AV_CODEC_ID_H264);
if (!codec) {
LOG(error) << "cannot find h264 codec.";
return;
}
m_codecContext = avcodec_alloc_context3(codec);
if (m_codecContext == nullptr) {
LOG(error) << "Could not allocate video codec context";
return;
}
if (avcodec_open2(m_codecContext, codec, nullptr) < 0) {
LOG(error) << "Could not open codec";
return;
}
m_packet = av_packet_alloc();
if (m_packet == nullptr) {
LOG(error) << "Could not allocate AVPacket";
}
m_frame = av_frame_alloc();
if (m_frame == nullptr) {
LOG(error) << "Could not allocate AVFrame";
}
}
std::optional<QImage> H264Palyer::decode(const uint8_t *data, uint32_t size) {
m_packet->data = const_cast<uint8_t *>(data);
m_packet->size = size;
int ret = avcodec_send_packet(m_codecContext, m_packet);
if (ret < 0) {
return std::nullopt;
}
ret = avcodec_receive_frame(m_codecContext, m_frame);
if (ret < 0) {
return std::nullopt;
}
SwsContext *swsCtx = sws_getContext(m_frame->width, m_frame->height, m_codecContext->pix_fmt, m_frame->width,
m_frame->height, AV_PIX_FMT_RGB24, SWS_BILINEAR, nullptr, nullptr, nullptr);
QImage image(m_frame->width, m_frame->height, QImage::Format_RGB888);
uint8_t *imageData[1] = {image.bits()};
int linesize[1] = {static_cast<int>(image.bytesPerLine())}; // QImage aligns each row to 32 bits
sws_scale(swsCtx, m_frame->data, m_frame->linesize, 0, m_frame->height, imageData, linesize);
sws_freeContext(swsCtx);
// LOG(info) << image.width() << "x" << image.height();
return image;
}
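A hedged usage sketch for the decoder above: feed it one packet at a time, as delivered by DeviceConnection's H264FrameCallback, and treat an empty result as normal, since early packets often carry only parameter sets or the decoder is still buffering. The onFrame helper and the PNG dump are illustrative, not part of the commit:

#include "H264Palyer.h"
#include <QByteArray>

// `payload` is one framed H.264 packet; assumes player.open() succeeded earlier.
void onFrame(H264Palyer &player, const QByteArray &payload) {
    auto image = player.decode(reinterpret_cast<const uint8_t *>(payload.constData()),
                               static_cast<uint32_t>(payload.size()));
    if (!image) {
        return; // no picture yet (SPS/PPS only, or decoder delay)
    }
    image->save("frame.png"); // or hand it to VideoFrameProvider::setImage()
}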

25
H264Palyer.h Normal file

@@ -0,0 +1,25 @@
#ifndef H264PALYER_H
#define H264PALYER_H
#include <QImage>
#include <optional>
#include <string>
typedef struct AVFrame AVFrame;
typedef struct AVPacket AVPacket;
typedef struct AVCodecContext AVCodecContext;
class H264Palyer {
public:
H264Palyer();
~H264Palyer();
void open();
std::optional<QImage> decode(const uint8_t *data, uint32_t size);
private:
AVPacket *m_packet = nullptr;
AVFrame *m_frame = nullptr;
AVCodecContext *m_codecContext = nullptr;
};
#endif // H264PALYER_H

Main.qml

@@ -1,8 +1,39 @@
import QtQuick
import QtQuick.Controls
import AntiClipSettings
Window {
ApplicationWindow {
width: 640
height: 480
visible: true
title: qsTr("Hello World")
header: Row {
Button {
text: "连接"
onClicked: App.open()
}
Button {
text: "开始"
onClicked: App.start()
}
}
Rectangle {
id: deviceList
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.left: parent.left
width: 250
color: "red"
}
DeviceView {
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.left: deviceList.right
anchors.right: parent.right
openDoorAreaPoints: App.currentOpenDoorAreaPoints
}
}

26
VideoFrameProvider.cpp Normal file

@@ -0,0 +1,26 @@
#include "VideoFrameProvider.h"
VideoFrameProvider::VideoFrameProvider()
: QQuickImageProvider(QQuickImageProvider::Image), m_image(1280, 720, QImage::Format_RGB32) {
m_image.fill(Qt::black);
}
QImage VideoFrameProvider::requestImage(const QString &id, QSize *size, const QSize &requestedSize) {
Q_UNUSED(id);
if (size) *size = m_image.size();
if (requestedSize.width() > 0 && requestedSize.height() > 0)
return m_image.scaled(requestedSize.width(), requestedSize.height(), Qt::KeepAspectRatio);
return m_image;
}
void VideoFrameProvider::setImage(const QImage &image) {
m_image = image;
}
void VideoFrameProvider::reset() {
m_image = QImage(1280, 720, QImage::Format_RGB32);
m_image.fill(Qt::black);
}

16
VideoFrameProvider.h Normal file

@@ -0,0 +1,16 @@
#ifndef __VIDEOFRAMEPROVIDER_H__
#define __VIDEOFRAMEPROVIDER_H__
#include <QQuickImageProvider>
class VideoFrameProvider : public QQuickImageProvider {
public:
VideoFrameProvider();
QImage requestImage(const QString &id, QSize *size, const QSize &requestedSize) final;
void setImage(const QImage &image);
void reset();
private:
QImage m_image;
};
#endif // __VIDEOFRAMEPROVIDER_H__

main.cpp

@@ -1,18 +1,16 @@
#include "Application.h"
#include "BoostLog.h"
#include "Configuration.h"
#include <QGuiApplication>
#include <QQmlApplicationEngine>
int main(int argc, char *argv[])
{
QGuiApplication app(argc, argv);
int main(int argc, char *argv[]) {
using namespace Amass;
boost::log::initialize("logs/app");
LOG(info) << "Compiled on: " << __DATE__ << " " << __TIME__ << std::endl;
LOG(info) << "Git commit ID: " << GIT_COMMIT_ID << std::endl;
LOG(info) << "Program version: " << APP_VERSION << std::endl;
QQmlApplicationEngine engine;
QObject::connect(
&engine,
&QQmlApplicationEngine::objectCreationFailed,
&app,
[]() { QCoreApplication::exit(-1); },
Qt::QueuedConnection);
engine.loadFromModule("AntiClipSettings", "Main");
return app.exec();
auto app = Singleton<Application>::instance<Construct>(argc, argv);
return app->exec();
}

BIN
resources/logo.ico Normal file

Binary file not shown.

Size: 1.1 KiB

BIN
resources/logo.png Normal file

Binary file not shown.

Size: 3.3 KiB