add code: wire WebRTC AEC3 into the echo test (new --dsp option) and import the supporting WebRTC sources.

luocai 2024-09-06 09:45:44 +08:00
parent 78bb8b9fc7
commit fe7ab957c8
13 changed files with 1366 additions and 28 deletions

View File

@@ -37,6 +37,7 @@ target_link_directories(Record
target_link_libraries(Record
PRIVATE VocieProcess
PRIVATE absl::optional
PRIVATE asound
PRIVATE easymedia
PRIVATE drm

View File

@@ -2,12 +2,41 @@
#include "SpeexDsp.h"
#include "Utility.h"
#include "WebRtcAecm.h"
#include "api/audio/echo_canceller3_config.h"
#include "api/audio/echo_canceller3_factory.h"
#include "main.h"
#include "modules/audio_processing/aec3/echo_canceller3.h"
#include <memory>
void EchoRecordTask::setVqeEnabled(bool enabled) {
if (m_vqeEnabled != enabled) {
m_vqeEnabled = enabled;
class EchoRecordPrivate {
public:
EchoRecordPrivate() {
std::unique_ptr<webrtc::EchoCanceller3Factory> factory = std::make_unique<webrtc::EchoCanceller3Factory>();
echoCanceller = factory->Create(16000, 1, 1);
// nearendBuffer = std::make_unique<webrtc::AudioBuffer>(16000, 1, 16000, 1, 16000, 1);
// farendBuffer = std::make_unique<webrtc::AudioBuffer>(16000, 1, 16000, 1, 16000, 1);
}
std::unique_ptr<webrtc::EchoControl> echoCanceller;
// std::unique_ptr<webrtc::AudioBuffer> nearendBuffer;
// std::unique_ptr<webrtc::AudioBuffer> farendBuffer;
};
EchoRecordTask::EchoRecordTask() : m_d{new EchoRecordPrivate()} {
}
EchoRecordTask::~EchoRecordTask() {
if (m_d != nullptr) {
delete m_d;
}
}
void EchoRecordTask::setDsp(Dsp dsp) {
if (m_dsp != dsp) {
m_dsp = dsp;
}
}
@@ -23,10 +52,11 @@ void EchoRecordTask::setChannels(int channels) {
// ./Record --echo --vqe=true --channels=2
// ./Record --echo --vqe=false --channels=1
void EchoRecordTask::run() {
LOG(info) << "dsp use: " << dspToString(m_dsp);
RkAudio::Format format;
format.channels = m_channels;
format.period = 10;
format.period = 20;
m_speex = std::make_shared<SpeexDsp>();
m_speex->start(format.sampleRate, m_channels, format.period);
@@ -37,7 +67,7 @@ void EchoRecordTask::run() {
m_webRtcAecm->start(format.sampleRate, format.channels, format.period);
m_output = std::make_shared<RkAudio::Output>();
if (!m_output->open(sizeof(uint16_t), format.sampleRate, 2, format.period, m_vqeEnabled)) {
if (!m_output->open(sizeof(uint16_t), format.sampleRate, 2, format.period, m_dsp == Vqe)) {
LOG(error) << "audio output open failed.";
return;
}
@@ -45,16 +75,31 @@ void EchoRecordTask::run() {
m_outBuffer.resize(m_channels * sizeof(int16_t) * format.sampleRate / 1000 * format.period);
m_input = std::make_shared<RkAudio::Input>();
m_input->setDataCallback([this](const RkAudio::Frame &frame) {
m_input->setDataCallback([this, format](const RkAudio::Frame &frame) {
memcpy(m_nearendBuffer.data(), frame.data, frame.byteSize);
m_webRtcAecm->echoPlayback(reinterpret_cast<const int16_t *>(m_farendBuffer.data()), m_farendBuffer.size() / 2);
if (m_dsp == Speex) {
m_speex->echoPlayback(reinterpret_cast<const int16_t *>(m_farendBuffer.data()));
m_speex->echoCapture(reinterpret_cast<const int16_t *>(frame.data), reinterpret_cast<int16_t *>(m_outBuffer.data()));
} else if (m_dsp == AecMobile) {
m_webRtcAecm->echoPlayback(reinterpret_cast<const int16_t *>(m_farendBuffer.data()), m_farendBuffer.size() / 2);
m_webRtcAecm->echoCancellation(reinterpret_cast<int16_t *>(frame.data), reinterpret_cast<int16_t *>(m_nearendBuffer.data()),
reinterpret_cast<int16_t *>(m_outBuffer.data()), frame.frameSize);
// m_speex->echoPlayback(reinterpret_cast<const int16_t *>(frame.data));
// m_speex->echoPlayback(reinterpret_cast<const int16_t *>(m_buffer.data()));
m_webRtcAecm->echoCancellation(reinterpret_cast<int16_t *>(frame.data), reinterpret_cast<int16_t *>(m_nearendBuffer.data()),
reinterpret_cast<int16_t *>(m_outBuffer.data()), frame.frameSize);
} else if (m_dsp == Aec3) {
webrtc::StreamConfig config(format.sampleRate, format.channels); // mono
webrtc::AudioBuffer nearendBuffer(format.sampleRate, 1, format.sampleRate, 1, format.sampleRate, 1);
webrtc::AudioBuffer farendBuffer(format.sampleRate, 1, format.sampleRate, 1, format.sampleRate, 1);
webrtc::AudioBuffer linearOutputBuffer(format.sampleRate, 1, format.sampleRate, 1, format.sampleRate, 1);
nearendBuffer.CopyFrom(reinterpret_cast<const int16_t *>(frame.data), config);
// m_speex->echoCapture(reinterpret_cast<const int16_t *>(frame.data), reinterpret_cast<int16_t *>(m_buffer.data()));
farendBuffer.CopyFrom(reinterpret_cast<const int16_t *>(m_farendBuffer.data()), config);
m_d->echoCanceller->AnalyzeRender(&farendBuffer);
m_d->echoCanceller->AnalyzeCapture(&nearendBuffer);
m_d->echoCanceller->ProcessCapture(&nearendBuffer, &linearOutputBuffer, /*level_change=*/false);
linearOutputBuffer.CopyTo(config, reinterpret_cast<int16_t *>(m_outBuffer.data()));
}
if (m_channels == 2) {
m_output->write(frame.data, frame.byteSize);
@@ -66,5 +111,5 @@ void EchoRecordTask::run() {
// m_output->write(reinterpret_cast<const uint8_t *>(m_buffer.data()), m_buffer.size());
});
m_input->open(format, m_vqeEnabled);
m_input->open(format, m_dsp == Vqe);
}
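For reference, a minimal standalone sketch of the AEC3 path this hunk wires into EchoRecordTask::run(), assuming 16 kHz mono audio in 10 ms (160-sample) int16 frames; the AudioBuffer include path and the helper names below are illustrative, not part of the commit.
#include <cstdint>
#include <memory>
#include "api/audio/echo_canceller3_factory.h"
#include "modules/audio_processing/audio_buffer.h" // include path assumed
// In the commit, this factory call lives in EchoRecordPrivate's constructor.
std::unique_ptr<webrtc::EchoControl> MakeAec3() {
    webrtc::EchoCanceller3Factory factory;
    return factory.Create(/*sample_rate_hz=*/16000,
                          /*num_render_channels=*/1,
                          /*num_capture_channels=*/1);
}
// One 10 ms iteration of the capture callback: farend is the loudspeaker feed,
// nearend the microphone frame, out the echo-suppressed result.
void ProcessFrame(webrtc::EchoControl &aec3,
                  const int16_t *nearend, const int16_t *farend, int16_t *out) {
    const webrtc::StreamConfig config(16000, 1);
    webrtc::AudioBuffer nearBuf(16000, 1, 16000, 1, 16000, 1);
    webrtc::AudioBuffer farBuf(16000, 1, 16000, 1, 16000, 1);
    webrtc::AudioBuffer linearOut(16000, 1, 16000, 1, 16000, 1);
    nearBuf.CopyFrom(nearend, config); // int16 PCM -> float AudioBuffer
    farBuf.CopyFrom(farend, config);
    aec3.AnalyzeRender(&farBuf);       // render (playback) path first
    aec3.AnalyzeCapture(&nearBuf);
    aec3.ProcessCapture(&nearBuf, &linearOut, /*level_change=*/false);
    linearOut.CopyTo(config, out);     // back to int16 PCM
}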

View File

@@ -15,12 +15,6 @@
#include <fstream>
#include <rkmedia/rkmedia_api.h>
extern void rkDemo();
extern int AI_VqeProcess_AO();
extern int AI_VqeProcess_AO1();
extern void AecTest();
extern int opus_test();
void signal_handler(const boost::system::error_code &error, int signal_number) {
if (!error) {
LOG(info) << "Caught signal: " << signal_number << std::endl;
@@ -40,7 +34,7 @@ int main(int argc, char **argv) {
("echo", "Self-recording and self-play test")
("record", "Record to file.")
("play", "Play pcm file.")
("vqe", boost::program_options::value<bool>(), "Enable rk 3a.")
("dsp", boost::program_options::value<std::string>(), "vqe, speex, aecm")
("channels", boost::program_options::value<int>(), "set audio channles")
("path", boost::program_options::value<std::string>(), "file path")
;
@@ -57,9 +51,9 @@ int main(int argc, char **argv) {
std::shared_ptr<Task> task;
if (variablesMap.count("echo")) {
bool vqe = false;
if (variablesMap.count("vqe")) {
vqe = variablesMap["vqe"].as<bool>();
Dsp dsp = Vqe;
if (variablesMap.count("dsp")) {
dsp = dspFromString(variablesMap["dsp"].as<std::string>());
}
int channels = 2;
@@ -68,7 +62,7 @@ int main(int argc, char **argv) {
}
auto t = std::make_shared<EchoRecordTask>();
t->setVqeEnabled(vqe);
t->setDsp(dsp);
t->setChannels(channels);
task = std::dynamic_pointer_cast<Task>(t);
} else if (variablesMap.count("record")) {
@@ -114,4 +108,30 @@ int main(int argc, char **argv) {
// WebRTCPublisher publisher(true, true);
// publisher.start("172.16.103.68", "443", "/index/api/webrtc?app=live&stream=test&type=push");
return 0;
}
Dsp dspFromString(const std::string &dsp) {
Dsp ret = Vqe;
if (dsp == "speex") {
ret = Speex;
} else if (dsp == "aecm") {
ret = AecMobile;
} else if (dsp == "aec3") {
ret = Aec3;
}
return ret;
}
std::string dspToString(Dsp dsp) {
std::string ret = "none";
if (dsp == Vqe) {
ret = "vqe";
} else if (dsp == Speex) {
ret = "speex";
} else if (dsp == AecMobile) {
ret = "aecm";
} else if (dsp == Aec3) {
ret = "aec3";
}
return ret;
}
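With the option rename above, the echo test is selected roughly as follows; a hedged usage sketch based on the parser and defaults in this hunk (--dsp falls back to vqe and --channels to 2 when omitted):
// ./Record --echo --dsp=vqe   --channels=2   (RK hardware 3A / VQE path)
// ./Record --echo --dsp=speex --channels=1   (SpeexDsp echoPlayback/echoCapture)
// ./Record --echo --dsp=aecm  --channels=1   (WebRtcAecm mobile AEC)
// ./Record --echo --dsp=aec3  --channels=1   (WebRTC EchoCanceller3, new in this commit)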

View File

@@ -7,9 +7,19 @@
class SpeexDsp;
class WebRtcAecm;
enum Dsp {
Vqe,
Speex,
AecMobile,
Aec3,
};
class Task {
public:
virtual void run() = 0;
virtual ~Task() {
}
};
class RecorderTask : public Task {
@@ -38,15 +48,18 @@ private:
std::shared_ptr<RkAudio::Output> m_output;
};
class EchoRecordPrivate;
class EchoRecordTask : public Task {
public:
void setVqeEnabled(bool enabled);
EchoRecordTask();
~EchoRecordTask();
void setDsp(Dsp dsp);
void setChannels(int channels);
void run() final;
private:
int m_channels = 2;
bool m_vqeEnabled = false;
Dsp m_dsp = Vqe;
std::shared_ptr<RkAudio::Output> m_output;
std::shared_ptr<RkAudio::Input> m_input;
std::shared_ptr<SpeexDsp> m_speex;
@@ -55,6 +68,11 @@ private:
std::vector<uint8_t> m_nearendBuffer;
std::vector<uint8_t> m_farendBuffer;
EchoRecordPrivate *m_d = nullptr;
};
Dsp dspFromString(const std::string &dsp);
std::string dspToString(Dsp dsp);
#endif // __MAIN_H__

View File

@@ -19,12 +19,14 @@ add_library(VocieProcess
api/audio/audio_processing.h api/audio/audio_processing.cc
api/audio/channel_layout.h api/audio/channel_layout.cc
api/audio/echo_canceller3_config.h api/audio/echo_canceller3_config.cc
api/audio/echo_canceller3_factory.h api/audio/echo_canceller3_factory.cc
api/task_queue/task_queue_base.h api/task_queue/task_queue_base.cc
api/units/time_delta.h api/units/time_delta.cc
api/units/timestamp.h api/units/timestamp.cc
common_audio/audio_util.cc
common_audio/channel_buffer.h common_audio/channel_buffer.cc
common_audio/ring_buffer.h common_audio/ring_buffer.c
@@ -41,6 +43,7 @@ add_library(VocieProcess
common_audio/signal_processing/randomization_functions.c
common_audio/signal_processing/real_fft.c
common_audio/signal_processing/spl_init.c
common_audio/signal_processing/splitting_filter.c
common_audio/signal_processing/vector_scaling_operations.c
common_audio/third_party/ooura/fft_size_128/ooura_fft.h common_audio/third_party/ooura/fft_size_128/ooura_fft.cc
@@ -136,13 +139,16 @@ add_library(VocieProcess
modules/audio_processing/utility/cascaded_biquad_filter.h modules/audio_processing/utility/cascaded_biquad_filter.cc
modules/audio_processing/utility/delay_estimator_wrapper.h modules/audio_processing/utility/delay_estimator_wrapper.cc
modules/audio_processing/utility/delay_estimator.h modules/audio_processing/utility/delay_estimator.cc
system_wrappers/source/field_trial.cc
system_wrappers/source/metrics.cc
)
target_compile_definitions(VocieProcess
PRIVATE NOMINMAX # <windows.h>
PRIVATE RTC_DISABLE_LOGGING
PRIVATE RTC_METRICS_ENABLED=0
PRIVATE WEBRTC_APM_DEBUG_DUMP=0
PUBLIC RTC_DISABLE_METRICS
PUBLIC WEBRTC_APM_DEBUG_DUMP=0
$<$<PLATFORM_ID:Windows>:WEBRTC_WIN>
$<$<PLATFORM_ID:Linux>:WEBRTC_POSIX WEBRTC_LINUX>
)

View File

@@ -0,0 +1,35 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/audio/echo_canceller3_factory.h"
#include <memory>
#include "absl/types/optional.h"
#include "api/audio/echo_canceller3_config.h"
#include "api/audio/echo_control.h"
#include "modules/audio_processing/aec3/echo_canceller3.h"
namespace webrtc {
EchoCanceller3Factory::EchoCanceller3Factory() {}
EchoCanceller3Factory::EchoCanceller3Factory(const EchoCanceller3Config& config)
: config_(config) {}
std::unique_ptr<EchoControl> EchoCanceller3Factory::Create(
int sample_rate_hz,
int num_render_channels,
int num_capture_channels) {
return std::make_unique<EchoCanceller3>(
config_, /*multichannel_config=*/absl::nullopt, sample_rate_hz,
num_render_channels, num_capture_channels);
}
} // namespace webrtc

View File

@@ -0,0 +1,41 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_AUDIO_ECHO_CANCELLER3_FACTORY_H_
#define API_AUDIO_ECHO_CANCELLER3_FACTORY_H_
#include <memory>
#include "api/audio/echo_canceller3_config.h"
#include "api/audio/echo_control.h"
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
class RTC_EXPORT EchoCanceller3Factory : public EchoControlFactory {
public:
// Factory producing EchoCanceller3 instances with the default configuration.
EchoCanceller3Factory();
// Factory producing EchoCanceller3 instances with the specified
// configuration.
explicit EchoCanceller3Factory(const EchoCanceller3Config& config);
// Creates an EchoCanceller3 with a specified channel count and sampling rate.
std::unique_ptr<EchoControl> Create(int sample_rate_hz,
int num_render_channels,
int num_capture_channels) override;
private:
const EchoCanceller3Config config_;
};
} // namespace webrtc
#endif // API_AUDIO_ECHO_CANCELLER3_FACTORY_H_
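A small sketch (not part of the commit) of the explicit-config constructor declared above; the diff itself only uses the default-config path shown in EchoRecordPrivate.
#include <memory>
#include "api/audio/echo_canceller3_config.h"
#include "api/audio/echo_canceller3_factory.h"
std::unique_ptr<webrtc::EchoControl> MakeTunedAec3() {
    webrtc::EchoCanceller3Config config;           // start from the defaults
    // ...adjust config fields here before handing it to the factory...
    webrtc::EchoCanceller3Factory factory(config); // explicit-config constructor
    return factory.Create(/*sample_rate_hz=*/16000,
                          /*num_render_channels=*/1,
                          /*num_capture_channels=*/1);
}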

View File

@@ -0,0 +1,54 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "common_audio/include/audio_util.h"
namespace webrtc {
void FloatToS16(const float* src, size_t size, int16_t* dest) {
for (size_t i = 0; i < size; ++i)
dest[i] = FloatToS16(src[i]);
}
void S16ToFloat(const int16_t* src, size_t size, float* dest) {
for (size_t i = 0; i < size; ++i)
dest[i] = S16ToFloat(src[i]);
}
void S16ToFloatS16(const int16_t* src, size_t size, float* dest) {
for (size_t i = 0; i < size; ++i)
dest[i] = src[i];
}
void FloatS16ToS16(const float* src, size_t size, int16_t* dest) {
for (size_t i = 0; i < size; ++i)
dest[i] = FloatS16ToS16(src[i]);
}
void FloatToFloatS16(const float* src, size_t size, float* dest) {
for (size_t i = 0; i < size; ++i)
dest[i] = FloatToFloatS16(src[i]);
}
void FloatS16ToFloat(const float* src, size_t size, float* dest) {
for (size_t i = 0; i < size; ++i)
dest[i] = FloatS16ToFloat(src[i]);
}
template <>
void DownmixInterleavedToMono<int16_t>(const int16_t* interleaved,
size_t num_frames,
int num_channels,
int16_t* deinterleaved) {
DownmixInterleavedToMonoImpl<int16_t, int32_t>(interleaved, num_frames,
num_channels, deinterleaved);
}
} // namespace webrtc

View File

@@ -0,0 +1,211 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains the splitting filter functions.
*
*/
#include "rtc_base/checks.h"
#include "common_audio/signal_processing/include/signal_processing_library.h"
// Maximum number of samples in a low/high-band frame.
enum
{
kMaxBandFrameLength = 320 // 10 ms at 64 kHz.
};
// QMF filter coefficients in Q16.
static const uint16_t WebRtcSpl_kAllPassFilter1[3] = {6418, 36982, 57261};
static const uint16_t WebRtcSpl_kAllPassFilter2[3] = {21333, 49062, 63010};
///////////////////////////////////////////////////////////////////////////////////////////////
// WebRtcSpl_AllPassQMF(...)
//
// Allpass filter used by the analysis and synthesis parts of the QMF filter.
//
// Input:
// - in_data : Input data sequence (Q10)
// - data_length : Length of data sequence (>2)
// - filter_coefficients : Filter coefficients (length 3, Q16)
//
// Input & Output:
// - filter_state : Filter state (length 6, Q10).
//
// Output:
// - out_data : Output data sequence (Q10), length equal to
// `data_length`
//
static void WebRtcSpl_AllPassQMF(int32_t* in_data,
size_t data_length,
int32_t* out_data,
const uint16_t* filter_coefficients,
int32_t* filter_state)
{
// The procedure is to filter the input with three first order all pass
// filters (cascade operations).
//
// a_3 + q^-1 a_2 + q^-1 a_1 + q^-1
// y[n] = ----------- ----------- ----------- x[n]
// 1 + a_3q^-1 1 + a_2q^-1 1 + a_1q^-1
//
// The input vector `filter_coefficients` includes these three filter
// coefficients. The filter state contains the in_data state, in_data[-1],
// followed by the out_data state, out_data[-1]. This is repeated for each
// cascade. The first cascade filter will filter the `in_data` and store
// the output in `out_data`. The second will the take the `out_data` as
// input and make an intermediate storage in `in_data`, to save memory. The
// third, and final, cascade filter operation takes the `in_data` (which is
// the output from the previous cascade filter) and store the output in
// `out_data`. Note that the input vector values are changed during the
// process.
size_t k;
int32_t diff;
// First all-pass cascade; filter from in_data to out_data.
// Let y_i[n] indicate the output of cascade filter i (with filter
// coefficient a_i) at vector position n. Then the final output will be
// y[n] = y_3[n]
// First loop, use the states stored in memory.
// "diff" should be safe from wrap around since max values are 2^25
// diff = (x[0] - y_1[-1])
diff = WebRtcSpl_SubSatW32(in_data[0], filter_state[1]);
// y_1[0] = x[-1] + a_1 * (x[0] - y_1[-1])
out_data[0] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[0], diff, filter_state[0]);
// For the remaining loops, use previous values.
for (k = 1; k < data_length; k++)
{
// diff = (x[n] - y_1[n-1])
diff = WebRtcSpl_SubSatW32(in_data[k], out_data[k - 1]);
// y_1[n] = x[n-1] + a_1 * (x[n] - y_1[n-1])
out_data[k] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[0], diff, in_data[k - 1]);
}
// Update states.
filter_state[0] = in_data[data_length - 1]; // x[N-1], becomes x[-1] next time
filter_state[1] = out_data[data_length - 1]; // y_1[N-1], becomes y_1[-1] next time
// Second all-pass cascade; filter from out_data to in_data.
// diff = (y_1[0] - y_2[-1])
diff = WebRtcSpl_SubSatW32(out_data[0], filter_state[3]);
// y_2[0] = y_1[-1] + a_2 * (y_1[0] - y_2[-1])
in_data[0] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[1], diff, filter_state[2]);
for (k = 1; k < data_length; k++)
{
// diff = (y_1[n] - y_2[n-1])
diff = WebRtcSpl_SubSatW32(out_data[k], in_data[k - 1]);
// y_2[0] = y_1[-1] + a_2 * (y_1[0] - y_2[-1])
in_data[k] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[1], diff, out_data[k-1]);
}
filter_state[2] = out_data[data_length - 1]; // y_1[N-1], becomes y_1[-1] next time
filter_state[3] = in_data[data_length - 1]; // y_2[N-1], becomes y_2[-1] next time
// Third all-pass cascade; filter from in_data to out_data.
// diff = (y_2[0] - y[-1])
diff = WebRtcSpl_SubSatW32(in_data[0], filter_state[5]);
// y[0] = y_2[-1] + a_3 * (y_2[0] - y[-1])
out_data[0] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[2], diff, filter_state[4]);
for (k = 1; k < data_length; k++)
{
// diff = (y_2[n] - y[n-1])
diff = WebRtcSpl_SubSatW32(in_data[k], out_data[k - 1]);
// y[n] = y_2[n-1] + a_3 * (y_2[n] - y[n-1])
out_data[k] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[2], diff, in_data[k-1]);
}
filter_state[4] = in_data[data_length - 1]; // y_2[N-1], becomes y_2[-1] next time
filter_state[5] = out_data[data_length - 1]; // y[N-1], becomes y[-1] next time
}
void WebRtcSpl_AnalysisQMF(const int16_t* in_data, size_t in_data_length,
int16_t* low_band, int16_t* high_band,
int32_t* filter_state1, int32_t* filter_state2)
{
size_t i;
int16_t k;
int32_t tmp;
int32_t half_in1[kMaxBandFrameLength];
int32_t half_in2[kMaxBandFrameLength];
int32_t filter1[kMaxBandFrameLength];
int32_t filter2[kMaxBandFrameLength];
const size_t band_length = in_data_length / 2;
RTC_DCHECK_EQ(0, in_data_length % 2);
RTC_DCHECK_LE(band_length, kMaxBandFrameLength);
// Split even and odd samples. Also shift them to Q10.
for (i = 0, k = 0; i < band_length; i++, k += 2)
{
half_in2[i] = ((int32_t)in_data[k]) * (1 << 10);
half_in1[i] = ((int32_t)in_data[k + 1]) * (1 << 10);
}
// All pass filter even and odd samples, independently.
WebRtcSpl_AllPassQMF(half_in1, band_length, filter1,
WebRtcSpl_kAllPassFilter1, filter_state1);
WebRtcSpl_AllPassQMF(half_in2, band_length, filter2,
WebRtcSpl_kAllPassFilter2, filter_state2);
// Take the sum and difference of filtered version of odd and even
// branches to get upper & lower band.
for (i = 0; i < band_length; i++)
{
tmp = (filter1[i] + filter2[i] + 1024) >> 11;
low_band[i] = WebRtcSpl_SatW32ToW16(tmp);
tmp = (filter1[i] - filter2[i] + 1024) >> 11;
high_band[i] = WebRtcSpl_SatW32ToW16(tmp);
}
}
void WebRtcSpl_SynthesisQMF(const int16_t* low_band, const int16_t* high_band,
size_t band_length, int16_t* out_data,
int32_t* filter_state1, int32_t* filter_state2)
{
int32_t tmp;
int32_t half_in1[kMaxBandFrameLength];
int32_t half_in2[kMaxBandFrameLength];
int32_t filter1[kMaxBandFrameLength];
int32_t filter2[kMaxBandFrameLength];
size_t i;
int16_t k;
RTC_DCHECK_LE(band_length, kMaxBandFrameLength);
// Obtain the sum and difference channels out of upper and lower-band channels.
// Also shift to Q10 domain.
for (i = 0; i < band_length; i++)
{
tmp = (int32_t)low_band[i] + (int32_t)high_band[i];
half_in1[i] = tmp * (1 << 10);
tmp = (int32_t)low_band[i] - (int32_t)high_band[i];
half_in2[i] = tmp * (1 << 10);
}
// all-pass filter the sum and difference channels
WebRtcSpl_AllPassQMF(half_in1, band_length, filter1,
WebRtcSpl_kAllPassFilter2, filter_state1);
WebRtcSpl_AllPassQMF(half_in2, band_length, filter2,
WebRtcSpl_kAllPassFilter1, filter_state2);
// The filtered signals are even and odd samples of the output. Combine
// them. The signals are in Q10; shift them back to Q0 and take care of
// saturation.
for (i = 0, k = 0; i < band_length; i++)
{
tmp = (filter2[i] + 512) >> 10;
out_data[k++] = WebRtcSpl_SatW32ToW16(tmp);
tmp = (filter1[i] + 512) >> 10;
out_data[k++] = WebRtcSpl_SatW32ToW16(tmp);
}
}
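A short usage sketch for the QMF pair above (not part of the commit), assuming 10 ms frames at 32 kHz so each band carries 160 samples; the helper name is illustrative.
#include <cstdint>
#include "common_audio/signal_processing/include/signal_processing_library.h"
void SplitAndMerge(const int16_t in[320], int16_t out[320]) {
    int16_t low[160], high[160];
    // Each direction needs two 6-element int32 filter states; zero-initialized
    // here for a single frame, but real code keeps them across consecutive frames.
    int32_t analysis_state1[6] = {0}, analysis_state2[6] = {0};
    int32_t synthesis_state1[6] = {0}, synthesis_state2[6] = {0};
    WebRtcSpl_AnalysisQMF(in, 320, low, high, analysis_state1, analysis_state2);
    // ...process the low/high bands independently here...
    WebRtcSpl_SynthesisQMF(low, high, 160, out, synthesis_state1, synthesis_state2);
}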

View File

@@ -0,0 +1,289 @@
// This file was automatically generated. Do not edit.
#ifndef GEN_REGISTERED_FIELD_TRIALS_H_
#define GEN_REGISTERED_FIELD_TRIALS_H_
#include "absl/strings/string_view.h"
namespace webrtc {
inline constexpr absl::string_view kRegisteredFieldTrials[] = {
"WebRTC-AV1-OverridePriorityBitrate",
"WebRTC-AddNetworkCostToVpn",
"WebRTC-AddPacingToCongestionWindowPushback",
"WebRTC-AdjustOpusBandwidth",
"WebRTC-Aec3AecStateFullResetKillSwitch",
"WebRTC-Aec3AecStateSubtractorAnalyzerResetKillSwitch",
"WebRTC-Aec3AntiHowlingMinimizationKillSwitch",
"WebRTC-Aec3BufferingMaxAllowedExcessRenderBlocksOverride",
"WebRTC-Aec3ClampInstQualityToOneKillSwitch",
"WebRTC-Aec3ClampInstQualityToZeroKillSwitch",
"WebRTC-Aec3CoarseFilterResetHangoverKillSwitch",
"WebRTC-Aec3ConservativeTailFreqResponse",
"WebRTC-Aec3DeactivateInitialStateResetKillSwitch",
"WebRTC-Aec3DelayEstimateSmoothingDelayFoundOverride",
"WebRTC-Aec3DelayEstimateSmoothingOverride",
"WebRTC-Aec3EchoSaturationDetectionKillSwitch",
"WebRTC-Aec3EnforceCaptureDelayEstimationDownmixing",
"WebRTC-Aec3EnforceCaptureDelayEstimationLeftRightPrioritization",
"WebRTC-Aec3EnforceConservativeHfSuppression",
"WebRTC-Aec3EnforceLowActiveRenderLimit",
"WebRTC-Aec3EnforceMoreTransparentNearendSuppressorHfTuning",
"WebRTC-Aec3EnforceMoreTransparentNearendSuppressorTuning",
"WebRTC-Aec3EnforceMoreTransparentNormalSuppressorHfTuning",
"WebRTC-Aec3EnforceMoreTransparentNormalSuppressorTuning",
"WebRTC-Aec3EnforceRapidlyAdjustingNearendSuppressorTunings",
"WebRTC-Aec3EnforceRapidlyAdjustingNormalSuppressorTunings",
"WebRTC-Aec3EnforceRenderDelayEstimationDownmixing",
"WebRTC-Aec3EnforceSlowlyAdjustingNearendSuppressorTunings",
"WebRTC-Aec3EnforceSlowlyAdjustingNormalSuppressorTunings",
"WebRTC-Aec3EnforceStationarityProperties",
"WebRTC-Aec3EnforceStationarityPropertiesAtInit",
"WebRTC-Aec3EnforceVeryLowActiveRenderLimit",
"WebRTC-Aec3HighPassFilterEchoReference",
"WebRTC-Aec3MinErleDuringOnsetsKillSwitch",
"WebRTC-Aec3NonlinearModeReverbKillSwitch",
"WebRTC-Aec3OnsetDetectionKillSwitch",
"WebRTC-Aec3RenderDelayEstimationLeftRightPrioritizationKillSwitch",
"WebRTC-Aec3SensitiveDominantNearendActivation",
"WebRTC-Aec3SetupSpecificDefaultConfigDefaultsKillSwitch",
"WebRTC-Aec3ShortHeadroomKillSwitch",
"WebRTC-Aec3StereoContentDetectionKillSwitch",
"WebRTC-Aec3SuppressorAntiHowlingGainOverride",
"WebRTC-Aec3SuppressorDominantNearendEnrExitThresholdOverride",
"WebRTC-Aec3SuppressorDominantNearendEnrThresholdOverride",
"WebRTC-Aec3SuppressorDominantNearendHoldDurationOverride",
"WebRTC-Aec3SuppressorDominantNearendSnrThresholdOverride",
"WebRTC-Aec3SuppressorDominantNearendTriggerThresholdOverride",
"WebRTC-Aec3SuppressorNearendHfMaskSuppressOverride",
"WebRTC-Aec3SuppressorNearendHfMaskTransparentOverride",
"WebRTC-Aec3SuppressorNearendLfMaskSuppressOverride",
"WebRTC-Aec3SuppressorNearendLfMaskTransparentOverride",
"WebRTC-Aec3SuppressorNearendMaxDecFactorLfOverride",
"WebRTC-Aec3SuppressorNearendMaxIncFactorOverride",
"WebRTC-Aec3SuppressorNormalHfMaskSuppressOverride",
"WebRTC-Aec3SuppressorNormalHfMaskTransparentOverride",
"WebRTC-Aec3SuppressorNormalLfMaskSuppressOverride",
"WebRTC-Aec3SuppressorNormalLfMaskTransparentOverride",
"WebRTC-Aec3SuppressorNormalMaxDecFactorLfOverride",
"WebRTC-Aec3SuppressorNormalMaxIncFactorOverride",
"WebRTC-Aec3SuppressorTuningOverride",
"WebRTC-Aec3TransparentAntiHowlingGain",
"WebRTC-Aec3TransparentModeHmm",
"WebRTC-Aec3TransparentModeKillSwitch",
"WebRTC-Aec3Use1Dot2SecondsInitialStateDuration",
"WebRTC-Aec3Use1Dot6SecondsInitialStateDuration",
"WebRTC-Aec3Use2Dot0SecondsInitialStateDuration",
"WebRTC-Aec3UseDot1SecondsInitialStateDuration",
"WebRTC-Aec3UseDot2SecondsInitialStateDuration",
"WebRTC-Aec3UseDot3SecondsInitialStateDuration",
"WebRTC-Aec3UseDot6SecondsInitialStateDuration",
"WebRTC-Aec3UseDot9SecondsInitialStateDuration",
"WebRTC-Aec3UseErleOnsetCompensationInDominantNearend",
"WebRTC-Aec3UseLowEarlyReflectionsDefaultGain",
"WebRTC-Aec3UseLowLateReflectionsDefaultGain",
"WebRTC-Aec3UseNearendReverbLen",
"WebRTC-Aec3UseShortConfigChangeDuration",
"WebRTC-Aec3UseZeroInitialStateDuration",
"WebRTC-Aec3VerySensitiveDominantNearendActivation",
"WebRTC-Agc2SimdAvx2KillSwitch",
"WebRTC-Agc2SimdNeonKillSwitch",
"WebRTC-Agc2SimdSse2KillSwitch",
"WebRTC-AllowMACBasedIPv6",
"WebRTC-AlrDetectorParameters",
"WebRTC-AndroidNetworkMonitor-IsAdapterAvailable",
"WebRTC-ApmExperimentalMultiChannelCaptureKillSwitch",
"WebRTC-ApmExperimentalMultiChannelRenderKillSwitch",
"WebRTC-Audio-2ndAgcMinMicLevelExperiment",
"WebRTC-Audio-ABWENoTWCC",
"WebRTC-Audio-AdaptivePtime",
"WebRTC-Audio-Allocation",
"WebRTC-Audio-AlrProbing",
"WebRTC-Audio-FecAdaptation",
"WebRTC-Audio-GainController2",
"WebRTC-Audio-LegacyOverhead",
"WebRTC-Audio-MinimizeResamplingOnMobile",
"WebRTC-Audio-NetEqDecisionLogicConfig",
"WebRTC-Audio-NetEqDelayManagerConfig",
"WebRTC-Audio-NetEqNackTrackerConfig",
"WebRTC-Audio-NetEqSmartFlushing",
"WebRTC-Audio-OpusBitrateMultipliers",
"WebRTC-Audio-OpusGeneratePlc",
"WebRTC-Audio-PriorityBitrate",
"WebRTC-Audio-Red-For-Opus",
"WebRTC-Audio-StableTargetAdaptation",
"WebRTC-Audio-iOS-Holding",
"WebRTC-AudioDevicePlayoutBufferSizeFactor",
"WebRTC-Av1-GetEncoderInfoOverride",
"WebRTC-BindUsingInterfaceName",
"WebRTC-BitrateAdjusterUseNewfangledHeadroomAdjustment",
"WebRTC-Bwe-AllocationProbing",
"WebRTC-Bwe-AlrProbing",
"WebRTC-Bwe-EstimateBoundedIncrease",
"WebRTC-Bwe-ExponentialProbing",
"WebRTC-Bwe-IgnoreProbesLowerThanNetworkStateEstimate",
"WebRTC-Bwe-InitialProbing",
"WebRTC-Bwe-InjectedCongestionController",
"WebRTC-Bwe-LimitPacingFactorByUpperLinkCapacityEstimate",
"WebRTC-Bwe-LimitProbesLowerThanThroughputEstimate",
"WebRTC-Bwe-LossBasedBweV2",
"WebRTC-Bwe-LossBasedControl",
"WebRTC-Bwe-MaxRttLimit",
"WebRTC-Bwe-MinAllocAsLowerBound",
"WebRTC-Bwe-NetworkRouteConstraints",
"WebRTC-Bwe-NoFeedbackReset",
"WebRTC-Bwe-PaceAtMaxOfBweAndLowerLinkCapacity",
"WebRTC-Bwe-ProbingBehavior",
"WebRTC-Bwe-ProbingConfiguration",
"WebRTC-Bwe-ReceiveTimeFix",
"WebRTC-Bwe-ReceiverLimitCapsOnly",
"WebRTC-Bwe-ResetOnAdapterIdChange",
"WebRTC-Bwe-RobustThroughputEstimatorSettings",
"WebRTC-Bwe-SafeResetOnRouteChange",
"WebRTC-Bwe-SeparateAudioPackets",
"WebRTC-Bwe-TrendlineEstimatorSettings",
"WebRTC-BweBackOffFactor",
"WebRTC-BweLossExperiment",
"WebRTC-BweRapidRecoveryExperiment",
"WebRTC-BweThroughputWindowConfig",
"WebRTC-BweWindowSizeInPackets",
"WebRTC-CongestionWindow",
"WebRTC-CpuLoadEstimator",
"WebRTC-DataChannelMessageInterleaving",
"WebRTC-Debugging-RtpDump",
"WebRTC-DecoderDataDumpDirectory",
"WebRTC-DefaultBitrateLimitsKillSwitch",
"WebRTC-DependencyDescriptorAdvertised",
"WebRTC-DisableRtxRateLimiter",
"WebRTC-DisableUlpFecExperiment",
"WebRTC-DontIncreaseDelayBasedBweInAlr",
"WebRTC-DscpFieldTrial",
"WebRTC-ElasticBitrateAllocation",
"WebRTC-EncoderDataDumpDirectory",
"WebRTC-ExtraICEPing",
"WebRTC-FakeNetworkReceiveConfig",
"WebRTC-FakeNetworkSendConfig",
"WebRTC-FilterAbsSendTimeExtension",
"WebRTC-FindNetworkHandleWithoutIpv6TemporaryPart",
"WebRTC-FlexFEC-03",
"WebRTC-FlexFEC-03-Advertised",
"WebRTC-ForcePlayoutDelay",
"WebRTC-ForceSendPlayoutDelay",
"WebRTC-ForceSimulatedOveruseIntervalMs",
"WebRTC-FrameCadenceAdapter-UseVideoFrameTimestamp",
"WebRTC-FrameDropper",
"WebRTC-FullBandHpfKillSwitch",
"WebRTC-GenericCodecDependencyDescriptor",
"WebRTC-GenericDescriptorAdvertised",
"WebRTC-GenericDescriptorAuth",
"WebRTC-GenericPictureId",
"WebRTC-GetEncoderInfoOverride",
"WebRTC-H264HighProfile",
"WebRTC-IPv6Default",
"WebRTC-IPv6NetworkResolutionFixes",
"WebRTC-IceControllerFieldTrials",
"WebRTC-IceFieldTrials",
"WebRTC-IncomingTimestampOnMarkerBitOnly",
"WebRTC-IncreaseIceCandidatePriorityHostSrflx",
"WebRTC-JitterEstimatorConfig",
"WebRTC-KeyframeInterval",
"WebRTC-LegacyFrameIdJumpBehavior",
"WebRTC-LegacySimulcastLayerLimit",
"WebRTC-LegacyTlsProtocols",
"WebRTC-LibaomAv1Encoder-AdaptiveMaxConsecDrops",
"WebRTC-LibvpxVp8Encoder-AndroidSpecificThreadingSettings",
"WebRTC-LibvpxVp9Encoder-SvcFrameDropConfig",
"WebRTC-LowresSimulcastBitrateInterpolation",
"WebRTC-MutedStateKillSwitch",
"WebRTC-Network-UseNWPathMonitor",
"WebRTC-NetworkMonitorAutoDetect",
"WebRTC-NormalizeSimulcastResolution",
"WebRTC-Pacer-BlockAudio",
"WebRTC-Pacer-DrainQueue",
"WebRTC-Pacer-FastRetransmissions",
"WebRTC-Pacer-IgnoreTransportOverhead",
"WebRTC-Pacer-KeyframeFlushing",
"WebRTC-Pacer-PadInSilence",
"WebRTC-PacketBufferMaxSize",
"WebRTC-PcFactoryDefaultBitrates",
"WebRTC-PermuteTlsClientHello",
"WebRTC-PiggybackIceCheckAcknowledgement",
"WebRTC-PixelLimitResource",
"WebRTC-ProbingScreenshareBwe",
"WebRTC-ProtectionOverheadRateThreshold",
"WebRTC-QCM-Dynamic-AV1",
"WebRTC-QCM-Dynamic-VP8",
"WebRTC-QCM-Dynamic-VP9",
"WebRTC-QpParsingKillSwitch",
"WebRTC-RFC8888CongestionControlFeedback",
"WebRTC-ReceiveBufferSize",
"WebRTC-RtcEventLogEncodeDependencyDescriptor",
"WebRTC-RtcEventLogEncodeNetEqSetMinimumDelayKillSwitch",
"WebRTC-RtcEventLogKillSwitch",
"WebRTC-RtcEventLogNewFormat",
"WebRTC-RtcpLossNotification",
"WebRTC-SendBufferSizeBytes",
"WebRTC-SendNackDelayMs",
"WebRTC-SetCodecPreferences-ReceiveOnlyFilterInsteadOfThrow",
"WebRTC-SetReadyToSendFalseIfSendFail",
"WebRTC-SetSocketReceiveBuffer",
"WebRTC-SimulcastEncoderAdapter-GetEncoderInfoOverride",
"WebRTC-SimulcastLayerLimitRoundUp",
"WebRTC-SpsPpsIdrIsH264Keyframe",
"WebRTC-SrtpRemoveReceiveStream",
"WebRTC-StableTargetRate",
"WebRTC-StrictPacingAndProbing",
"WebRTC-StunInterPacketDelay",
"WebRTC-SurfaceCellularTypes",
"WebRTC-SwitchEncoderOnInitializationFailures",
"WebRTC-Target-Bitrate-Rtcp",
"WebRTC-TaskQueue-ReplaceLibeventWithStdlib",
"WebRTC-TransientSuppressorForcedOff",
"WebRTC-UseBaseHeavyVP8TL3RateAllocation",
"WebRTC-UseDifferentiatedCellularCosts",
"WebRTC-UseNtpTimeAbsoluteSendTime",
"WebRTC-UseStandardBytesStats",
"WebRTC-UseTurnServerAsStunServer",
"WebRTC-VP8-ForcePartitionResilience",
"WebRTC-VP8-Forced-Fallback-Encoder-v2",
"WebRTC-VP8-GetEncoderInfoOverride",
"WebRTC-VP8-MaxFrameInterval",
"WebRTC-VP8-Postproc-Config",
"WebRTC-VP8-Postproc-Config-Arm",
"WebRTC-VP8IosMaxNumberOfThread",
"WebRTC-VP9-GetEncoderInfoOverride",
"WebRTC-VP9-LowTierOptimizations",
"WebRTC-VP9-PerformanceFlags",
"WebRTC-VP9QualityScaler",
"WebRTC-Video-AV1EvenPayloadSizes",
"WebRTC-Video-BalancedDegradation",
"WebRTC-Video-BalancedDegradationSettings",
"WebRTC-Video-DisableAutomaticResize",
"WebRTC-Video-DiscardPacketsWithUnknownSsrc",
"WebRTC-Video-EnableRetransmitAllLayers",
"WebRTC-Video-EncoderFallbackSettings",
"WebRTC-Video-ForcedSwDecoderFallback",
"WebRTC-Video-H26xPacketBuffer",
"WebRTC-Video-InitialDecoderResolution",
"WebRTC-Video-MinVideoBitrate",
"WebRTC-Video-Pacing",
"WebRTC-Video-PreferTemporalSupportOnBaseLayer",
"WebRTC-Video-QualityScalerSettings",
"WebRTC-Video-QualityScaling",
"WebRTC-Video-SimulcastIndependentFrameIds",
"WebRTC-Video-UseFrameRateForOverhead",
"WebRTC-Video-Vp9FlexibleMode",
"WebRTC-VideoEncoderSettings",
"WebRTC-VideoFrameTrackingIdAdvertised",
"WebRTC-VideoLayersAllocationAdvertised",
"WebRTC-VideoRateControl",
"WebRTC-Vp9ExternalRefCtrl",
"WebRTC-Vp9InterLayerPred",
"WebRTC-Vp9IssueKeyFrameOnLayerDeactivation",
"WebRTC-ZeroHertzQueueOverload",
"WebRTC-ZeroPlayoutDelay",
};
} // namespace webrtc
#endif // GEN_REGISTERED_FIELD_TRIALS_H_

View File

@@ -0,0 +1,101 @@
/*
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef RTC_BASE_SYNCHRONIZATION_MUTEX_PTHREAD_H_
#define RTC_BASE_SYNCHRONIZATION_MUTEX_PTHREAD_H_
#if defined(WEBRTC_POSIX)
#include <pthread.h>
#if defined(WEBRTC_MAC)
#include <pthread_spis.h>
#endif
#include "absl/base/attributes.h"
#include "rtc_base/system/no_unique_address.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
class RTC_LOCKABLE MutexImpl final {
public:
MutexImpl() {
pthread_mutexattr_t mutex_attribute;
pthread_mutexattr_init(&mutex_attribute);
#if defined(WEBRTC_MAC)
pthread_mutexattr_setpolicy_np(&mutex_attribute,
_PTHREAD_MUTEX_POLICY_FIRSTFIT);
#endif
pthread_mutex_init(&mutex_, &mutex_attribute);
pthread_mutexattr_destroy(&mutex_attribute);
}
MutexImpl(const MutexImpl&) = delete;
MutexImpl& operator=(const MutexImpl&) = delete;
~MutexImpl() { pthread_mutex_destroy(&mutex_); }
void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() {
pthread_mutex_lock(&mutex_);
owner_.SetOwner();
}
ABSL_MUST_USE_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) {
if (pthread_mutex_trylock(&mutex_) != 0) {
return false;
}
owner_.SetOwner();
return true;
}
void AssertHeld() const RTC_ASSERT_EXCLUSIVE_LOCK() { owner_.AssertOwned(); }
void Unlock() RTC_UNLOCK_FUNCTION() {
owner_.ClearOwner();
pthread_mutex_unlock(&mutex_);
}
private:
class OwnerRecord {
public:
#if !RTC_DCHECK_IS_ON
void SetOwner() {}
void ClearOwner() {}
void AssertOwned() const {}
#else
void SetOwner() {
latest_owner_ = pthread_self();
is_owned_ = true;
}
void ClearOwner() { is_owned_ = false; }
void AssertOwned() const {
RTC_CHECK(is_owned_);
RTC_CHECK(pthread_equal(latest_owner_, pthread_self()));
}
private:
// Use two separate primitive types, rather than absl::optional, since the
// data race described below might invalidate absl::optional invariants.
bool is_owned_ = false;
pthread_t latest_owner_ = pthread_self();
#endif
};
pthread_mutex_t mutex_;
// This record is modified only with the mutex held, and hence, calls to
// AssertHeld where mutex is held are race-free and will always succeed.
//
// The failure case is more subtle: If AssertHeld is called from some thread
// not holding the mutex, and RTC_DCHECK_IS_ON==1, we have a data race. It is
// highly likely that the calling thread will see `is_owned_` false or
// `latest_owner_` different from itself, and crash. But it may fail to crash,
// and invoke some other undefined behavior (still, this race can happen only
// when RTC_DCHECK_IS_ON==1).
RTC_NO_UNIQUE_ADDRESS OwnerRecord owner_;
};
} // namespace webrtc
#endif // #if defined(WEBRTC_POSIX)
#endif // RTC_BASE_SYNCHRONIZATION_MUTEX_PTHREAD_H_
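A small usage sketch (not part of the commit) for a POSIX build: production code normally goes through webrtc::Mutex and MutexLock, but the raw surface added here is just Lock/TryLock/Unlock plus the debug-only AssertHeld; the globals below are illustrative.
#include "rtc_base/synchronization/mutex_pthread.h"
static webrtc::MutexImpl g_mutex;
static int g_counter = 0;
void Increment() {
    g_mutex.Lock();
    g_mutex.AssertHeld(); // no-op unless RTC_DCHECK_IS_ON
    ++g_counter;
    g_mutex.Unlock();
}
bool TryIncrement() {
    if (!g_mutex.TryLock()) // must consume the result (ABSL_MUST_USE_RESULT)
        return false;
    ++g_counter;
    g_mutex.Unlock();
    return true;
}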

View File

@@ -0,0 +1,186 @@
// Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the LICENSE file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.
//
#include "system_wrappers/include/field_trial.h"
#include <stddef.h>
#include <map>
#include <string>
#include <utility>
#include "absl/algorithm/container.h"
#include "absl/strings/string_view.h"
#include "experiments/registered_field_trials.h"
#include "rtc_base/checks.h"
#include "rtc_base/containers/flat_set.h"
#include "rtc_base/logging.h"
#include "rtc_base/string_encode.h"
// Simple field trial implementation, which allows client to
// specify desired flags in InitFieldTrialsFromString.
namespace webrtc {
namespace field_trial {
static const char* trials_init_string = NULL;
namespace {
constexpr char kPersistentStringSeparator = '/';
flat_set<std::string>& TestKeys() {
static auto* test_keys = new flat_set<std::string>();
return *test_keys;
}
// Validates the given field trial string.
// E.g.:
// "WebRTC-experimentFoo/Enabled/WebRTC-experimentBar/Enabled100kbps/"
// Assigns the process to group "Enabled" on WebRTCExperimentFoo trial
// and to group "Enabled100kbps" on WebRTCExperimentBar.
//
// E.g. invalid config:
// "WebRTC-experiment1/Enabled" (note missing / separator at the end).
bool FieldTrialsStringIsValidInternal(const absl::string_view trials) {
if (trials.empty())
return true;
size_t next_item = 0;
std::map<absl::string_view, absl::string_view> field_trials;
while (next_item < trials.length()) {
size_t name_end = trials.find(kPersistentStringSeparator, next_item);
if (name_end == trials.npos || next_item == name_end)
return false;
size_t group_name_end =
trials.find(kPersistentStringSeparator, name_end + 1);
if (group_name_end == trials.npos || name_end + 1 == group_name_end)
return false;
absl::string_view name = trials.substr(next_item, name_end - next_item);
absl::string_view group_name =
trials.substr(name_end + 1, group_name_end - name_end - 1);
next_item = group_name_end + 1;
// Fail if duplicate with different group name.
if (field_trials.find(name) != field_trials.end() &&
field_trials.find(name)->second != group_name) {
return false;
}
field_trials[name] = group_name;
}
return true;
}
} // namespace
bool FieldTrialsStringIsValid(absl::string_view trials_string) {
return FieldTrialsStringIsValidInternal(trials_string);
}
void InsertOrReplaceFieldTrialStringsInMap(
std::map<std::string, std::string>* fieldtrial_map,
const absl::string_view trials_string) {
if (FieldTrialsStringIsValidInternal(trials_string)) {
std::vector<absl::string_view> tokens = rtc::split(trials_string, '/');
// Skip last token which is empty due to trailing '/'.
for (size_t idx = 0; idx < tokens.size() - 1; idx += 2) {
(*fieldtrial_map)[std::string(tokens[idx])] =
std::string(tokens[idx + 1]);
}
} else {
RTC_DCHECK_NOTREACHED() << "Invalid field trials string:" << trials_string;
}
}
std::string MergeFieldTrialsStrings(absl::string_view first,
absl::string_view second) {
std::map<std::string, std::string> fieldtrial_map;
InsertOrReplaceFieldTrialStringsInMap(&fieldtrial_map, first);
InsertOrReplaceFieldTrialStringsInMap(&fieldtrial_map, second);
// Merge into fieldtrial string.
std::string merged = "";
for (auto const& fieldtrial : fieldtrial_map) {
merged += fieldtrial.first + '/' + fieldtrial.second + '/';
}
return merged;
}
#ifndef WEBRTC_EXCLUDE_FIELD_TRIAL_DEFAULT
std::string FindFullName(absl::string_view name) {
#if WEBRTC_STRICT_FIELD_TRIALS == 1
RTC_DCHECK(absl::c_linear_search(kRegisteredFieldTrials, name) ||
TestKeys().contains(name))
<< name << " is not registered, see g3doc/field-trials.md.";
#elif WEBRTC_STRICT_FIELD_TRIALS == 2
RTC_LOG_IF(LS_WARNING,
!(absl::c_linear_search(kRegisteredFieldTrials, name) ||
TestKeys().contains(name)))
<< name << " is not registered, see g3doc/field-trials.md.";
#endif
if (trials_init_string == NULL)
return std::string();
absl::string_view trials_string(trials_init_string);
if (trials_string.empty())
return std::string();
size_t next_item = 0;
while (next_item < trials_string.length()) {
// Find next name/value pair in field trial configuration string.
size_t field_name_end =
trials_string.find(kPersistentStringSeparator, next_item);
if (field_name_end == trials_string.npos || field_name_end == next_item)
break;
size_t field_value_end =
trials_string.find(kPersistentStringSeparator, field_name_end + 1);
if (field_value_end == trials_string.npos ||
field_value_end == field_name_end + 1)
break;
absl::string_view field_name =
trials_string.substr(next_item, field_name_end - next_item);
absl::string_view field_value = trials_string.substr(
field_name_end + 1, field_value_end - field_name_end - 1);
next_item = field_value_end + 1;
if (name == field_name)
return std::string(field_value);
}
return std::string();
}
#endif // WEBRTC_EXCLUDE_FIELD_TRIAL_DEFAULT
// Optionally initialize field trial from a string.
void InitFieldTrialsFromString(const char* trials_string) {
RTC_LOG(LS_INFO) << "Setting field trial string:" << trials_string;
if (trials_string) {
RTC_DCHECK(FieldTrialsStringIsValidInternal(trials_string))
<< "Invalid field trials string:" << trials_string;
};
trials_init_string = trials_string;
}
const char* GetFieldTrialString() {
return trials_init_string;
}
FieldTrialsAllowedInScopeForTesting::FieldTrialsAllowedInScopeForTesting(
flat_set<std::string> keys) {
TestKeys() = std::move(keys);
}
FieldTrialsAllowedInScopeForTesting::~FieldTrialsAllowedInScopeForTesting() {
TestKeys().clear();
}
} // namespace field_trial
} // namespace webrtc
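A usage sketch (not part of the commit) for the trial-string format described above, using two names from the registered list; the declarations are assumed to come from system_wrappers/include/field_trial.h.
#include <string>
#include "system_wrappers/include/field_trial.h"
void ConfigureTrials() {
    // "Trial/Group/" pairs, each pair terminated by '/'.
    static const char kTrials[] =
        "WebRTC-Aec3TransparentModeKillSwitch/Enabled/"
        "WebRTC-Aec3UseNearendReverbLen/Enabled/";
    // The string is stored by pointer, so it must outlive all lookups.
    if (webrtc::field_trial::FieldTrialsStringIsValid(kTrials))
        webrtc::field_trial::InitFieldTrialsFromString(kTrials);
    // "Enabled" for the first trial above, "" for anything not configured.
    std::string group =
        webrtc::field_trial::FindFullName("WebRTC-Aec3TransparentModeKillSwitch");
    (void)group;
}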

View File

@@ -0,0 +1,331 @@
// Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the LICENSE file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.
//
#include "system_wrappers/include/metrics.h"
#include <algorithm>
#include "absl/strings/string_view.h"
#include "rtc_base/string_utils.h"
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
// Default implementation of histogram methods for WebRTC clients that do not
// want to provide their own implementation.
namespace webrtc {
namespace metrics {
class Histogram;
namespace {
// Limit for the maximum number of sample values that can be stored.
// TODO(asapersson): Consider using bucket count (and set up
// linearly/exponentially spaced buckets) if samples are logged more frequently.
const int kMaxSampleMapSize = 300;
class RtcHistogram {
public:
RtcHistogram(absl::string_view name, int min, int max, int bucket_count)
: min_(min), max_(max), info_(name, min, max, bucket_count) {
RTC_DCHECK_GT(bucket_count, 0);
}
RtcHistogram(const RtcHistogram&) = delete;
RtcHistogram& operator=(const RtcHistogram&) = delete;
void Add(int sample) {
sample = std::min(sample, max_);
sample = std::max(sample, min_ - 1); // Underflow bucket.
MutexLock lock(&mutex_);
if (info_.samples.size() == kMaxSampleMapSize &&
info_.samples.find(sample) == info_.samples.end()) {
return;
}
++info_.samples[sample];
}
// Returns a copy (or nullptr if there are no samples) and clears samples.
std::unique_ptr<SampleInfo> GetAndReset() {
MutexLock lock(&mutex_);
if (info_.samples.empty())
return nullptr;
SampleInfo* copy =
new SampleInfo(info_.name, info_.min, info_.max, info_.bucket_count);
std::swap(info_.samples, copy->samples);
return std::unique_ptr<SampleInfo>(copy);
}
const std::string& name() const { return info_.name; }
// Functions only for testing.
void Reset() {
MutexLock lock(&mutex_);
info_.samples.clear();
}
int NumEvents(int sample) const {
MutexLock lock(&mutex_);
const auto it = info_.samples.find(sample);
return (it == info_.samples.end()) ? 0 : it->second;
}
int NumSamples() const {
int num_samples = 0;
MutexLock lock(&mutex_);
for (const auto& sample : info_.samples) {
num_samples += sample.second;
}
return num_samples;
}
int MinSample() const {
MutexLock lock(&mutex_);
return (info_.samples.empty()) ? -1 : info_.samples.begin()->first;
}
std::map<int, int> Samples() const {
MutexLock lock(&mutex_);
return info_.samples;
}
private:
mutable Mutex mutex_;
const int min_;
const int max_;
SampleInfo info_ RTC_GUARDED_BY(mutex_);
};
class RtcHistogramMap {
public:
RtcHistogramMap() {}
~RtcHistogramMap() {}
RtcHistogramMap(const RtcHistogramMap&) = delete;
RtcHistogramMap& operator=(const RtcHistogramMap&) = delete;
Histogram* GetCountsHistogram(absl::string_view name,
int min,
int max,
int bucket_count) {
MutexLock lock(&mutex_);
const auto& it = map_.find(name);
if (it != map_.end())
return reinterpret_cast<Histogram*>(it->second.get());
RtcHistogram* hist = new RtcHistogram(name, min, max, bucket_count);
map_.emplace(name, hist);
return reinterpret_cast<Histogram*>(hist);
}
Histogram* GetEnumerationHistogram(absl::string_view name, int boundary) {
MutexLock lock(&mutex_);
const auto& it = map_.find(name);
if (it != map_.end())
return reinterpret_cast<Histogram*>(it->second.get());
RtcHistogram* hist = new RtcHistogram(name, 1, boundary, boundary + 1);
map_.emplace(name, hist);
return reinterpret_cast<Histogram*>(hist);
}
void GetAndReset(std::map<std::string,
std::unique_ptr<SampleInfo>,
rtc::AbslStringViewCmp>* histograms) {
MutexLock lock(&mutex_);
for (const auto& kv : map_) {
std::unique_ptr<SampleInfo> info = kv.second->GetAndReset();
if (info)
histograms->insert(std::make_pair(kv.first, std::move(info)));
}
}
// Functions only for testing.
void Reset() {
MutexLock lock(&mutex_);
for (const auto& kv : map_)
kv.second->Reset();
}
int NumEvents(absl::string_view name, int sample) const {
MutexLock lock(&mutex_);
const auto& it = map_.find(name);
return (it == map_.end()) ? 0 : it->second->NumEvents(sample);
}
int NumSamples(absl::string_view name) const {
MutexLock lock(&mutex_);
const auto& it = map_.find(name);
return (it == map_.end()) ? 0 : it->second->NumSamples();
}
int MinSample(absl::string_view name) const {
MutexLock lock(&mutex_);
const auto& it = map_.find(name);
return (it == map_.end()) ? -1 : it->second->MinSample();
}
std::map<int, int> Samples(absl::string_view name) const {
MutexLock lock(&mutex_);
const auto& it = map_.find(name);
return (it == map_.end()) ? std::map<int, int>() : it->second->Samples();
}
private:
mutable Mutex mutex_;
std::map<std::string, std::unique_ptr<RtcHistogram>, rtc::AbslStringViewCmp>
map_ RTC_GUARDED_BY(mutex_);
};
// RtcHistogramMap is allocated upon call to Enable().
// The histogram getter functions, which return pointer values to the histograms
// in the map, are cached in WebRTC. Therefore, this memory is not freed by the
// application (the memory will be reclaimed by the OS).
static std::atomic<RtcHistogramMap*> g_rtc_histogram_map(nullptr);
void CreateMap() {
RtcHistogramMap* map = g_rtc_histogram_map.load(std::memory_order_acquire);
if (map == nullptr) {
RtcHistogramMap* new_map = new RtcHistogramMap();
if (!g_rtc_histogram_map.compare_exchange_strong(map, new_map))
delete new_map;
}
}
// Set the first time we start using histograms. Used to make sure Enable() is
// not called thereafter.
#if RTC_DCHECK_IS_ON
static std::atomic<int> g_rtc_histogram_called(0);
#endif
// Gets the map (or nullptr).
RtcHistogramMap* GetMap() {
#if RTC_DCHECK_IS_ON
g_rtc_histogram_called.store(1, std::memory_order_release);
#endif
return g_rtc_histogram_map.load();
}
} // namespace
#ifndef WEBRTC_EXCLUDE_METRICS_DEFAULT
// Implementation of histogram methods in
// webrtc/system_wrappers/interface/metrics.h.
// Histogram with exponentially spaced buckets.
// Creates (or finds) histogram.
// The returned histogram pointer is cached (and used for adding samples in
// subsequent calls).
Histogram* HistogramFactoryGetCounts(absl::string_view name,
int min,
int max,
int bucket_count) {
// TODO(asapersson): Alternative implementation will be needed if this
// histogram type should be truly exponential.
return HistogramFactoryGetCountsLinear(name, min, max, bucket_count);
}
// Histogram with linearly spaced buckets.
// Creates (or finds) histogram.
// The returned histogram pointer is cached (and used for adding samples in
// subsequent calls).
Histogram* HistogramFactoryGetCountsLinear(absl::string_view name,
int min,
int max,
int bucket_count) {
RtcHistogramMap* map = GetMap();
if (!map)
return nullptr;
return map->GetCountsHistogram(name, min, max, bucket_count);
}
// Histogram with linearly spaced buckets.
// Creates (or finds) histogram.
// The returned histogram pointer is cached (and used for adding samples in
// subsequent calls).
Histogram* HistogramFactoryGetEnumeration(absl::string_view name,
int boundary) {
RtcHistogramMap* map = GetMap();
if (!map)
return nullptr;
return map->GetEnumerationHistogram(name, boundary);
}
// Our default implementation reuses the non-sparse histogram.
Histogram* SparseHistogramFactoryGetEnumeration(absl::string_view name,
int boundary) {
return HistogramFactoryGetEnumeration(name, boundary);
}
// Fast path. Adds `sample` to cached `histogram_pointer`.
void HistogramAdd(Histogram* histogram_pointer, int sample) {
RtcHistogram* ptr = reinterpret_cast<RtcHistogram*>(histogram_pointer);
ptr->Add(sample);
}
#endif // WEBRTC_EXCLUDE_METRICS_DEFAULT
SampleInfo::SampleInfo(absl::string_view name,
int min,
int max,
size_t bucket_count)
: name(name), min(min), max(max), bucket_count(bucket_count) {}
SampleInfo::~SampleInfo() {}
// Implementation of global functions in metrics.h.
void Enable() {
RTC_DCHECK(g_rtc_histogram_map.load() == nullptr);
#if RTC_DCHECK_IS_ON
RTC_DCHECK_EQ(0, g_rtc_histogram_called.load(std::memory_order_acquire));
#endif
CreateMap();
}
void GetAndReset(
std::map<std::string, std::unique_ptr<SampleInfo>, rtc::AbslStringViewCmp>*
histograms) {
histograms->clear();
RtcHistogramMap* map = GetMap();
if (map)
map->GetAndReset(histograms);
}
void Reset() {
RtcHistogramMap* map = GetMap();
if (map)
map->Reset();
}
int NumEvents(absl::string_view name, int sample) {
RtcHistogramMap* map = GetMap();
return map ? map->NumEvents(name, sample) : 0;
}
int NumSamples(absl::string_view name) {
RtcHistogramMap* map = GetMap();
return map ? map->NumSamples(name) : 0;
}
int MinSample(absl::string_view name) {
RtcHistogramMap* map = GetMap();
return map ? map->MinSample(name) : -1;
}
std::map<int, int> Samples(absl::string_view name) {
RtcHistogramMap* map = GetMap();
return map ? map->Samples(name) : std::map<int, int>();
}
} // namespace metrics
} // namespace webrtc
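A usage sketch (not part of the commit): clients normally use the RTC_HISTOGRAM_* macros from system_wrappers/include/metrics.h, but the default implementation above can be exercised directly; the histogram name and values below are illustrative.
#include "system_wrappers/include/metrics.h"
void RecordDelays() {
    // Without Enable() the factories return nullptr and samples are dropped.
    webrtc::metrics::Enable();
    webrtc::metrics::Histogram *h = webrtc::metrics::HistogramFactoryGetCounts(
        "WebRTC.Example.DelayMs", /*min=*/1, /*max=*/500, /*bucket_count=*/50);
    if (h != nullptr) {
        webrtc::metrics::HistogramAdd(h, 12);
        webrtc::metrics::HistogramAdd(h, 12);
        webrtc::metrics::HistogramAdd(h, 30);
    }
    // Test helpers defined above: 3 samples total, two of them equal to 12.
    int samples = webrtc::metrics::NumSamples("WebRTC.Example.DelayMs");
    int events = webrtc::metrics::NumEvents("WebRTC.Example.DelayMs", 12);
    (void)samples;
    (void)events;
}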