/***
    This file is part of PulseAudio.

    Copyright 2011 Collabora Ltd.
              2015 Aldebaran SoftBank Group

    Contributor: Arun Raghavan <mail@arunraghavan.net>

    PulseAudio is free software; you can redistribute it and/or modify
    it under the terms of the GNU Lesser General Public License as published
    by the Free Software Foundation; either version 2.1 of the License,
    or (at your option) any later version.

    PulseAudio is distributed in the hope that it will be useful, but
    WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
    General Public License for more details.

    You should have received a copy of the GNU Lesser General Public License
    along with PulseAudio; if not, see <http://www.gnu.org/licenses/>.
***/

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include <pulse/cdecl.h>

PA_C_DECL_BEGIN
#include <pulsecore/core-util.h>
#include <pulsecore/modargs.h>

#include <pulse/timeval.h>
#include "echo-cancel.h"
PA_C_DECL_END

#include <webrtc/modules/audio_processing/include/audio_processing.h>
#include <webrtc/modules/interface/module_common_types.h>
#include <webrtc/system_wrappers/include/trace.h>

#define BLOCK_SIZE_US 10000

#define DEFAULT_HIGH_PASS_FILTER true
#define DEFAULT_NOISE_SUPPRESSION true
#define DEFAULT_ANALOG_GAIN_CONTROL true
#define DEFAULT_DIGITAL_GAIN_CONTROL false
#define DEFAULT_MOBILE false
#define DEFAULT_ROUTING_MODE "speakerphone"
#define DEFAULT_COMFORT_NOISE true
#define DEFAULT_DRIFT_COMPENSATION false
#define DEFAULT_VAD true
#define DEFAULT_EXTENDED_FILTER false
#define DEFAULT_INTELLIGIBILITY_ENHANCER false
#define DEFAULT_EXPERIMENTAL_AGC false
#define DEFAULT_AGC_START_VOLUME 85
#define DEFAULT_BEAMFORMING false
#define DEFAULT_TRACE false

#define WEBRTC_AGC_MAX_VOLUME 255

static const char* const valid_modargs[] = {
    "high_pass_filter",
    "noise_suppression",
    "analog_gain_control",
    "digital_gain_control",
    "mobile",
    "routing_mode",
    "comfort_noise",
    "drift_compensation",
    "voice_detection",
    "extended_filter",
    "intelligibility_enhancer",
    "experimental_agc",
    "agc_start_volume",
    "beamforming",
    "mic_geometry", /* documented in parse_mic_geometry() */
    "target_direction", /* documented in parse_mic_geometry() */
    "trace",
    NULL
};
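
/* Illustrative example: these keys are normally supplied through
 * module-echo-cancel's aec_args argument, e.g. (the specific values below are
 * chosen arbitrarily for the example):
 *
 *   load-module module-echo-cancel aec_method=webrtc \
 *       aec_args="analog_gain_control=0 digital_gain_control=1 noise_suppression=1"
 *
 * Keys not present in the list above cause pa_modargs_new() to fail in
 * pa_webrtc_ec_init() below. */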

static int routing_mode_from_string(const char *rmode) {
    if (pa_streq(rmode, "quiet-earpiece-or-headset"))
        return webrtc::EchoControlMobile::kQuietEarpieceOrHeadset;
    else if (pa_streq(rmode, "earpiece"))
        return webrtc::EchoControlMobile::kEarpiece;
    else if (pa_streq(rmode, "loud-earpiece"))
        return webrtc::EchoControlMobile::kLoudEarpiece;
    else if (pa_streq(rmode, "speakerphone"))
        return webrtc::EchoControlMobile::kSpeakerphone;
    else if (pa_streq(rmode, "loud-speakerphone"))
        return webrtc::EchoControlMobile::kLoudSpeakerphone;
    else
        return -1;
}

class PaWebrtcTraceCallback : public webrtc::TraceCallback {
    void Print(webrtc::TraceLevel level, const char *message, int length)
    {
        if (level & webrtc::kTraceError || level & webrtc::kTraceCritical)
            pa_log("%s", message);
        else if (level & webrtc::kTraceWarning)
            pa_log_warn("%s", message);
        else if (level & webrtc::kTraceInfo)
            pa_log_info("%s", message);
        else
            pa_log_debug("%s", message);
    }
};

static int webrtc_volume_from_pa(pa_volume_t v)
{
    return (v * WEBRTC_AGC_MAX_VOLUME) / PA_VOLUME_NORM;
}

static pa_volume_t webrtc_volume_to_pa(int v)
{
    return (v * PA_VOLUME_NORM) / WEBRTC_AGC_MAX_VOLUME;
}
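
/* Both helpers above are plain linear scalings between PulseAudio's
 * [0, PA_VOLUME_NORM] capture volume range and WebRTC's
 * [0, WEBRTC_AGC_MAX_VOLUME] analog AGC range. For instance,
 * PA_VOLUME_NORM / 2 maps to 127 (integer division truncates), and 255 maps
 * back to exactly PA_VOLUME_NORM. */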

static void webrtc_ec_fixate_spec(pa_sample_spec *rec_ss, pa_channel_map *rec_map,
                                  pa_sample_spec *play_ss, pa_channel_map *play_map,
                                  pa_sample_spec *out_ss, pa_channel_map *out_map,
                                  bool beamforming)
{
    rec_ss->format = PA_SAMPLE_FLOAT32NE;
    play_ss->format = PA_SAMPLE_FLOAT32NE;

    /* AudioProcessing expects one of the following rates */
    if (rec_ss->rate >= 48000)
        rec_ss->rate = 48000;
    else if (rec_ss->rate >= 32000)
        rec_ss->rate = 32000;
    else if (rec_ss->rate >= 16000)
        rec_ss->rate = 16000;
    else
        rec_ss->rate = 8000;

    *out_ss = *rec_ss;
    *out_map = *rec_map;

    if (beamforming) {
        /* The beamformer gives us a single channel */
        out_ss->channels = 1;
        pa_channel_map_init_mono(out_map);
    }

    /* Playback stream rate needs to be the same as capture */
    play_ss->rate = rec_ss->rate;
}
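
/* As an example, a 44.1 kHz capture spec is fixated by the function above to
 * 32 kHz float samples, and the playback stream is forced to the same rate,
 * so that AudioProcessing only ever sees one of its supported rates
 * (8, 16, 32 or 48 kHz). */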

static bool parse_point(const char **point, float (&f)[3]) {
    int ret, length;

    ret = sscanf(*point, "%g,%g,%g%n", &f[0], &f[1], &f[2], &length);
    if (ret != 3)
        return false;

    /* Consume the bytes we've read so far */
    *point += length;

    return true;
}

static bool parse_mic_geometry(const char **mic_geometry, std::vector<webrtc::Point>& geometry) {
    /* The microphone geometry is expressed as cartesian point form:
     *   x1,y1,z1,x2,y2,z2,...
     *
     * Where x1,y1,z1 is the position of the first microphone with regards to
     * the array's "center", x2,y2,z2 the position of the second, and so on.
     *
     * 'x' is the horizontal coordinate, with positive values being to the
     * right from the mic array's perspective.
     *
     * 'y' is the depth coordinate, with positive values being in front of the
     * array.
     *
     * 'z' is the vertical coordinate, with positive values being above the
     * array.
     *
     * All distances are in meters.
     */

    /* The target direction is expected to be in spherical point form:
     *   a,e,r
     *
     * Where 'a' is the azimuth of the target point relative to the center of
     * the array, 'e' its elevation, and 'r' the radius.
     *
     * 0 radians azimuth is to the right of the array, and positive angles
     * move in a counter-clockwise direction.
     *
     * 0 radians elevation is horizontal w.r.t. the array, and positive
     * angles go upwards.
     *
     * radius is distance from the array center in meters.
     */
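
    /* Illustrative example (the values here are chosen purely for the sake of
     * the example): a two-microphone array spaced 8 cm apart along the x axis
     * could be described as
     *
     *   mic_geometry=-0.04,0,0,0.04,0,0
     *
     * and steered straight ahead with
     *
     *   target_direction=1.5708,0,0
     *
     * i.e. pi/2 radians azimuth with zero elevation and radius, since
     * pa_webrtc_ec_init() currently only accepts targeting along the
     * azimuth. */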

    long unsigned int i;
    float f[3];

    for (i = 0; i < geometry.size(); i++) {
        if (!parse_point(mic_geometry, f)) {
            pa_log("Failed to parse channel %lu in mic_geometry", i);
            return false;
        }

        /* Except for the last point, we should have a trailing comma */
        if (i != geometry.size() - 1) {
            if (**mic_geometry != ',') {
                pa_log("Failed to parse mic_geometry value: missing ',' after channel %lu", i);
                return false;
            }

            (*mic_geometry)++;
        }

        pa_log_debug("Got mic #%lu position: (%g, %g, %g)", i, f[0], f[1], f[2]);

        geometry[i].c[0] = f[0];
        geometry[i].c[1] = f[1];
        geometry[i].c[2] = f[2];
    }

    if (**mic_geometry != '\0') {
        pa_log("Failed to parse mic_geometry value: more parameters than expected");
        return false;
    }

    return true;
}

bool pa_webrtc_ec_init(pa_core *c, pa_echo_canceller *ec,
                       pa_sample_spec *rec_ss, pa_channel_map *rec_map,
                       pa_sample_spec *play_ss, pa_channel_map *play_map,
                       pa_sample_spec *out_ss, pa_channel_map *out_map,
                       uint32_t *nframes, const char *args) {
    webrtc::AudioProcessing *apm = NULL;
    webrtc::ProcessingConfig pconfig;
    webrtc::Config config;
    bool hpf, ns, agc, dgc, mobile, cn, vad, ext_filter, intelligibility, experimental_agc, beamforming;
    int rm = -1, i;
    uint32_t agc_start_volume;
    pa_modargs *ma;
    bool trace = false;

    if (!(ma = pa_modargs_new(args, valid_modargs))) {
        pa_log("Failed to parse submodule arguments.");
        goto fail;
    }

    hpf = DEFAULT_HIGH_PASS_FILTER;
    if (pa_modargs_get_value_boolean(ma, "high_pass_filter", &hpf) < 0) {
        pa_log("Failed to parse high_pass_filter value");
        goto fail;
    }

    ns = DEFAULT_NOISE_SUPPRESSION;
    if (pa_modargs_get_value_boolean(ma, "noise_suppression", &ns) < 0) {
        pa_log("Failed to parse noise_suppression value");
        goto fail;
    }

    agc = DEFAULT_ANALOG_GAIN_CONTROL;
    if (pa_modargs_get_value_boolean(ma, "analog_gain_control", &agc) < 0) {
        pa_log("Failed to parse analog_gain_control value");
        goto fail;
    }

    dgc = agc ? false : DEFAULT_DIGITAL_GAIN_CONTROL;
    if (pa_modargs_get_value_boolean(ma, "digital_gain_control", &dgc) < 0) {
        pa_log("Failed to parse digital_gain_control value");
        goto fail;
    }

    if (agc && dgc) {
        pa_log("Only one of analog and digital gain control can be enabled at a time");
        goto fail;
    }

    mobile = DEFAULT_MOBILE;
    if (pa_modargs_get_value_boolean(ma, "mobile", &mobile) < 0) {
        pa_log("Failed to parse mobile value");
        goto fail;
    }

    ec->params.drift_compensation = DEFAULT_DRIFT_COMPENSATION;
    if (pa_modargs_get_value_boolean(ma, "drift_compensation", &ec->params.drift_compensation) < 0) {
        pa_log("Failed to parse drift_compensation value");
        goto fail;
    }

    if (mobile) {
        if (ec->params.drift_compensation) {
            pa_log("Can't use drift_compensation in mobile mode");
            goto fail;
        }

        if ((rm = routing_mode_from_string(pa_modargs_get_value(ma, "routing_mode", DEFAULT_ROUTING_MODE))) < 0) {
            pa_log("Failed to parse routing_mode value");
            goto fail;
        }

        cn = DEFAULT_COMFORT_NOISE;
        if (pa_modargs_get_value_boolean(ma, "comfort_noise", &cn) < 0) {
            pa_log("Failed to parse comfort_noise value");
            goto fail;
        }
    } else {
        if (pa_modargs_get_value(ma, "comfort_noise", NULL) || pa_modargs_get_value(ma, "routing_mode", NULL)) {
            pa_log("The routing_mode and comfort_noise options are only valid with mobile=true");
            goto fail;
        }
    }

    vad = DEFAULT_VAD;
    if (pa_modargs_get_value_boolean(ma, "voice_detection", &vad) < 0) {
        pa_log("Failed to parse voice_detection value");
        goto fail;
    }

    ext_filter = DEFAULT_EXTENDED_FILTER;
    if (pa_modargs_get_value_boolean(ma, "extended_filter", &ext_filter) < 0) {
        pa_log("Failed to parse extended_filter value");
        goto fail;
    }

    intelligibility = DEFAULT_INTELLIGIBILITY_ENHANCER;
    if (pa_modargs_get_value_boolean(ma, "intelligibility_enhancer", &intelligibility) < 0) {
        pa_log("Failed to parse intelligibility_enhancer value");
        goto fail;
    }

    experimental_agc = DEFAULT_EXPERIMENTAL_AGC;
    if (pa_modargs_get_value_boolean(ma, "experimental_agc", &experimental_agc) < 0) {
        pa_log("Failed to parse experimental_agc value");
        goto fail;
    }

    agc_start_volume = DEFAULT_AGC_START_VOLUME;
    if (pa_modargs_get_value_u32(ma, "agc_start_volume", &agc_start_volume) < 0) {
        pa_log("Failed to parse agc_start_volume value");
        goto fail;
    }
    if (agc_start_volume > WEBRTC_AGC_MAX_VOLUME) {
        pa_log("AGC start volume must not exceed %u", WEBRTC_AGC_MAX_VOLUME);
        goto fail;
    }
    ec->params.webrtc.agc_start_volume = agc_start_volume;

    beamforming = DEFAULT_BEAMFORMING;
    if (pa_modargs_get_value_boolean(ma, "beamforming", &beamforming) < 0) {
        pa_log("Failed to parse beamforming value");
        goto fail;
    }

    if (ext_filter)
        config.Set<webrtc::ExtendedFilter>(new webrtc::ExtendedFilter(true));
    if (intelligibility)
        pa_log_warn("The intelligibility enhancer is not currently supported");
    if (experimental_agc)
        config.Set<webrtc::ExperimentalAgc>(new webrtc::ExperimentalAgc(true, ec->params.webrtc.agc_start_volume));

    trace = DEFAULT_TRACE;
    if (pa_modargs_get_value_boolean(ma, "trace", &trace) < 0) {
        pa_log("Failed to parse trace value");
        goto fail;
    }

    if (trace) {
        webrtc::Trace::CreateTrace();
        webrtc::Trace::set_level_filter(webrtc::kTraceAll);
        ec->params.webrtc.trace_callback = new PaWebrtcTraceCallback();
        webrtc::Trace::SetTraceCallback((PaWebrtcTraceCallback *) ec->params.webrtc.trace_callback);
    }

    webrtc_ec_fixate_spec(rec_ss, rec_map, play_ss, play_map, out_ss, out_map, beamforming);

    /* We do this after fixate because we need the capture channel count */
    if (beamforming) {
        std::vector<webrtc::Point> geometry(rec_ss->channels);
        webrtc::SphericalPointf direction(0.0f, 0.0f, 0.0f);
        const char *mic_geometry, *target_direction;

        if (!(mic_geometry = pa_modargs_get_value(ma, "mic_geometry", NULL))) {
            pa_log("mic_geometry must be set if beamforming is enabled");
            goto fail;
        }

        if (!parse_mic_geometry(&mic_geometry, geometry)) {
            pa_log("Failed to parse mic_geometry value");
            goto fail;
        }

        if ((target_direction = pa_modargs_get_value(ma, "target_direction", NULL))) {
            float f[3];

            if (!parse_point(&target_direction, f)) {
                pa_log("Failed to parse target_direction value");
                goto fail;
            }

            if (*target_direction != '\0') {
                pa_log("Failed to parse target_direction value: more parameters than expected");
                goto fail;
            }

#define IS_ZERO(f) ((f) < 0.000001 && (f) > -0.000001)

            if (!IS_ZERO(f[1]) || !IS_ZERO(f[2])) {
                pa_log("The beamformer currently only supports targeting along the azimuth");
                goto fail;
            }

            direction.s[0] = f[0];
            direction.s[1] = f[1];
            direction.s[2] = f[2];
        }

        if (!target_direction)
            config.Set<webrtc::Beamforming>(new webrtc::Beamforming(true, geometry));
        else
            config.Set<webrtc::Beamforming>(new webrtc::Beamforming(true, geometry, direction));
    }

    apm = webrtc::AudioProcessing::Create(config);

    pconfig = {
        webrtc::StreamConfig(rec_ss->rate, rec_ss->channels, false), /* input stream */
        webrtc::StreamConfig(out_ss->rate, out_ss->channels, false), /* output stream */
        webrtc::StreamConfig(play_ss->rate, play_ss->channels, false), /* reverse input stream */
        webrtc::StreamConfig(play_ss->rate, play_ss->channels, false), /* reverse output stream */
    };
    if (apm->Initialize(pconfig) != webrtc::AudioProcessing::kNoError) {
        pa_log("Error initialising audio processing module");
        goto fail;
    }

    if (hpf)
        apm->high_pass_filter()->Enable(true);

    if (!mobile) {
        apm->echo_cancellation()->enable_drift_compensation(ec->params.drift_compensation);
        apm->echo_cancellation()->Enable(true);
    } else {
        apm->echo_control_mobile()->set_routing_mode(static_cast<webrtc::EchoControlMobile::RoutingMode>(rm));
        apm->echo_control_mobile()->enable_comfort_noise(cn);
        apm->echo_control_mobile()->Enable(true);
    }

    if (ns) {
        apm->noise_suppression()->set_level(webrtc::NoiseSuppression::kHigh);
        apm->noise_suppression()->Enable(true);
    }

    if (agc || dgc) {
        if (mobile && rm <= webrtc::EchoControlMobile::kEarpiece) {
            /* Maybe this should be a knob, but we've got a lot of knobs already */
            apm->gain_control()->set_mode(webrtc::GainControl::kFixedDigital);
            ec->params.webrtc.agc = false;
        } else if (dgc) {
            apm->gain_control()->set_mode(webrtc::GainControl::kAdaptiveDigital);
            ec->params.webrtc.agc = false;
        } else {
            apm->gain_control()->set_mode(webrtc::GainControl::kAdaptiveAnalog);
            if (apm->gain_control()->set_analog_level_limits(0, WEBRTC_AGC_MAX_VOLUME) !=
                    webrtc::AudioProcessing::kNoError) {
                pa_log("Failed to initialise AGC");
                goto fail;
            }
            ec->params.webrtc.agc = true;
        }

        apm->gain_control()->Enable(true);
    }

    if (vad)
        apm->voice_detection()->Enable(true);

    ec->params.webrtc.apm = apm;
    ec->params.webrtc.rec_ss = *rec_ss;
    ec->params.webrtc.play_ss = *play_ss;
    ec->params.webrtc.out_ss = *out_ss;
    ec->params.webrtc.blocksize = (uint64_t) out_ss->rate * BLOCK_SIZE_US / PA_USEC_PER_SEC;
    *nframes = ec->params.webrtc.blocksize;
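    /* With a 48 kHz output spec, for example, the block size above works out
     * to 480 frames per 10 ms block. */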
    ec->params.webrtc.first = true;

    for (i = 0; i < rec_ss->channels; i++)
        ec->params.webrtc.rec_buffer[i] = pa_xnew(float, *nframes);
    for (i = 0; i < play_ss->channels; i++)
        ec->params.webrtc.play_buffer[i] = pa_xnew(float, *nframes);

    pa_modargs_free(ma);
    return true;

fail:
    if (ma)
        pa_modargs_free(ma);
    if (ec->params.webrtc.trace_callback) {
        webrtc::Trace::ReturnTrace();
        delete ((PaWebrtcTraceCallback *) ec->params.webrtc.trace_callback);
    }
    if (apm)
        delete apm;

    return false;
}

void pa_webrtc_ec_play(pa_echo_canceller *ec, const uint8_t *play) {
    webrtc::AudioProcessing *apm = (webrtc::AudioProcessing*)ec->params.webrtc.apm;
    const pa_sample_spec *ss = &ec->params.webrtc.play_ss;
    int n = ec->params.webrtc.blocksize;
    float **buf = ec->params.webrtc.play_buffer;
    webrtc::StreamConfig config(ss->rate, ss->channels, false);

    pa_deinterleave(play, (void **) buf, ss->channels, pa_sample_size(ss), n);

    pa_assert_se(apm->ProcessReverseStream(buf, config, config, buf) == webrtc::AudioProcessing::kNoError);

    /* FIXME: If ProcessReverseStream() makes any changes to the audio, such as
     * applying intelligibility enhancement, those changes don't have any
     * effect. This function is called at the source side, but the processing
     * would have to be done in the sink to be able to feed the processed audio
     * to speakers. */
}

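/* Capture-side counterpart of pa_webrtc_ec_play(): deinterleave the recorded
 * block, report the current analog level to the AGC, run ProcessStream(), and
 * push any level change recommended by the AGC back as the new capture
 * volume. */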
void pa_webrtc_ec_record(pa_echo_canceller *ec, const uint8_t *rec, uint8_t *out) {
    webrtc::AudioProcessing *apm = (webrtc::AudioProcessing*)ec->params.webrtc.apm;
    const pa_sample_spec *rec_ss = &ec->params.webrtc.rec_ss;
    const pa_sample_spec *out_ss = &ec->params.webrtc.out_ss;
    float **buf = ec->params.webrtc.rec_buffer;
    int n = ec->params.webrtc.blocksize;
    int old_volume, new_volume;
    webrtc::StreamConfig rec_config(rec_ss->rate, rec_ss->channels, false);
    webrtc::StreamConfig out_config(out_ss->rate, out_ss->channels, false);

    pa_deinterleave(rec, (void **) buf, rec_ss->channels, pa_sample_size(rec_ss), n);

    if (ec->params.webrtc.agc) {
        pa_volume_t v = pa_echo_canceller_get_capture_volume(ec);
        old_volume = webrtc_volume_from_pa(v);
        apm->gain_control()->set_stream_analog_level(old_volume);
    }

    apm->set_stream_delay_ms(0);
    pa_assert_se(apm->ProcessStream(buf, rec_config, out_config, buf) == webrtc::AudioProcessing::kNoError);

    if (ec->params.webrtc.agc) {
        if (PA_UNLIKELY(ec->params.webrtc.first)) {
            /* We start at a sane default volume (taken from the Chromium
             * condition on the experimental AGC in audio_processing.h). This is
             * needed to make sure that there's enough energy in the capture
             * signal for the AGC to work */
            ec->params.webrtc.first = false;
            new_volume = ec->params.webrtc.agc_start_volume;
        } else {
            new_volume = apm->gain_control()->stream_analog_level();
        }

        if (old_volume != new_volume)
            pa_echo_canceller_set_capture_volume(ec, webrtc_volume_to_pa(new_volume));
    }

    pa_interleave((const void **) buf, out_ss->channels, out, pa_sample_size(out_ss), n);
}

void pa_webrtc_ec_set_drift(pa_echo_canceller *ec, float drift) {
    webrtc::AudioProcessing *apm = (webrtc::AudioProcessing*)ec->params.webrtc.apm;

    apm->echo_cancellation()->set_stream_drift_samples(drift * ec->params.webrtc.blocksize);
}

void pa_webrtc_ec_run(pa_echo_canceller *ec, const uint8_t *rec, const uint8_t *play, uint8_t *out) {
    pa_webrtc_ec_play(ec, play);
    pa_webrtc_ec_record(ec, rec, out);
}

void pa_webrtc_ec_done(pa_echo_canceller *ec) {
    int i;

    if (ec->params.webrtc.trace_callback) {
        webrtc::Trace::ReturnTrace();
        delete ((PaWebrtcTraceCallback *) ec->params.webrtc.trace_callback);
    }

    if (ec->params.webrtc.apm) {
        delete (webrtc::AudioProcessing*)ec->params.webrtc.apm;
        ec->params.webrtc.apm = NULL;
    }

    for (i = 0; i < ec->params.webrtc.rec_ss.channels; i++)
        pa_xfree(ec->params.webrtc.rec_buffer[i]);
    for (i = 0; i < ec->params.webrtc.play_ss.channels; i++)
        pa_xfree(ec->params.webrtc.play_buffer[i]);
}