# Developing Audio Call

During an audio call, audio output (playing the peer voice) and audio input (recording the local voice) are carried out simultaneously. You can use the AudioRenderer to implement audio output and the AudioCapturer to implement audio input.

Before starting or stopping the audio call service, the application should check the [audio scene](audio-call-overview.md#audio-scene) and [ringer mode](audio-call-overview.md#ringer-mode) so that it can apply the appropriate audio management and prompt policies.
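
For example, the following is a minimal sketch of reading the current audio scene and ringer mode through the AudioManager APIs. It only queries the values; how the application reacts to them is left to your own call logic, and the **checkCallEnvironment** function name is illustrative only.

```ts
import { audio } from '@kit.AudioKit';

// A minimal sketch: query the current audio scene and ringer mode so that the
// application can choose suitable management and prompt policies before a call.
async function checkCallEnvironment(): Promise<void> {
  let audioManager = audio.getAudioManager();
  // Audio scene, for example AUDIO_SCENE_DEFAULT or AUDIO_SCENE_VOICE_CHAT.
  let scene: audio.AudioScene = await audioManager.getAudioScene();
  console.info(`Current audio scene: ${scene}`);
  // The ringer mode is obtained from the volume group manager.
  let volumeManager = audioManager.getVolumeManager();
  let groupManager = await volumeManager.getVolumeGroupManager(audio.DEFAULT_VOLUME_GROUP_ID);
  let ringerMode: audio.AudioRingMode = await groupManager.getRingerMode();
  console.info(`Current ringer mode: ${ringerMode}`); // RINGER_MODE_NORMAL, RINGER_MODE_SILENT, or RINGER_MODE_VIBRATE.
}
```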

The sample code below demonstrates the basic process of using the AudioRenderer and AudioCapturer to implement the audio call service; it does not cover the transmission of call data. In actual development, the peer call data received over the network must be decoded before playback, and the local call data must be encoded and packed before being sent to the peer over the network. The sample code reads from and writes to an audio file instead.

## Using AudioRenderer to Play the Peer Voice

This process is similar to the process of [using AudioRenderer to develop audio playback](using-audiorenderer-for-playback.md). The key differences lie in the **audioRendererInfo** parameter and the audio data source. In the **audioRendererInfo** parameter used for audio calling, **usage** must be set to **STREAM_USAGE_VOICE_COMMUNICATION**.

```ts
import { audio } from '@kit.AudioKit';
import { fileIo } from '@kit.CoreFileKit';
import { BusinessError } from '@kit.BasicServicesKit';

const TAG = 'VoiceCallDemoForAudioRenderer';
// The process is similar to the process of using AudioRenderer to develop audio playback. The key differences lie in the audioRendererInfo parameter and audio data source.
class Options {
  offset?: number;
  length?: number;
}
let context = getContext(this);
let bufferSize: number = 0;
let renderModel: audio.AudioRenderer | undefined = undefined;
let audioStreamInfo: audio.AudioStreamInfo = {
  samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000, // Sampling rate.
  channels: audio.AudioChannel.CHANNEL_2, // Channel.
  sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, // Sampling format.
  encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW // Encoding format.
}
let audioRendererInfo: audio.AudioRendererInfo = {
  // Set the parameters related to the call scenario.
  usage: audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION, // Audio stream usage type: VoIP call.
  rendererFlags: 0 // AudioRenderer flag. The default value is 0.
}
let audioRendererOptions: audio.AudioRendererOptions = {
  streamInfo: audioStreamInfo,
  rendererInfo: audioRendererInfo
}

let path = context.cacheDir;
// Ensure that the resource exists in the sandbox path.
let filePath = path + '/StarWars10s-2C-48000-4SW.wav';
let file: fileIo.File = fileIo.openSync(filePath, fileIo.OpenMode.READ_ONLY);

// Callback that supplies audio data to the renderer. In this sample it reads from a file;
// in a real call it would be fed with the decoded peer voice data.
let writeDataCallback = (buffer: ArrayBuffer) => {
  let options: Options = {
    offset: bufferSize,
    length: buffer.byteLength
  }
  fileIo.readSync(file.fd, buffer, options);
  bufferSize += buffer.byteLength;
}

// Create an AudioRenderer instance, and set the events to listen for.
audio.createAudioRenderer(audioRendererOptions, (err: BusinessError, renderer: audio.AudioRenderer) => { // Create an AudioRenderer instance.
  if (!err) {
    console.info(`${TAG}: creating AudioRenderer success`);
    renderModel = renderer;
    if (renderModel !== undefined) {
      renderModel.on('stateChange', (state: audio.AudioState) => { // Set the events to listen for. A callback is invoked when the AudioRenderer is switched to the specified state.
        if (state === audio.AudioState.STATE_PREPARED) {
          console.info('audio renderer state is: STATE_PREPARED');
        }
        if (state === audio.AudioState.STATE_RUNNING) {
          console.info('audio renderer state is: STATE_RUNNING');
        }
      });
      renderModel.on('markReach', 1000, (position: number) => { // Subscribe to the markReach event. A callback is triggered when the number of rendered frames reaches 1000.
        if (position === 1000) {
          console.info('ON Triggered successfully');
        }
      });
      renderModel.on('writeData', writeDataCallback);
    }
  } else {
    console.error(`${TAG}: creating AudioRenderer failed, error: ${err.message}`);
  }
});

// Start audio rendering.
async function start() {
  if (renderModel !== undefined) {
    let stateGroup: number[] = [audio.AudioState.STATE_PREPARED, audio.AudioState.STATE_PAUSED, audio.AudioState.STATE_STOPPED];
    if (stateGroup.indexOf(renderModel.state.valueOf()) === -1) { // Rendering can be started only when the AudioRenderer is in the STATE_PREPARED, STATE_PAUSED, or STATE_STOPPED state.
      console.error(`${TAG}: start failed`);
      return;
    }
    renderModel.start((err: BusinessError) => {
      if (err) {
        console.error('Renderer start failed.');
      } else {
        console.info('Renderer start success.');
      }
    });
  }
}

// Pause the rendering.
async function pause() {
  if (renderModel !== undefined) {
    // Rendering can be paused only when the AudioRenderer is in the STATE_RUNNING state.
    if (renderModel.state.valueOf() !== audio.AudioState.STATE_RUNNING) {
      console.info('Renderer is not running');
      return;
    }
    await renderModel.pause(); // Pause rendering.
    if (renderModel.state.valueOf() === audio.AudioState.STATE_PAUSED) {
      console.info('Renderer is paused.');
    } else {
      console.error('Pausing renderer failed.');
    }
  }
}

// Stop rendering.
async function stop() {
  if (renderModel !== undefined) {
    // The AudioRenderer can be stopped only when it is in the STATE_RUNNING or STATE_PAUSED state.
    if (renderModel.state.valueOf() !== audio.AudioState.STATE_RUNNING && renderModel.state.valueOf() !== audio.AudioState.STATE_PAUSED) {
      console.info('Renderer is not running or paused.');
      return;
    }
    await renderModel.stop(); // Stop rendering.
    if (renderModel.state.valueOf() === audio.AudioState.STATE_STOPPED) {
      console.info('Renderer stopped.');
    } else {
      console.error('Stopping renderer failed.');
    }
  }
}

// Release the instance.
async function release() {
  if (renderModel !== undefined) {
    // The AudioRenderer can be released only when it is not in the STATE_RELEASED state.
    if (renderModel.state.valueOf() === audio.AudioState.STATE_RELEASED) {
      console.info('Renderer already released');
      return;
    }
    await renderModel.release(); // Release the instance.
    if (renderModel.state.valueOf() === audio.AudioState.STATE_RELEASED) {
      console.info('Renderer released');
    } else {
      console.error('Renderer release failed.');
    }
  }
}
```
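
In a real call, the data written in the **writeData** callback comes from the network rather than from a file. The following is a minimal sketch of how such a callback could be fed with decoded peer data; **receivedPackets** and **decodeFrame()** are hypothetical placeholders for the application's own transport layer and audio decoder, not part of the audio framework.

```ts
// Hypothetical sketch: feed the renderer from decoded network packets instead of a file.
let receivedPackets: ArrayBuffer[] = []; // Filled by the application's network receiving logic.

// Placeholder decoder: convert a received packet into PCM data that matches audioStreamInfo.
function decodeFrame(packet: ArrayBuffer): ArrayBuffer {
  return packet;
}

let networkWriteDataCallback = (buffer: ArrayBuffer) => {
  let packet = receivedPackets.shift();
  // Play silence when no peer data has arrived yet.
  let pcm: ArrayBuffer = packet !== undefined ? decodeFrame(packet) : new ArrayBuffer(buffer.byteLength);
  // Copy the decoded PCM data into the buffer provided by the renderer.
  new Uint8Array(buffer).set(new Uint8Array(pcm.slice(0, buffer.byteLength)));
};
```

Such a callback would be registered with `renderModel.on('writeData', networkWriteDataCallback)` in place of the file-based `writeDataCallback`.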

## Using AudioCapturer to Record the Local Voice

This process is similar to the process of [using AudioCapturer to develop audio recording](using-audiocapturer-for-recording.md). The key differences lie in the **audioCapturerInfo** parameter and audio data stream direction. In the **audioCapturerInfo** parameter used for audio calling, **source** must be set to **SOURCE_TYPE_VOICE_COMMUNICATION**.

You must request the **ohos.permission.MICROPHONE** permission for all recording tasks. For details, see [Requesting User Authorization](../../security/AccessToken/request-user-authorization.md).
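
Before creating the AudioCapturer, the application can request this permission at runtime. The following is a minimal sketch, assuming a UIAbilityContext is available and the permission is already declared in **module.json5**; the **requestMicrophonePermission** function name is illustrative only.

```ts
import { abilityAccessCtrl, common, Permissions } from '@kit.AbilityKit';
import { BusinessError } from '@kit.BasicServicesKit';

// A minimal sketch: request the microphone permission from the user at runtime.
// The permission must also be declared in module.json5.
async function requestMicrophonePermission(context: common.UIAbilityContext): Promise<boolean> {
  const permissions: Permissions[] = ['ohos.permission.MICROPHONE'];
  let atManager = abilityAccessCtrl.createAtManager();
  try {
    let result = await atManager.requestPermissionsFromUser(context, permissions);
    // An authResults value of 0 means the user granted the corresponding permission.
    return result.authResults.every((status: number) => status === 0);
  } catch (err) {
    console.error(`Requesting permission failed: ${(err as BusinessError).message}`);
    return false;
  }
}
```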

```ts
import { audio } from '@kit.AudioKit';
import { fileIo } from '@kit.CoreFileKit';
import { BusinessError } from '@kit.BasicServicesKit';

let context = getContext(this);
const TAG = 'VoiceCallDemoForAudioCapturer';
class Options {
  offset?: number;
  length?: number;
}
// The process is similar to the process of using AudioCapturer to develop audio recording. The key differences lie in the audioCapturerInfo parameter and audio data stream direction.
let bufferSize: number = 0;
let audioCapturer: audio.AudioCapturer | undefined = undefined;
let audioStreamInfo: audio.AudioStreamInfo = {
  samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100, // Sampling rate.
  channels: audio.AudioChannel.CHANNEL_1, // Channel.
  sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, // Sampling format.
  encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW // Encoding format.
}
let audioCapturerInfo: audio.AudioCapturerInfo = {
  // Set the parameters related to the call scenario.
  source: audio.SourceType.SOURCE_TYPE_VOICE_COMMUNICATION, // Audio source type: voice communication.
  capturerFlags: 0 // AudioCapturer flag. The default value is 0.
}
let audioCapturerOptions: audio.AudioCapturerOptions = {
  streamInfo: audioStreamInfo,
  capturerInfo: audioCapturerInfo
}

let path = context.cacheDir;
// The captured data is written to this file in the sandbox path.
let filePath = path + '/StarWars10s-2C-48000-4SW.wav';
let file: fileIo.File = fileIo.openSync(filePath, fileIo.OpenMode.READ_WRITE | fileIo.OpenMode.CREATE);

// Callback that receives the captured audio data. In this sample it writes to a file;
// in a real call the data would be encoded and sent to the peer.
let readDataCallback = (buffer: ArrayBuffer) => {
  let options: Options = {
    offset: bufferSize,
    length: buffer.byteLength
  }
  fileIo.writeSync(file.fd, buffer, options);
  bufferSize += buffer.byteLength;
}

// Create an AudioCapturer instance, and set the events to listen for.
async function init() {
  audio.createAudioCapturer(audioCapturerOptions, (err: BusinessError, capturer: audio.AudioCapturer) => { // Create an AudioCapturer instance.
    if (err) {
      console.error(`Invoke createAudioCapturer failed, code is ${err.code}, message is ${err.message}`);
      return;
    }
    console.info(`${TAG}: create AudioCapturer success`);
    audioCapturer = capturer;
    if (audioCapturer !== undefined) {
      audioCapturer.on('markReach', 1000, (position: number) => { // Subscribe to the markReach event. A callback is triggered when the number of captured frames reaches 1000.
        if (position === 1000) {
          console.info('ON Triggered successfully');
        }
      });
      audioCapturer.on('periodReach', 2000, (position: number) => { // Subscribe to the periodReach event. A callback is triggered each time the number of captured frames reaches 2000.
        if (position === 2000) {
          console.info('ON Triggered successfully');
        }
      });
      audioCapturer.on('readData', readDataCallback);
    }
  });
}

// Start audio recording.
async function start() {
  if (audioCapturer !== undefined) {
    let stateGroup: number[] = [audio.AudioState.STATE_PREPARED, audio.AudioState.STATE_PAUSED, audio.AudioState.STATE_STOPPED];
    if (stateGroup.indexOf(audioCapturer.state.valueOf()) === -1) { // Recording can be started only when the AudioCapturer is in the STATE_PREPARED, STATE_PAUSED, or STATE_STOPPED state.
      console.error(`${TAG}: start failed`);
      return;
    }
    audioCapturer.start((err: BusinessError) => {
      if (err) {
        console.error('Capturer start failed.');
      } else {
        console.info('Capturer start success.');
      }
    });
  }
}

// Stop recording.
async function stop() {
  if (audioCapturer !== undefined) {
    // The AudioCapturer can be stopped only when it is in the STATE_RUNNING or STATE_PAUSED state.
    if (audioCapturer.state.valueOf() !== audio.AudioState.STATE_RUNNING && audioCapturer.state.valueOf() !== audio.AudioState.STATE_PAUSED) {
      console.info('Capturer is not running or paused');
      return;
    }
    await audioCapturer.stop(); // Stop recording.
    if (audioCapturer.state.valueOf() === audio.AudioState.STATE_STOPPED) {
      console.info('Capturer stopped');
    } else {
      console.error('Capturer stop failed');
    }
  }
}

// Release the instance.
async function release() {
  if (audioCapturer !== undefined) {
    // The AudioCapturer can be released only when it is not in the STATE_RELEASED or STATE_NEW state.
    if (audioCapturer.state.valueOf() === audio.AudioState.STATE_RELEASED || audioCapturer.state.valueOf() === audio.AudioState.STATE_NEW) {
      console.info('Capturer already released');
      return;
    }
    await audioCapturer.release(); // Release the instance.
    if (audioCapturer.state.valueOf() === audio.AudioState.STATE_RELEASED) {
      console.info('Capturer released');
    } else {
      console.error('Capturer release failed');
    }
  }
}
```
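
Similarly, in a real call the data delivered to the **readData** callback would be encoded and sent to the peer instead of being written to a file. The following is a minimal sketch; **encodeFrame()** and **sendToPeer()** are hypothetical placeholders for the application's own audio encoder and network transport.

```ts
// Hypothetical sketch: forward the captured PCM data to the peer instead of writing a file.
// Placeholder encoder: encode and pack a PCM frame for transmission.
function encodeFrame(pcm: ArrayBuffer): ArrayBuffer {
  return pcm;
}

// Placeholder transport: send a packet to the peer, for example over a socket.
function sendToPeer(packet: ArrayBuffer): void {
  console.info(`send ${packet.byteLength} bytes to the peer`);
}

let networkReadDataCallback = (buffer: ArrayBuffer) => {
  // The buffer delivered by the AudioCapturer contains the captured PCM data.
  // Copy it if it needs to outlive this callback, then encode and send it.
  let packet = encodeFrame(buffer.slice(0));
  sendToPeer(packet);
};
```

Such a callback would be registered with `audioCapturer.on('readData', networkReadDataCallback)` in place of the file-based `readDataCallback`.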