import { Audio, InterruptionModeIOS, InterruptionModeAndroid } from 'expo-av';
import { Platform } from 'react-native';

import { retryForStatus, waitFor } from './helpers';
import * as TestUtils from '../TestUtils';

export const name = 'Recording';

const defaultRecordingDurationMillis = 500;

const amrSettings = {
  android: {
    extension: '.amr',
    outputFormat: Audio.AndroidOutputFormat.AMR_NB,
    audioEncoder: Audio.AndroidAudioEncoder.AMR_NB,
    sampleRate: 8000,
    numberOfChannels: 1,
    bitRate: 128000,
  },
  ios: {
    extension: '.amr',
    outputFormat: Audio.IOSOutputFormat.AMR,
    audioQuality: Audio.IOSAudioQuality.HIGH,
    sampleRate: 8000,
    numberOfChannels: 1,
    bitRate: 128000,
    linearPCMBitDepth: 16,
    linearPCMIsBigEndian: false,
    linearPCMIsFloat: false,
  },
};

// In some tests one can see:
//
// ```
// await recordingObject.startAsync();
// await waitFor(defaultRecordingDurationMillis);
// await recordingObject.stopAndUnloadAsync();
// ```
//
// iOS doesn't require the recording to be started for `stopAndUnload` to succeed;
// Android, however, throws an exception, as intended by the authors:
// > Note that a RuntimeException is intentionally thrown to the application,
// > if no valid audio/video data has been received when stop() is called.
// > This happens if stop() is called immediately after start().
// > Source: https://developer.android.com/reference/android/media/MediaRecorder.html#stop()
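
// A minimal sketch (not part of the original suite) of how the start/wait/stop
// pattern above could be factored into a helper. `recordBrieflyAsync` is a
// hypothetical name; recording for at least `durationMillis` before stopping is
// what keeps Android's MediaRecorder from throwing the RuntimeException described above.
async function recordBrieflyAsync(recording, durationMillis = defaultRecordingDurationMillis) {
  await recording.startAsync();
  await waitFor(durationMillis);
  await recording.stopAndUnloadAsync();
}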

export async function test(t) {
  const shouldSkipTestsRequiringPermissions =
    await TestUtils.shouldSkipTestsRequiringPermissionsAsync();
  // Skip the whole suite when the environment cannot grant recording permissions.
  const describeWithPermissions = shouldSkipTestsRequiringPermissions ? t.xdescribe : t.describe;

  describeWithPermissions('Recording', () => {
    t.beforeAll(async () => {
      await Audio.setAudioModeAsync({
        shouldDuckAndroid: true,
        allowsRecordingIOS: true,
        playsInSilentModeIOS: true,
        staysActiveInBackground: true,
        playThroughEarpieceAndroid: false,
        interruptionModeIOS: InterruptionModeIOS.MixWithOthers,
        interruptionModeAndroid: InterruptionModeAndroid.DuckOthers,
      });

      await TestUtils.acceptPermissionsAndRunCommandAsync(() => {
        return Audio.requestPermissionsAsync();
      });
    });

    // According to the documentation pausing should be supported on Android API >= 24,
    // unfortunately such a test fails on Android API 24.
    const pausingIsSupported = Platform.OS !== 'android' || Platform.Version >= 25;
    let recordingObject = null;

    t.beforeEach(async () => {
      const { status } = await Audio.getPermissionsAsync();
      t.expect(status).toEqual('granted');
      recordingObject = new Audio.Recording();
    });

    t.afterEach(() => {
      recordingObject = null;
    });

    t.describe('Recording.prepareToRecordAsync(preset)', () => {
      t.afterEach(async () => {
        // Record briefly before unloading so Android doesn't throw (see comment above).
        await recordingObject.startAsync();
        await waitFor(defaultRecordingDurationMillis);
        await recordingObject.stopAndUnloadAsync();
      });

      t.it('sets high preset successfully', async () => {
        await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.HIGH_QUALITY);
      });

      t.it('sets low preset successfully', async () => {
        await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);
      });

      t.it('sets custom preset successfully', async () => {
        const commonOptions = {
          bitRate: 8000,
          sampleRate: 8000,
          numberOfChannels: 1,
        };
        await recordingObject.prepareToRecordAsync({
          android: {
            extension: '.aac',
            audioEncoder: Audio.AndroidAudioEncoder.AAC,
            outputFormat: Audio.AndroidOutputFormat.AAC_ADIF,
            ...commonOptions,
          },
          ios: {
            extension: '.ulaw',
            linearPCMBitDepth: 8,
            linearPCMIsFloat: false,
            linearPCMIsBigEndian: false,
            outputFormat: Audio.IOSOutputFormat.ULAW,
            audioQuality: Audio.IOSAudioQuality.MEDIUM,
            ...commonOptions,
          },
        });
      });
    });

    // Such a function exists in the documentation, but not in the implementation.

    // t.describe('Recording.isPreparedToRecord()', () => {
    //   t.beforeEach(
    //     async () =>
    //       await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY)
    //   );
    //   t.afterEach(async () => await recordingObject.stopAndUnloadAsync());

    //   t.it('returns a boolean', () => {
    //     const returnedValue = recordingObject.isPreparedToRecord();
    //     const valueIsABoolean = returnedValue === false || returnedValue === true;
    //     t.expect(valueIsABoolean).toBe(true);
    //   });
    // });

    t.describe('Recording.setOnRecordingStatusUpdate(onRecordingStatusUpdate)', () => {
      t.it('sets a function that gets called when status updates', async () => {
        const onRecordingStatusUpdate = t.jasmine.createSpy('onRecordingStatusUpdate');
        recordingObject.setOnRecordingStatusUpdate(onRecordingStatusUpdate);
        t.expect(onRecordingStatusUpdate).toHaveBeenCalledWith(
          t.jasmine.objectContaining({ canRecord: false })
        );
        await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);
        t.expect(onRecordingStatusUpdate).toHaveBeenCalledWith(
          t.jasmine.objectContaining({ canRecord: true })
        );
        await recordingObject.startAsync();
        await waitFor(defaultRecordingDurationMillis);
        await recordingObject.stopAndUnloadAsync();
      });

      t.it('sets a function that gets called when recording finishes', async () => {
        const onRecordingStatusUpdate = t.jasmine.createSpy('onRecordingStatusUpdate');
        recordingObject.setOnRecordingStatusUpdate(onRecordingStatusUpdate);
        t.expect(onRecordingStatusUpdate).toHaveBeenCalledWith(
          t.jasmine.objectContaining({ canRecord: false })
        );
        await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);
        t.expect(onRecordingStatusUpdate).toHaveBeenCalledWith(
          t.jasmine.objectContaining({ canRecord: true })
        );
        await recordingObject.startAsync();
        await waitFor(defaultRecordingDurationMillis);
        await recordingObject.stopAndUnloadAsync();
        t.expect(onRecordingStatusUpdate).toHaveBeenCalledWith(
          t.jasmine.objectContaining({ isDoneRecording: true, canRecord: false })
        );
      });
    });

    /*t.describe('Recording.setProgressUpdateInterval(millis)', () => {
      t.afterEach(async () => await recordingObject.stopAndUnloadAsync());

      t.it('sets frequency of the progress updates', async () => {
        const onRecordingStatusUpdate = t.jasmine.createSpy('onRecordingStatusUpdate');
        recordingObject.setOnRecordingStatusUpdate(onRecordingStatusUpdate);
        await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);
        await recordingObject.startAsync();
        const updateInterval = 50;
        recordingObject.setProgressUpdateInterval(updateInterval);
        await new Promise(resolve => {
          setTimeout(() => {
            const expectedArgsCount = Platform.OS === 'android' ? 5 : 10;
            t.expect(onRecordingStatusUpdate.calls.count()).toBeGreaterThan(expectedArgsCount);

            const realMillis = map(
              takeRight(filter(flatten(onRecordingStatusUpdate.calls.allArgs()), 'isRecording'), 4),
              'durationMillis'
            );

            for (let i = 3; i > 0; i--) {
              const difference = Math.abs(realMillis[i] - realMillis[i - 1] - updateInterval);
              t.expect(difference).toBeLessThan(updateInterval / 2 + 1);
            }

            resolve();
          }, 800);
        });
      });
    });*/

    t.describe('Recording.getAvailableInputs()', () => {
      t.afterEach(async () => {
        await recordingObject.startAsync();
        await waitFor(defaultRecordingDurationMillis);
        await recordingObject.stopAndUnloadAsync();
      });

      t.it('returns a list of available recording inputs', async () => {
        await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);

        const inputs = await recordingObject.getAvailableInputs();
        t.expect(inputs.length).toBeGreaterThan(0);
      });
    });

    t.describe('Recording.getCurrentInput()', () => {
      t.afterEach(async () => {
        await recordingObject.startAsync();
        await waitFor(defaultRecordingDurationMillis);
        await recordingObject.stopAndUnloadAsync();
      });
      t.it('returns the currently-selected recording input', async () => {
        await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);

        const input = await recordingObject.getCurrentInput();
        t.expect(input).toBeDefined();
      });
    });

    t.describe('Recording.setInput()', () => {
      t.afterEach(async () => {
        await recordingObject.startAsync();
        await waitFor(defaultRecordingDurationMillis);
        await recordingObject.stopAndUnloadAsync();
      });
      t.it('sets the recording input', async () => {
        await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);

        const inputs = await recordingObject.getAvailableInputs();
        const initialInput = inputs[0];
        await recordingObject.setInput(initialInput.uid);
        const currentInput = await recordingObject.getCurrentInput();
        t.expect(currentInput.uid).toEqual(initialInput.uid);
      });
    });

    t.describe('Recording.startAsync()', () => {
      t.afterEach(async () => {
        await waitFor(defaultRecordingDurationMillis);
        await recordingObject.stopAndUnloadAsync();
      });

      t.it('starts a clean recording', async () => {
        await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);
        await recordingObject.startAsync();
        await retryForStatus(recordingObject, { isRecording: true });
      });

      if (pausingIsSupported) {
        t.it('starts a paused recording', async () => {
          await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);
          await recordingObject.startAsync();
          await retryForStatus(recordingObject, { isRecording: true });
          await recordingObject.pauseAsync();
          await retryForStatus(recordingObject, { isRecording: false });
          await recordingObject.startAsync();
          await retryForStatus(recordingObject, { isRecording: true });
        });
      }
    });

    if (pausingIsSupported) {
      t.describe('Recording.pauseAsync()', () => {
        t.it('pauses the recording', async () => {
          await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);
          await recordingObject.startAsync();
          await retryForStatus(recordingObject, { isRecording: true });
          await waitFor(defaultRecordingDurationMillis);
          await recordingObject.pauseAsync();
          await retryForStatus(recordingObject, { isRecording: false });
          await recordingObject.stopAndUnloadAsync();
        });
      });
    }

    t.describe('Recording.getURI()', () => {
      t.it('returns null before the recording is prepared', async () => {
        t.expect(recordingObject.getURI()).toBeNull();
      });

      t.it('returns a string once the recording is prepared', async () => {
        await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);
        await recordingObject.startAsync();
        await waitFor(defaultRecordingDurationMillis);
        if (Platform.OS === 'web') {
          // On web, the URI is not available until the recording has completed.
          t.expect(recordingObject.getURI()).toEqual(null);
        } else {
          t.expect(recordingObject.getURI()).toContain('file:///');
        }
        await recordingObject.stopAndUnloadAsync();
      });
    });

    t.describe('Recording.createNewLoadedSound()', () => {
      let originalConsoleWarn;

      t.beforeAll(() => {
        // Silence 'deprecated' warnings emitted while exercising this legacy API.
        originalConsoleWarn = console.warn;
        console.warn = (...args) => {
          if (typeof args[0] === 'string' && args[0].indexOf('deprecated') > -1) {
            return;
          }
          originalConsoleWarn(...args);
        };
      });

      t.afterAll(() => {
        console.warn = originalConsoleWarn;
        originalConsoleWarn = null;
      });

      t.it('fails if called before the recording is prepared', async () => {
        let error = null;
        try {
          await recordingObject.createNewLoadedSound();
        } catch (err) {
          error = err;
        }
        t.expect(error).toBeDefined();
      });

      if (Platform.OS !== 'android') {
        t.it('fails if called before the recording is started', async () => {
          await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);
          let error = null;
          try {
            await recordingObject.createNewLoadedSound();
          } catch (err) {
            error = err;
          }
          t.expect(error).toBeDefined();
          await recordingObject.stopAndUnloadAsync();
        });
      }

      t.it('fails if called before the recording is stopped', async () => {
        await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);
        await recordingObject.startAsync();
        await waitFor(defaultRecordingDurationMillis);
        let error = null;
        try {
          await recordingObject.createNewLoadedSound();
        } catch (err) {
          error = err;
        }
        t.expect(error).toBeDefined();
        await recordingObject.stopAndUnloadAsync();
      });

      t.it('returns a sound object once the recording is done', async () => {
        await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);
        await recordingObject.startAsync();

        const recordingDuration = defaultRecordingDurationMillis;
        await new Promise((resolve) => {
          setTimeout(async () => {
            await recordingObject.stopAndUnloadAsync();
            let error = null;
            try {
              const { sound } = await recordingObject.createNewLoadedSound();
              await retryForStatus(sound, { isBuffering: false });
              const status = await sound.getStatusAsync();
              // Web doesn't return durations in Chrome - https://bugs.chromium.org/p/chromium/issues/detail?id=642012
              if (Platform.OS !== 'web') {
                // Android is slow and we have to take it into account when checking recording duration.
                t.expect(status.durationMillis).toBeGreaterThan(recordingDuration * (7 / 10));
              }
              t.expect(sound).toBeDefined();
            } catch (err) {
              error = err;
            }
            t.expect(error).toBeNull();

            resolve();
          }, recordingDuration);
        });
      });

      if (Platform.OS === 'android') {
        t.it('raises an error when the recording is in an unreadable format', async () => {
          await recordingObject.prepareToRecordAsync(amrSettings);
          await recordingObject.startAsync();

          const recordingDuration = defaultRecordingDurationMillis;
          await new Promise((resolve) => {
            setTimeout(async () => {
              await recordingObject.stopAndUnloadAsync();
              let error = null;
              try {
                await recordingObject.createNewLoadedSound();
              } catch (err) {
                error = err;
              }
              t.expect(error).toBeDefined();

              resolve();
            }, recordingDuration);
          });
        });
      }
    });

    t.describe('Recording.createNewLoadedSoundAsync()', () => {
      t.it('fails if called before the recording is prepared', async () => {
        let error = null;
        try {
          await recordingObject.createNewLoadedSoundAsync();
        } catch (err) {
          error = err;
        }
        t.expect(error).toBeDefined();
      });

      if (Platform.OS !== 'android') {
        t.it('fails if called before the recording is started', async () => {
          await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);
          let error = null;
          try {
            await recordingObject.createNewLoadedSoundAsync();
          } catch (err) {
            error = err;
          }
          t.expect(error).toBeDefined();
          await recordingObject.stopAndUnloadAsync();
        });
      }

      t.it('fails if called before the recording is stopped', async () => {
        await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);
        await recordingObject.startAsync();
        await waitFor(defaultRecordingDurationMillis);
        let error = null;
        try {
          await recordingObject.createNewLoadedSoundAsync();
        } catch (err) {
          error = err;
        }
        t.expect(error).toBeDefined();
        await recordingObject.stopAndUnloadAsync();
      });

      t.it('returns a sound object once the recording is done', async () => {
        await recordingObject.prepareToRecordAsync(Audio.RecordingOptionsPresets.LOW_QUALITY);
        await recordingObject.startAsync();

        const recordingDuration = defaultRecordingDurationMillis;
        await new Promise((resolve) => {
          setTimeout(async () => {
            await recordingObject.stopAndUnloadAsync();
            let error = null;
            try {
              const { sound } = await recordingObject.createNewLoadedSoundAsync();
              await retryForStatus(sound, { isBuffering: false });
              const status = await sound.getStatusAsync();

              // Web doesn't return durations in Chrome - https://bugs.chromium.org/p/chromium/issues/detail?id=642012
              if (Platform.OS !== 'web') {
                // Android is slow and we have to take it into account when checking recording duration.
                t.expect(status.durationMillis).toBeGreaterThan(recordingDuration * (6 / 10));
              }
              t.expect(sound).toBeDefined();
            } catch (err) {
              error = err;
            }
            t.expect(error).toBeNull();

            resolve();
          }, recordingDuration);
        });
      });

      if (Platform.OS === 'android') {
        t.it('raises an error when the recording is in an unreadable format', async () => {
          await recordingObject.prepareToRecordAsync(amrSettings);
          await recordingObject.startAsync();

          const recordingDuration = defaultRecordingDurationMillis;
          await new Promise((resolve) => {
            setTimeout(async () => {
              await recordingObject.stopAndUnloadAsync();
              let error = null;
              try {
                await recordingObject.createNewLoadedSoundAsync();
              } catch (err) {
                error = err;
              }
              t.expect(error).toBeDefined();

              resolve();
            }, recordingDuration);
          });
        });
      }
    });

    t.describe('Recording.createAsync()', () => {
      t.afterEach(async () => {
        await waitFor(defaultRecordingDurationMillis);
        await recordingObject.stopAndUnloadAsync();
      });

      t.it('creates and starts recording', async () => {
        const { recording } = await Audio.Recording.createAsync(
          Audio.RecordingOptionsPresets.LOW_QUALITY
        );
        recordingObject = recording;
        await retryForStatus(recordingObject, { isRecording: true });
      });
    });
  });
}
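
// A minimal usage sketch (not exercised by the suite above) tying together the
// record-then-play-back flow these tests cover. The helper name
// `recordAndPlayBackAsync` is illustrative and not part of expo-av.
export async function recordAndPlayBackAsync(durationMillis = defaultRecordingDurationMillis) {
  const { recording } = await Audio.Recording.createAsync(
    Audio.RecordingOptionsPresets.LOW_QUALITY
  );
  await waitFor(durationMillis);
  await recording.stopAndUnloadAsync();
  // createNewLoadedSoundAsync returns a playable Sound backed by the recorded file.
  const { sound } = await recording.createNewLoadedSoundAsync();
  await sound.playAsync();
  return { recording, sound };
}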