/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import android.annotation.IntRange;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.content.Context;
import android.content.pm.PackageManager;
import android.media.AudioAttributes;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTimestamp;
import android.media.AudioTrack;
import android.os.Looper;
import android.os.PersistableBundle;
import android.util.Log;

import androidx.test.InstrumentationRegistry;

import com.android.compatibility.common.util.CddTest;
import com.android.compatibility.common.util.DeviceReportLog;
import com.android.compatibility.common.util.ResultType;
import com.android.compatibility.common.util.ResultUnit;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;

// Used for statistics and loopers in listener tests.
// See AudioRecordTest.java and AudioTrack_ListenerTest.java.
public class AudioHelper {

    // asserts key equals expected in the metrics bundle.
    public static void assertMetricsKeyEquals(
            PersistableBundle metrics, String key, Object expected) {
        Object actual = metrics.get(key);
        assertEquals("metric " + key + " actual " + actual + " != " + " expected " + expected,
                expected, actual);
    }
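
    // Illustrative usage sketch (not part of the original helpers): these asserts are intended
    // for a metrics bundle obtained from the API under test, e.g. AudioTrack#getMetrics().
    // The key shown below is one of the documented AudioTrack.MetricsConstants; the
    // assertMetricsKeyEquals key/value pair is a hypothetical placeholder.
    //
    //     PersistableBundle metrics = track.getMetrics();
    //     AudioHelper.assertMetricsKey(metrics, AudioTrack.MetricsConstants.SAMPLERATE);
    //     // assertMetricsKeyEquals additionally checks the stored value, e.g.:
    //     // AudioHelper.assertMetricsKeyEquals(metrics, "some.key", "some.expected.value");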

    // asserts key exists in the metrics bundle.
    public static void assertMetricsKey(PersistableBundle metrics, String key) {
        Object actual = metrics.get(key);
        assertNotNull("metric " + key + " does not exist", actual);
    }

    // create sine waves or chirps for data arrays
    public static byte[] createSoundDataInByteArray(int bufferSamples, final int sampleRate,
            final double frequency, double sweep) {
        final double rad = 2 * Math.PI * frequency / sampleRate;
        byte[] vai = new byte[bufferSamples];
        sweep = Math.PI * sweep / ((double)sampleRate * vai.length);
        for (int j = 0; j < vai.length; j++) {
            int unsigned = (int)(Math.sin(j * (rad + j * sweep)) * Byte.MAX_VALUE)
                    + Byte.MAX_VALUE & 0xFF;
            vai[j] = (byte) unsigned;
        }
        return vai;
    }

    public static short[] createSoundDataInShortArray(int bufferSamples, final int sampleRate,
            final double frequency, double sweep) {
        final double rad = 2 * Math.PI * frequency / sampleRate;
        short[] vai = new short[bufferSamples];
        sweep = Math.PI * sweep / ((double)sampleRate * vai.length);
        for (int j = 0; j < vai.length; j++) {
            vai[j] = (short)(Math.sin(j * (rad + j * sweep)) * Short.MAX_VALUE);
        }
        return vai;
    }

    public static float[] createSoundDataInFloatArray(int bufferSamples, final int sampleRate,
            final double frequency, double sweep) {
        final double rad = 2 * Math.PI * frequency / sampleRate;
        float[] vaf = new float[bufferSamples];
        sweep = Math.PI * sweep / ((double)sampleRate * vaf.length);
        for (int j = 0; j < vaf.length; j++) {
            vaf[j] = (float)(Math.sin(j * (rad + j * sweep)));
        }
        return vaf;
    }

    /**
     * Creates a {@link ByteBuffer} containing short values defining a sine wave or chirp sound.
     *
     * @param bufferSamples number of short samples in the buffer
     * @param sampleRate of the output signal
     * @param frequency the base frequency of the sine wave
     * @param sweep if 0, generates a sine wave at the given frequency; otherwise a chirp sound
     * @return a newly allocated {@link ByteBuffer} containing the described audio signal
     */
    public static ByteBuffer createSoundDataInShortByteBuffer(int bufferSamples,
            final int sampleRate, final double frequency, double sweep) {
        final double rad = 2.0f * (float) Math.PI * frequency / (float) sampleRate;
        ByteBuffer audioBuffer = ByteBuffer.allocate(bufferSamples * Short.BYTES);
        ShortBuffer samples = audioBuffer.order(ByteOrder.nativeOrder()).asShortBuffer();
        sweep = Math.PI * sweep / ((double) sampleRate * bufferSamples);
        for (int j = 0; j < bufferSamples; ++j) {
            short vai = (short) (Math.sin(j * (rad + j * sweep)) * Short.MAX_VALUE);
            samples.put(vai);
        }

        audioBuffer.rewind();
        return audioBuffer;
    }
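
    // Illustrative usage sketch (not part of the original helpers): generate one second of a
    // mono 1 kHz tone at 48 kHz in two of the supported sample formats. A non-zero sweep
    // produces a chirp instead of a pure tone. The variable names and values are arbitrary.
    //
    //     final int sampleRate = 48000;
    //     short[] sine16 = AudioHelper.createSoundDataInShortArray(
    //             sampleRate /*bufferSamples*/, sampleRate, 1000 /*frequency*/, 0 /*sweep*/);
    //     float[] chirpFloat = AudioHelper.createSoundDataInFloatArray(
    //             sampleRate /*bufferSamples*/, sampleRate, 1000 /*frequency*/, 400 /*sweep*/);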

    /**
     * Returns a consecutive bit mask starting from the 0th bit indicating which channels
     * are active, used for maskArray below.
     *
     * @param channelMask the channel mask for audio data.
     * @param validMask the valid channels to permit; should be a subset of channelMask, but
     *     this is not checked.
     * @return an integer whose consecutive bits are set for the channels that are permitted.
     */
    private static int packMask(int channelMask, int validMask) {
        final int channels = Integer.bitCount(channelMask);
        if (channels == 0) {
            throw new IllegalArgumentException("invalid channel mask " + channelMask);
        }
        int packMask = 0;
        for (int i = 0; i < channels; ++i) {
            final int lowbit = channelMask & -channelMask;
            packMask |= (validMask & lowbit) != 0 ? (1 << i) : 0;
            channelMask -= lowbit;
        }
        return packMask;
    }

    /**
     * Zeroes out channels in an array of audio data for testing.
     *
     * @param array of audio data.
     * @param channelMask representation for the audio data.
     * @param validMask which channels are valid (other channels will be zeroed out). A subset
     *     of channelMask.
     */
    public static void maskArray(byte[] array, int channelMask, int validMask) {
        final int packMask = packMask(channelMask, validMask);
        final int channels = Integer.bitCount(channelMask);
        int j = 0;
        for (int i = 0; i < array.length; ++i) {
            if ((packMask & (1 << j)) == 0) {
                array[i] = 0;
            }
            if (++j >= channels) {
                j = 0;
            }
        }
    }

    public static void maskArray(short[] array, int channelMask, int validMask) {
        final int packMask = packMask(channelMask, validMask);
        final int channels = Integer.bitCount(channelMask);
        int j = 0;
        for (int i = 0; i < array.length; ++i) {
            if ((packMask & (1 << j)) == 0) {
                array[i] = 0;
            }
            if (++j >= channels) {
                j = 0;
            }
        }
    }

    public static void maskArray(float[] array, int channelMask, int validMask) {
        final int packMask = packMask(channelMask, validMask);
        final int channels = Integer.bitCount(channelMask);
        int j = 0;
        for (int i = 0; i < array.length; ++i) {
            if ((packMask & (1 << j)) == 0) {
                array[i] = 0;
            }
            if (++j >= channels) {
                j = 0;
            }
        }
    }
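
    // Worked example (illustrative): for interleaved stereo data, channelMask =
    // AudioFormat.CHANNEL_OUT_STEREO (two channels) with validMask =
    // AudioFormat.CHANNEL_OUT_FRONT_LEFT packs to 0b01, so maskArray() zeroes every second
    // sample (the right channel) and leaves the left channel untouched:
    //
    //     short[] stereo = AudioHelper.createSoundDataInShortArray(960, 48000, 1000, 0);
    //     AudioHelper.maskArray(stereo, AudioFormat.CHANNEL_OUT_STEREO,
    //             AudioFormat.CHANNEL_OUT_FRONT_LEFT);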

    /**
     * Create and fill a short array with complete sine waves so we can
     * hear buffer underruns more easily.
     */
    public static short[] createSineWavesShort(int numFrames, int samplesPerFrame,
            int numCycles, double amplitude) {
        final short[] data = new short[numFrames * samplesPerFrame];
        final double rad = numCycles * 2.0 * Math.PI / numFrames;
        for (int j = 0; j < data.length;) {
            short sample = (short)(amplitude * Math.sin(j * rad) * Short.MAX_VALUE);
            for (int sampleIndex = 0; sampleIndex < samplesPerFrame; sampleIndex++) {
                data[j++] = sample;
            }
        }
        return data;
    }

    public static int frameSizeFromFormat(AudioFormat format) {
        return format.getChannelCount()
                * format.getBytesPerSample(format.getEncoding());
    }

    public static int frameCountFromMsec(int ms, AudioFormat format) {
        return ms * format.getSampleRate() / 1000;
    }

    public static boolean hasAudioSilentProperty() {
        String silent = null;

        try {
            silent = (String) Class.forName("android.os.SystemProperties").getMethod("get",
                    String.class).invoke(null, "ro.audio.silent");
        } catch (Exception e) {
            // pass through
        }

        if (silent != null && silent.equals("1")) {
            return true;
        }

        return false;
    }

    public static class Statistics {
        public void add(double value) {
            final double absValue = Math.abs(value);
            mSum += value;
            mSumAbs += absValue;
            mMaxAbs = Math.max(mMaxAbs, absValue);
            ++mCount;
        }

        public double getAvg() {
            if (mCount == 0) {
                return 0;
            }
            return mSum / mCount;
        }

        public double getAvgAbs() {
            if (mCount == 0) {
                return 0;
            }
            return mSumAbs / mCount;
        }

        public double getMaxAbs() {
            return mMaxAbs;
        }

        private int mCount = 0;
        private double mSum = 0;
        private double mSumAbs = 0;
        private double mMaxAbs = 0;
    }

    // for listener tests
    // lightweight java.util.concurrent.Future*
    public static class FutureLatch<T>
    {
        private T mValue;
        private boolean mSet;

        public void set(T value)
        {
            synchronized (this) {
                assert !mSet;
                mValue = value;
                mSet = true;
                notify();
            }
        }

        public T get()
        {
            T value;
            synchronized (this) {
                while (!mSet) {
                    try {
                        wait();
                    } catch (InterruptedException e) {
                        ;
                    }
                }
                value = mValue;
            }
            return value;
        }
    }
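
    // Illustrative usage sketch (not part of the original helpers): a worker thread publishes
    // a value exactly once with set(), and the waiting thread blocks in get() until it arrives.
    //
    //     final FutureLatch<Integer> latch = new FutureLatch<Integer>();
    //     new Thread(() -> latch.set(42)).start();
    //     int value = latch.get(); // blocks until the worker calls set()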

    // for listener tests
    // represents a factory for T
    public interface MakesSomething<T>
    {
        T makeSomething();
    }

    // for listener tests
    // used to construct an object in the context of an asynchronous thread with looper
    public static class MakeSomethingAsynchronouslyAndLoop<T>
    {
        private Thread mThread;
        volatile private Looper mLooper;
        private final MakesSomething<T> mWhatToMake;

        public MakeSomethingAsynchronouslyAndLoop(MakesSomething<T> whatToMake)
        {
            assert whatToMake != null;
            mWhatToMake = whatToMake;
        }

        public T make()
        {
            final FutureLatch<T> futureLatch = new FutureLatch<T>();
            mThread = new Thread()
            {
                @Override
                public void run()
                {
                    Looper.prepare();
                    mLooper = Looper.myLooper();
                    T something = mWhatToMake.makeSomething();
                    futureLatch.set(something);
                    Looper.loop();
                }
            };
            mThread.start();
            return futureLatch.get();
        }

        public void join()
        {
            mLooper.quit();
            try {
                mThread.join();
            } catch (InterruptedException e) {
                ;
            }
            // avoid dangling references
            mLooper = null;
            mThread = null;
        }
    }
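
    // Illustrative usage sketch (not part of the original helpers): build an object on a
    // dedicated thread that keeps a Looper running, so listeners using the default handler are
    // invoked on that thread; call join() when done to quit the looper. The default AudioTrack
    // construction here is only an assumed example of "something" to make.
    //
    //     MakeSomethingAsynchronouslyAndLoop<AudioTrack> maker =
    //             new MakeSomethingAsynchronouslyAndLoop<AudioTrack>(
    //                     () -> new AudioTrack.Builder().build());
    //     AudioTrack track = maker.make(); // constructed on the looper thread
    //     // ... exercise the track and its listeners ...
    //     maker.join();                    // quits the looper and joins the thread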

    public static int outChannelMaskFromInChannelMask(int channelMask) {
        switch (channelMask) {
            case AudioFormat.CHANNEL_IN_MONO:
                return AudioFormat.CHANNEL_OUT_MONO;
            case AudioFormat.CHANNEL_IN_STEREO:
                return AudioFormat.CHANNEL_OUT_STEREO;
            default:
                return AudioFormat.CHANNEL_INVALID;
        }
    }

    @CddTest(requirement="5.10/C-1-6,C-1-7")
    public static class TimestampVerifier {

        // CDD 5.6 1ms timestamp accuracy
        // a validity check
        private static final double TEST_MAX_JITTER_MS_ALLOWED = isWatch() ? 23. : 6.;
        // flaky tolerance 3x
        private static final double TEST_STD_JITTER_MS_ALLOWED = isWatch() ? 7. : 3.;
        // CDD requirement warning
        private static final double TEST_STD_JITTER_MS_WARN = 1.;

        // CDD 5.6 100ms track startup latency
        private static final double TEST_STARTUP_TIME_MS_ALLOWED = 500.; // error
        private final double TEST_STARTUP_TIME_MS_WARN;                  // warning
        private static final double TEST_STARTUP_TIME_MS_INFO = 100.;    // informational

        private static final int MILLIS_PER_SECOND = 1000;
        private static final long NANOS_PER_MILLISECOND = 1000000;
        private static final long NANOS_PER_SECOND = NANOS_PER_MILLISECOND * MILLIS_PER_SECOND;
        private static final String REPORT_LOG_NAME = "CtsMediaTestCases";

        private final String mTag;
        private final int mSampleRate;
        private final long mStartFrames; // initial timestamp condition for verification.

        // Running statistics
        private int mCount = 0;
        private long mLastFrames = 0;
        private long mLastTimeNs = 0;
        private int mJitterCount = 0;
        private double mMeanJitterMs = 0.;
        private double mSecondMomentJitterMs = 0.;
        private double mMaxAbsJitterMs = 0.;
        private int mWarmupCount = 0;

        public TimestampVerifier(@Nullable String tag, @IntRange(from=4000) int sampleRate,
                long startFrames, boolean isProAudioDevice) {
            mTag = tag; // Log accepts null
            mSampleRate = sampleRate;
            mStartFrames = startFrames;
            // Warning if higher than MUST value for pro audio. Zero means ignore.
            TEST_STARTUP_TIME_MS_WARN = isProAudioDevice ? 200. : 0.;
        }

        public int getJitterCount() { return mJitterCount; }
        public double getMeanJitterMs() { return mMeanJitterMs; }
        public double getStdJitterMs() { return Math.sqrt(mSecondMomentJitterMs / mJitterCount); }
        public double getMaxAbsJitterMs() { return mMaxAbsJitterMs; }
        public double getStartTimeNs() {
            return mLastTimeNs - ((mLastFrames - mStartFrames) * NANOS_PER_SECOND / mSampleRate);
        }

        public void add(@NonNull AudioTimestamp ts) {
            final long frames = ts.framePosition;
            final long timeNs = ts.nanoTime;

            assertTrue(mTag + " timestamps must have causal time", System.nanoTime() >= timeNs);

            if (mCount > 0) { // need delta info from previous iteration (skipping first)
                final long deltaFrames = frames - mLastFrames;
                final long deltaTimeNs = timeNs - mLastTimeNs;

                if (deltaFrames == 0 && deltaTimeNs == 0) return;

                final double deltaFramesNs = (double)deltaFrames * NANOS_PER_SECOND / mSampleRate;
                final double jitterMs = (deltaTimeNs - deltaFramesNs) // actual - expected
                        * (1. / NANOS_PER_MILLISECOND);

                Log.d(mTag, "frames(" + frames
                        + ") timeNs(" + timeNs
                        + ") lastframes(" + mLastFrames
                        + ") lastTimeNs(" + mLastTimeNs
                        + ") deltaFrames(" + deltaFrames
                        + ") deltaTimeNs(" + deltaTimeNs
                        + ") jitterMs(" + jitterMs + ")");
                assertTrue(mTag + " timestamp time should be increasing", deltaTimeNs >= 0);
                assertTrue(mTag + " timestamp frames should be increasing", deltaFrames >= 0);

                if (mLastFrames != 0) {
                    if (mWarmupCount++ > 1) { // ensure device is warmed up
                        // Welford's algorithm
                        // https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
                        ++mJitterCount;
                        final double delta = jitterMs - mMeanJitterMs;
                        mMeanJitterMs += delta / mJitterCount;
                        final double delta2 = jitterMs - mMeanJitterMs;
                        mSecondMomentJitterMs += delta * delta2;

                        // jitterMs is signed, so max uses abs() here.
                        final double absJitterMs = Math.abs(jitterMs);
                        if (absJitterMs > mMaxAbsJitterMs) {
                            mMaxAbsJitterMs = absJitterMs;
                        }
                    }
                }
            }
            ++mCount;
            mLastFrames = frames;
            mLastTimeNs = timeNs;
        }
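
        // Worked example for the jitter computation above (illustrative numbers): at a sample
        // rate of 48000 Hz, deltaFrames = 480 corresponds to an expected elapsed time of
        // 480 * 1e9 / 48000 = 10,000,000 ns. If the reported deltaTimeNs is 10,250,000 ns, the
        // jitter is (10,250,000 - 10,000,000) / 1e6 = +0.25 ms; a negative value means the
        // timestamp advanced faster than the frame count predicts.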

        public void verifyAndLog(long trackStartTimeNs, @Nullable String logName) {
            // enough timestamps?
            assertTrue(mTag + " need at least 2 jitter measurements", mJitterCount >= 2);

            // Compute startup time and std jitter.
            final int startupTimeMs =
                    (int) ((getStartTimeNs() - trackStartTimeNs) / NANOS_PER_MILLISECOND);
            final double stdJitterMs = getStdJitterMs();

            // Check startup time
            assertTrue(mTag + " expect startupTimeMs " + startupTimeMs
                    + " <= " + TEST_STARTUP_TIME_MS_ALLOWED,
                    startupTimeMs <= TEST_STARTUP_TIME_MS_ALLOWED);
            if (TEST_STARTUP_TIME_MS_WARN > 0 && startupTimeMs > TEST_STARTUP_TIME_MS_WARN) {
                Log.w(mTag, "CDD warning: startup time " + startupTimeMs
                        + " > " + TEST_STARTUP_TIME_MS_WARN);
            } else if (startupTimeMs > TEST_STARTUP_TIME_MS_INFO) {
                Log.i(mTag, "CDD informational: startup time " + startupTimeMs
                        + " > " + TEST_STARTUP_TIME_MS_INFO);
            }

            // Check maximum jitter
            assertTrue(mTag + " expect maxAbsJitterMs(" + mMaxAbsJitterMs + ") < "
                    + TEST_MAX_JITTER_MS_ALLOWED,
                    mMaxAbsJitterMs < TEST_MAX_JITTER_MS_ALLOWED);

            // Check std jitter
            if (stdJitterMs > TEST_STD_JITTER_MS_WARN) {
                Log.w(mTag, "CDD warning: std timestamp jitter " + stdJitterMs
                        + " > " + TEST_STD_JITTER_MS_WARN);
            }
            assertTrue(mTag + " expect stdJitterMs " + stdJitterMs
                    + " < " + TEST_STD_JITTER_MS_ALLOWED,
                    stdJitterMs < TEST_STD_JITTER_MS_ALLOWED);

            Log.d(mTag, "startupTimeMs(" + startupTimeMs
                    + ") meanJitterMs(" + mMeanJitterMs
                    + ") maxAbsJitterMs(" + mMaxAbsJitterMs
                    + ") stdJitterMs(" + stdJitterMs
                    + ")");

            // Log results if logName is provided
            if (logName != null) {
                DeviceReportLog log = new DeviceReportLog(REPORT_LOG_NAME, logName);
                // ReportLog needs at least one Value and Summary.
                log.addValue("startup_time_ms", startupTimeMs,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                log.addValue("maximum_abs_jitter_ms", mMaxAbsJitterMs,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                log.addValue("mean_jitter_ms", mMeanJitterMs,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                log.setSummary("std_jitter_ms", stdJitterMs,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                log.submit(androidx.test.platform.app.InstrumentationRegistry.getInstrumentation());
            }
        }

        private static Context getContext() {
            return androidx.test.platform.app.InstrumentationRegistry.getInstrumentation()
                    .getTargetContext();
        }

        private static boolean isWatch() {
            return getContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_WATCH);
        }
    }
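
    // Illustrative usage sketch (not part of the original helpers): feed timestamps polled from
    // a playing AudioTrack into a TimestampVerifier, then check them against the CDD limits.
    // The polling count and sleep interval are assumptions; exception handling is omitted.
    //
    //     TimestampVerifier verifier = new TimestampVerifier("MyTest", track.getSampleRate(),
    //             0 /*startFrames*/, false /*isProAudioDevice*/);
    //     final long trackStartTimeNs = System.nanoTime();
    //     track.play();
    //     AudioTimestamp ts = new AudioTimestamp();
    //     for (int i = 0; i < 10; ++i) {
    //         Thread.sleep(100);
    //         if (track.getTimestamp(ts)) {
    //             verifier.add(ts);
    //         }
    //     }
    //     verifier.verifyAndLog(trackStartTimeNs, null /*logName*/);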

    /* AudioRecordAudit extends AudioRecord to allow concurrent playback
     * of read content to an AudioTrack. This is for testing only.
     * For general applications, it is NOT recommended to extend AudioRecord.
     * This affects AudioRecord timing.
     */
    public static class AudioRecordAudit extends AudioRecord {
        public AudioRecordAudit(int audioSource, int sampleRate, int channelMask,
                int format, int bufferSize, boolean isChannelIndex) {
            this(audioSource, sampleRate, channelMask, format, bufferSize, isChannelIndex,
                    AudioManager.STREAM_MUSIC, 500 /*delayMs*/);
        }
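
        // Sizing note (illustrative arithmetic): with the default delayMs of 500 at a 48000 Hz
        // sample rate, the audit AudioTrack created below is sized for 48000 * 500 / 1000 =
        // 24000 frames; for 16-bit stereo that is 48000 samples, or 96000 bytes of buffer.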

        public AudioRecordAudit(int audioSource, int sampleRate, int channelMask,
                int format, int bufferSize,
                boolean isChannelIndex, int auditStreamType, int delayMs) {
            // without channel index masks, one could call:
            // super(audioSource, sampleRate, channelMask, format, bufferSize);
            super(new AudioAttributes.Builder()
                            .setInternalCapturePreset(audioSource)
                            .build(),
                    (isChannelIndex
                            ? new AudioFormat.Builder().setChannelIndexMask(channelMask)
                            : new AudioFormat.Builder().setChannelMask(channelMask))
                            .setEncoding(format)
                            .setSampleRate(sampleRate)
                            .build(),
                    bufferSize,
                    AudioManager.AUDIO_SESSION_ID_GENERATE);

            if (delayMs >= 0) { // create an AudioTrack
                final int channelOutMask = isChannelIndex ? channelMask :
                        outChannelMaskFromInChannelMask(channelMask);
                final int bufferOutFrames = sampleRate * delayMs / 1000;
                final int bufferOutSamples = bufferOutFrames
                        * AudioFormat.channelCountFromOutChannelMask(channelOutMask);
                final int bufferOutSize = bufferOutSamples
                        * AudioFormat.getBytesPerSample(format);

                // Caution: delayMs too large results in buffer sizes that cannot be created.
                mTrack = new AudioTrack.Builder()
                        .setAudioAttributes(new AudioAttributes.Builder()
                                .setLegacyStreamType(auditStreamType)
                                .build())
                        .setAudioFormat((isChannelIndex
                                ? new AudioFormat.Builder().setChannelIndexMask(channelOutMask)
                                : new AudioFormat.Builder().setChannelMask(channelOutMask))
                                .setEncoding(format)
                                .setSampleRate(sampleRate)
                                .build())
                        .setBufferSizeInBytes(bufferOutSize)
                        .build();
                assertEquals(AudioTrack.STATE_INITIALIZED, mTrack.getState());
                mTrackPosition = 0;
                mFinishAtMs = 0;
            }
        }

        @Override
        public int read(byte[] audioData, int offsetInBytes, int sizeInBytes) {
            // for byte array access we verify format is 8 bit PCM (typical use)
            assertEquals(TAG + ": format mismatch",
                    AudioFormat.ENCODING_PCM_8BIT, getAudioFormat());
            int samples = super.read(audioData, offsetInBytes, sizeInBytes);
            if (mTrack != null) {
                final int result = mTrack.write(audioData, offsetInBytes, samples,
                        AudioTrack.WRITE_NON_BLOCKING);
                mTrackPosition += result / mTrack.getChannelCount();
            }
            return samples;
        }

        @Override
        public int read(byte[] audioData, int offsetInBytes, int sizeInBytes, int readMode) {
            // for byte array access we verify format is 8 bit PCM (typical use)
            assertEquals(TAG + ": format mismatch",
                    AudioFormat.ENCODING_PCM_8BIT, getAudioFormat());
            int samples = super.read(audioData, offsetInBytes, sizeInBytes, readMode);
            if (mTrack != null) {
                final int result = mTrack.write(audioData, offsetInBytes, samples,
                        AudioTrack.WRITE_NON_BLOCKING);
                mTrackPosition += result / mTrack.getChannelCount();
            }
            return samples;
        }

        @Override
        public int read(short[] audioData, int offsetInShorts, int sizeInShorts) {
            // for short array access we verify format is 16 bit PCM (typical use)
            assertEquals(TAG + ": format mismatch",
                    AudioFormat.ENCODING_PCM_16BIT, getAudioFormat());
            int samples = super.read(audioData, offsetInShorts, sizeInShorts);
            if (mTrack != null) {
                final int result = mTrack.write(audioData, offsetInShorts, samples,
                        AudioTrack.WRITE_NON_BLOCKING);
                mTrackPosition += result / mTrack.getChannelCount();
            }
            return samples;
        }

        @Override
        public int read(short[] audioData, int offsetInShorts, int sizeInShorts, int readMode) {
            // for short array access we verify format is 16 bit PCM (typical use)
            assertEquals(TAG + ": format mismatch",
                    AudioFormat.ENCODING_PCM_16BIT, getAudioFormat());
            int samples = super.read(audioData, offsetInShorts, sizeInShorts, readMode);
            if (mTrack != null) {
                final int result = mTrack.write(audioData, offsetInShorts, samples,
                        AudioTrack.WRITE_NON_BLOCKING);
                mTrackPosition += result / mTrack.getChannelCount();
            }
            return samples;
        }

        @Override
        public int read(float[] audioData, int offsetInFloats, int sizeInFloats, int readMode) {
            // for float array access we verify format is float PCM (typical use)
            assertEquals(TAG + ": format mismatch",
                    AudioFormat.ENCODING_PCM_FLOAT, getAudioFormat());
            int samples = super.read(audioData, offsetInFloats, sizeInFloats, readMode);
            if (mTrack != null) {
                final int result = mTrack.write(audioData, offsetInFloats, samples,
                        AudioTrack.WRITE_NON_BLOCKING);
                mTrackPosition += result / mTrack.getChannelCount();
            }
            return samples;
        }
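
        // Unit bookkeeping for the overrides below (descriptive note): the array-based read()
        // variants above return a count in samples, so frames = samples / channelCount, while
        // the ByteBuffer variants return a count in bytes, so frames = bytes / (channelCount *
        // bytesPerSample). For 16-bit stereo, one frame is 2 samples or 4 bytes.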

        @Override
        public int read(ByteBuffer audioBuffer, int sizeInBytes) {
            int bytes = super.read(audioBuffer, sizeInBytes);
            if (mTrack != null) {
                // read does not affect position and limit of the audioBuffer.
                // we make a duplicate to change that for writing to the output AudioTrack
                // which does check position and limit.
                ByteBuffer copy = audioBuffer.duplicate();
                copy.position(0).limit(bytes); // read places data at the start of the buffer.
                final int result = mTrack.write(copy, bytes, AudioTrack.WRITE_NON_BLOCKING);
                mTrackPosition += result / (mTrack.getChannelCount()
                        * AudioFormat.getBytesPerSample(mTrack.getAudioFormat()));
            }
            return bytes;
        }

        @Override
        public int read(ByteBuffer audioBuffer, int sizeInBytes, int readMode) {
            int bytes = super.read(audioBuffer, sizeInBytes, readMode);
            if (mTrack != null) {
                // read does not affect position and limit of the audioBuffer.
                // we make a duplicate to change that for writing to the output AudioTrack
                // which does check position and limit.
                ByteBuffer copy = audioBuffer.duplicate();
                copy.position(0).limit(bytes); // read places data at the start of the buffer.
                final int result = mTrack.write(copy, bytes, AudioTrack.WRITE_NON_BLOCKING);
                mTrackPosition += result / (mTrack.getChannelCount()
                        * AudioFormat.getBytesPerSample(mTrack.getAudioFormat()));
            }
            return bytes;
        }

        @Override
        public void startRecording() {
            super.startRecording();
            if (mTrack != null) {
                mTrack.play();
            }
        }

        @Override
        public void stop() {
            super.stop();
            if (mTrack != null) {
                if (mTrackPosition > 0) { // stop may be called multiple times.
                    final int remainingFrames = mTrackPosition - mTrack.getPlaybackHeadPosition();
                    mFinishAtMs = System.currentTimeMillis()
                            + remainingFrames * 1000 / mTrack.getSampleRate();
                    mTrackPosition = 0;
                }
                mTrack.stop(); // allows remaining data to play out
            }
        }

        @Override
        public void release() {
            super.release();
            if (mTrack != null) {
                final long remainingMs = mFinishAtMs - System.currentTimeMillis();
                if (remainingMs > 0) {
                    try {
                        Thread.sleep(remainingMs);
                    } catch (InterruptedException e) {
                        ;
                    }
                }
                mTrack.release();
                mTrack = null;
            }
        }

        public AudioTrack mTrack;
        private final static String TAG = "AudioRecordAudit";
        private int mTrackPosition;
        private long mFinishAtMs;
    }
}