/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
//#define LOG_NDEBUG 0

#define LOG_TAG "AudioTrack-JNI"

#include "android_media_AudioTrack.h"

#include <android-base/macros.h>
#include <android_os_Parcel.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/AudioParameter.h>
#include <media/AudioSystem.h>
#include <media/AudioTrack.h>
#include <nativehelper/JNIHelp.h>
#include <nativehelper/ScopedUtfChars.h>
#include <utils/Log.h>

#include <cinttypes>

#include "android_media_AudioAttributes.h"
#include "android_media_AudioErrors.h"
#include "android_media_AudioFormat.h"
#include "android_media_AudioTrackCallback.h"
#include "android_media_DeviceCallback.h"
#include "android_media_JNIUtils.h"
#include "android_media_MediaMetricsJNI.h"
#include "android_media_PlaybackParams.h"
#include "android_media_VolumeShaper.h"
#include "core_jni_helpers.h"

// ----------------------------------------------------------------------------

using namespace android;

using ::android::media::VolumeShaper;

// ----------------------------------------------------------------------------
static const char* const kClassPathName = "android/media/AudioTrack";

struct audio_track_fields_t {
    // these fields provide access from C++ to the...
    jmethodID postNativeEventInJava; // ... event post callback method
    jfieldID  nativeTrackInJavaObj;  // stores in Java the native AudioTrack object
    jfieldID  jniData;         // stores in Java additional resources used by the native AudioTrack
    jfieldID  fieldStreamType; // ... mStreamType field in the AudioTrack Java object
};
static audio_track_fields_t javaAudioTrackFields;
static PlaybackParams::fields_t gPlaybackParamsFields;
static VolumeShaperHelper::fields_t gVolumeShaperFields;

class AudioTrackCallbackImpl : public AudioTrack::IAudioTrackCallback {
public:
    enum event_type {
        // Keep in sync with java
        EVENT_MORE_DATA = 0,
        EVENT_UNDERRUN = 1,
        EVENT_LOOP_END = 2,
        EVENT_MARKER = 3,
        EVENT_NEW_POS = 4,
        EVENT_BUFFER_END = 5,
        EVENT_NEW_IAUDIOTRACK = 6,
        EVENT_STREAM_END = 7,
        // 8 is reserved for future use
        EVENT_CAN_WRITE_MORE_DATA = 9
    };

    AudioTrackCallbackImpl(jclass audioTrackClass, jobject audioTrackWeakRef, bool isOffload)
          : mIsOffload(isOffload)
    {
        const auto env = getJNIEnvOrDie();
        mAudioTrackClass = (jclass)env->NewGlobalRef(audioTrackClass);
        // we use a weak reference so the AudioTrack object can be garbage collected.
        mAudioTrackWeakRef = env->NewGlobalRef(audioTrackWeakRef);
    }

    AudioTrackCallbackImpl(const AudioTrackCallbackImpl&) = delete;
    AudioTrackCallbackImpl& operator=(const AudioTrackCallbackImpl&) = delete;

    ~AudioTrackCallbackImpl() {
        const auto env = getJNIEnvOrDie();
        env->DeleteGlobalRef(mAudioTrackClass);
        env->DeleteGlobalRef(mAudioTrackWeakRef);
    }

    size_t onCanWriteMoreData(const AudioTrack::Buffer& buffer) override {
        if (!mIsOffload) {
            LOG_FATAL("Received canWrite callback for non-offload track");
            return 0;
        }
        const size_t availableForWrite = buffer.size();
        const int arg = availableForWrite > INT32_MAX ? INT32_MAX : (int) availableForWrite;
        postEvent(EVENT_CAN_WRITE_MORE_DATA, arg);
        return 0;
    }

    void onMarker([[maybe_unused]] uint32_t markerPosition) override {
        postEvent(EVENT_MARKER);
    }

    void onNewPos([[maybe_unused]] uint32_t newPos) override {
        postEvent(EVENT_NEW_POS);
    }

    void onNewIAudioTrack() override {
        if (!mIsOffload) return;
        postEvent(EVENT_NEW_IAUDIOTRACK);
    }

    void onStreamEnd() override {
        if (!mIsOffload) return;
        postEvent(EVENT_STREAM_END);
    }

protected:
    jobject mAudioTrackWeakRef;

private:
    void postEvent(int event, int arg = 0) {
        auto env = getJNIEnvOrDie();
        env->CallStaticVoidMethod(
                mAudioTrackClass,
                javaAudioTrackFields.postNativeEventInJava,
                mAudioTrackWeakRef, event, arg, 0, NULL);
        if (env->ExceptionCheck()) {
            env->ExceptionDescribe();
            env->ExceptionClear();
        }
    }

    jclass mAudioTrackClass;
    const bool mIsOffload;
};

// keep these values in sync with AudioTrack.java
#define MODE_STATIC 0
#define MODE_STREAM 1

// ----------------------------------------------------------------------------
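// Per-track JNI state stored in the Java object's jniData field: the shared
// AudioTrackCallbackImpl base plus the optional device-routing and AudioTrack
// event callbacks that are registered later during setup.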
class AudioTrackJniStorage : public virtual RefBase,
                             public AudioTrackCallbackImpl
{
public:
    // TODO do we always want to initialize the callback implementation?
    AudioTrackJniStorage(jclass audioTrackClass, jobject audioTrackRef, bool isOffload = false)
          : AudioTrackCallbackImpl(audioTrackClass, audioTrackRef, isOffload) {}

    sp<JNIDeviceCallback> mDeviceCallback;
    sp<JNIAudioTrackCallback> mAudioTrackCallback;

    jobject getAudioTrackWeakRef() const {
        return mAudioTrackWeakRef;
    }
};

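// Reads the mContentId / mSyncId fields from an android.media.AudioTrack.TunerConfiguration
// object passed down from Java; both default to 0 when no configuration is provided.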
class TunerConfigurationHelper {
    JNIEnv *const mEnv;
    jobject const mTunerConfiguration;

    struct Ids {
        Ids(JNIEnv *env)
              : mClass(FindClassOrDie(env, "android/media/AudioTrack$TunerConfiguration")),
                mContentId(GetFieldIDOrDie(env, mClass, "mContentId", "I")),
                mSyncId(GetFieldIDOrDie(env, mClass, "mSyncId", "I")) {}
        const jclass mClass;
        const jfieldID mContentId;
        const jfieldID mSyncId;
    };

    static const Ids &getIds(JNIEnv *env) {
        // Meyer's singleton, initializes first time control passes through
        // declaration in a block and is thread-safe per ISO/IEC 14882:2011 6.7.4.
        static Ids ids(env);
        return ids;
    }

public:
    TunerConfigurationHelper(JNIEnv *env, jobject tunerConfiguration)
          : mEnv(env), mTunerConfiguration(tunerConfiguration) {}

    int32_t getContentId() const {
        if (mEnv == nullptr || mTunerConfiguration == nullptr) return 0;
        const Ids &ids = getIds(mEnv);
        return (int32_t)mEnv->GetIntField(mTunerConfiguration, ids.mContentId);
    }

    int32_t getSyncId() const {
        if (mEnv == nullptr || mTunerConfiguration == nullptr) return 0;
        const Ids &ids = getIds(mEnv);
        return (int32_t)mEnv->GetIntField(mTunerConfiguration, ids.mSyncId);
    }

    // optional check to confirm class and field ids can be found.
    static void initCheckOrDie(JNIEnv *env) { (void)getIds(env); }
};

// ----------------------------------------------------------------------------
#define DEFAULT_OUTPUT_SAMPLE_RATE 44100

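// Setup error codes returned to the Java layer by android_media_AudioTrack_setup();
// they are distinct from the generic AUDIO_JAVA_* status values.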
#define AUDIOTRACK_ERROR_SETUP_AUDIOSYSTEM         (-16)
#define AUDIOTRACK_ERROR_SETUP_INVALIDCHANNELMASK  (-17)
#define AUDIOTRACK_ERROR_SETUP_INVALIDFORMAT       (-18)
#define AUDIOTRACK_ERROR_SETUP_INVALIDSTREAMTYPE   (-19)
#define AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED    (-20)

namespace {
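// Allocates a MemoryHeapBase-backed shared memory region used as the buffer for
// MODE_STATIC tracks; the resulting IMemory is handed to AudioTrack::set().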
sp<IMemory> allocSharedMem(int sizeInBytes) {
    const auto heap = sp<MemoryHeapBase>::make(sizeInBytes, 0, "AudioTrack Heap Base");
    if (heap->getBase() == MAP_FAILED || heap->getBase() == nullptr) {
        return nullptr;
    }
    return sp<MemoryBase>::make(heap, 0, sizeInBytes);
}

sp<AudioTrack> getAudioTrack(JNIEnv* env, jobject thiz) {
    return getFieldSp<AudioTrack>(env, thiz, javaAudioTrackFields.nativeTrackInJavaObj);
}

} // anonymous
// ----------------------------------------------------------------------------
// For MediaSync
sp<AudioTrack> android_media_AudioTrack_getAudioTrack(JNIEnv* env, jobject audioTrackObj) {
    return getAudioTrack(env, audioTrackObj);
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this,
                                           jobject jaa, jintArray jSampleRate,
                                           jint channelPositionMask, jint channelIndexMask,
                                           jint audioFormat, jint buffSizeInBytes, jint memoryMode,
                                           jintArray jSession, jobject jAttributionSource,
                                           jlong nativeAudioTrack, jboolean offload,
                                           jint encapsulationMode, jobject tunerConfiguration,
                                           jstring opPackageName) {
    ALOGV("sampleRates=%p, channel mask=%x, index mask=%x, audioFormat(Java)=%d, buffSize=%d,"
          " nativeAudioTrack=0x%" PRIX64 ", offload=%d encapsulationMode=%d tuner=%p",
          jSampleRate, channelPositionMask, channelIndexMask, audioFormat, buffSizeInBytes,
          nativeAudioTrack, offload, encapsulationMode, tunerConfiguration);

    if (jSession == NULL) {
        ALOGE("Error creating AudioTrack: invalid session ID pointer");
        return (jint) AUDIO_JAVA_ERROR;
    }

    const TunerConfigurationHelper tunerHelper(env, tunerConfiguration);

    jint* nSession = env->GetIntArrayElements(jSession, nullptr /* isCopy */);
    if (nSession == NULL) {
        ALOGE("Error creating AudioTrack: Error retrieving session id pointer");
        return (jint) AUDIO_JAVA_ERROR;
    }
    audio_session_t sessionId = (audio_session_t) nSession[0];
    env->ReleaseIntArrayElements(jSession, nSession, 0 /* mode */);
    nSession = NULL;

    jclass clazz = env->GetObjectClass(thiz);
    if (clazz == NULL) {
        ALOGE("Can't find %s when setting up callback.", kClassPathName);
        return (jint) AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED;
    }

    // if we pass in an existing *Native* AudioTrack, we don't need to create/initialize one.
    sp<AudioTrack> lpTrack;
    const auto lpJniStorage = sp<AudioTrackJniStorage>::make(clazz, weak_this, offload);
    if (nativeAudioTrack == 0) {
        if (jaa == 0) {
            ALOGE("Error creating AudioTrack: invalid audio attributes");
            return (jint) AUDIO_JAVA_ERROR;
        }

        if (jSampleRate == 0) {
            ALOGE("Error creating AudioTrack: invalid sample rates");
            return (jint) AUDIO_JAVA_ERROR;
        }

        int* sampleRates = env->GetIntArrayElements(jSampleRate, NULL);
        int sampleRateInHertz = sampleRates[0];
        env->ReleaseIntArrayElements(jSampleRate, sampleRates, JNI_ABORT);

        // Invalid channel representations are caught by !audio_is_output_channel() below.
        audio_channel_mask_t nativeChannelMask = nativeChannelMaskFromJavaChannelMasks(
                channelPositionMask, channelIndexMask);
        if (!audio_is_output_channel(nativeChannelMask)) {
            ALOGE("Error creating AudioTrack: invalid native channel mask %#x.", nativeChannelMask);
            return (jint) AUDIOTRACK_ERROR_SETUP_INVALIDCHANNELMASK;
        }

        uint32_t channelCount = audio_channel_count_from_out_mask(nativeChannelMask);

        // check the format.
        // This function was called from Java, so we compare the format against the Java constants
        audio_format_t format = audioFormatToNative(audioFormat);
        if (format == AUDIO_FORMAT_INVALID) {
            ALOGE("Error creating AudioTrack: unsupported audio format %d.", audioFormat);
            return (jint) AUDIOTRACK_ERROR_SETUP_INVALIDFORMAT;
        }

        // compute the frame count
        size_t frameCount;
        if (audio_has_proportional_frames(format)) {
            const size_t bytesPerSample = audio_bytes_per_sample(format);
            frameCount = buffSizeInBytes / (channelCount * bytesPerSample);
        } else {
            frameCount = buffSizeInBytes;
        }

        // create the native AudioTrack object
        ScopedUtfChars opPackageNameStr(env, opPackageName);

        android::content::AttributionSourceState attributionSource;
        attributionSource.readFromParcel(parcelForJavaObject(env, jAttributionSource));
        lpTrack = sp<AudioTrack>::make(attributionSource);

        // read the AudioAttributes values
        auto paa = JNIAudioAttributeHelper::makeUnique();
        jint jStatus = JNIAudioAttributeHelper::nativeFromJava(env, jaa, paa.get());
        if (jStatus != (jint)AUDIO_JAVA_SUCCESS) {
            return jStatus;
        }
        ALOGV("AudioTrack_setup for usage=%d content=%d flags=0x%#x tags=%s",
              paa->usage, paa->content_type, paa->flags, paa->tags);

        // initialize the callback information:
        // this data will be passed with every AudioTrack callback
        audio_offload_info_t offloadInfo;
        if (offload == JNI_TRUE) {
            offloadInfo = AUDIO_INFO_INITIALIZER;
            offloadInfo.format = format;
            offloadInfo.sample_rate = sampleRateInHertz;
            offloadInfo.channel_mask = nativeChannelMask;
            offloadInfo.has_video = false;
            offloadInfo.stream_type = AUDIO_STREAM_MUSIC; // required for offload
        }

        if (encapsulationMode != 0) {
            offloadInfo = AUDIO_INFO_INITIALIZER;
            offloadInfo.format = format;
            offloadInfo.sample_rate = sampleRateInHertz;
            offloadInfo.channel_mask = nativeChannelMask;
            offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
            offloadInfo.encapsulation_mode =
                    static_cast<audio_encapsulation_mode_t>(encapsulationMode);
            offloadInfo.content_id = tunerHelper.getContentId();
            offloadInfo.sync_id = tunerHelper.getSyncId();
        }

        // initialize the native AudioTrack object
        status_t status = NO_ERROR;
        switch (memoryMode) {
        case MODE_STREAM:
            status = lpTrack->set(AUDIO_STREAM_DEFAULT, // stream type, but more info conveyed
                                                        // in paa (last argument)
                                  sampleRateInHertz,
                                  format, // word length, PCM
                                  nativeChannelMask, offload ? 0 : frameCount,
                                  offload ? AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD
                                          : AUDIO_OUTPUT_FLAG_NONE,
                                  lpJniStorage,
                                  0, // notificationFrames == 0 since not using EVENT_MORE_DATA
                                     // to feed the AudioTrack
                                  0, // shared mem
                                  true, // thread can call Java
                                  sessionId, // audio session ID
                                  offload ? AudioTrack::TRANSFER_SYNC_NOTIF_CALLBACK
                                          : AudioTrack::TRANSFER_SYNC,
                                  (offload || encapsulationMode) ? &offloadInfo : NULL,
                                  attributionSource, // Passed from Java
                                  paa.get());
            break;

        case MODE_STATIC:
        {
            // AudioTrack is using shared memory
            const auto iMem = allocSharedMem(buffSizeInBytes);
            if (iMem == nullptr) {
                ALOGE("Error creating AudioTrack in static mode: error creating mem heap base");
                goto native_init_failure;
            }

            status = lpTrack->set(AUDIO_STREAM_DEFAULT, // stream type, but more info conveyed
                                                        // in paa (last argument)
                                  sampleRateInHertz,
                                  format, // word length, PCM
                                  nativeChannelMask, frameCount, AUDIO_OUTPUT_FLAG_NONE,
                                  lpJniStorage,
                                  0, // notificationFrames == 0 since not using EVENT_MORE_DATA
                                     // to feed the AudioTrack
                                  iMem, // shared mem
                                  true, // thread can call Java
                                  sessionId, // audio session ID
                                  AudioTrack::TRANSFER_SHARED,
                                  nullptr, // default offloadInfo
                                  attributionSource, // Passed from Java
                                  paa.get());
            break;
        }
        default:
            ALOGE("Unknown mode %d", memoryMode);
            goto native_init_failure;
        }

        if (status != NO_ERROR) {
            ALOGE("Error %d initializing AudioTrack", status);
            goto native_init_failure;
        }
        // Set caller name so it can be logged in destructor.
        // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_JAVA
        lpTrack->setCallerName("java");
    } else {  // end if (nativeAudioTrack == 0)
        lpTrack = sp<AudioTrack>::fromExisting(reinterpret_cast<AudioTrack*>(nativeAudioTrack));
        // TODO: We need to find out which members of the Java AudioTrack might
        // need to be initialized from the Native AudioTrack
        // these are directly returned from getters:
        //  mSampleRate
        //  mAudioFormat
        //  mStreamType
        //  mChannelConfiguration
        //  mChannelCount
        //  mState (?)
        //  mPlayState (?)
        // these may be used internally (Java AudioTrack.audioParamCheck():
        //  mChannelMask
        //  mChannelIndexMask
        //  mDataLoadMode

        // initialize the callback information:
        // this data will be passed with every AudioTrack callback

        // TODO this callback information is useless, it isn't passed to the
        // native AudioTrack object
        /*
        lpJniStorage->mCallbackData.audioTrack_class = (jclass)env->NewGlobalRef(clazz);
        // we use a weak reference so the AudioTrack object can be garbage collected.
        lpJniStorage->mCallbackData.audioTrack_ref = env->NewGlobalRef(weak_this);
        lpJniStorage->mCallbackData.busy = false;
        */
    }
    lpJniStorage->mAudioTrackCallback =
            sp<JNIAudioTrackCallback>::make(env, thiz, lpJniStorage->getAudioTrackWeakRef(),
                                            javaAudioTrackFields.postNativeEventInJava);
    lpTrack->setAudioTrackCallback(lpJniStorage->mAudioTrackCallback);

    nSession = env->GetIntArrayElements(jSession, nullptr /* isCopy */);
    if (nSession == NULL) {
        ALOGE("Error creating AudioTrack: Error retrieving session id pointer");
        goto native_init_failure;
    }
    // read the audio session ID back from AudioTrack in case we create a new session
    nSession[0] = lpTrack->getSessionId();
    env->ReleaseIntArrayElements(jSession, nSession, 0 /* mode */);
    nSession = NULL;

    {
        const jint elements[1] = { (jint) lpTrack->getSampleRate() };
        env->SetIntArrayRegion(jSampleRate, 0, 1, elements);
    }

    // save our newly created C++ AudioTrack in the "nativeTrackInJavaObj" field
    // of the Java object (in mNativeTrackInJavaObj)
    setFieldSp(env, thiz, lpTrack, javaAudioTrackFields.nativeTrackInJavaObj);

    // save the JNI resources so we can free them later
    //ALOGV("storing lpJniStorage: %x\n", (long)lpJniStorage);
    setFieldSp(env, thiz, lpJniStorage, javaAudioTrackFields.jniData);

    // since we had audio attributes, the stream type was derived from them during the
    // creation of the native AudioTrack: push the same value to the Java object
    env->SetIntField(thiz, javaAudioTrackFields.fieldStreamType, (jint) lpTrack->streamType());

    return (jint) AUDIO_JAVA_SUCCESS;

    // failures:
native_init_failure:
    if (nSession != NULL) {
        env->ReleaseIntArrayElements(jSession, nSession, 0 /* mode */);
    }

    setFieldSp(env, thiz, sp<AudioTrack>{}, javaAudioTrackFields.nativeTrackInJavaObj);
    setFieldSp(env, thiz, sp<AudioTrackJniStorage>{}, javaAudioTrackFields.jniData);
    // lpTrack goes out of scope, so reference count drops to zero
    return (jint) AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED;
}

// ----------------------------------------------------------------------------
static jboolean
android_media_AudioTrack_is_direct_output_supported(JNIEnv *env, jobject thiz,
                                                    jint encoding, jint sampleRate,
                                                    jint channelMask, jint channelIndexMask,
                                                    jint contentType, jint usage, jint flags) {
    audio_config_base_t config = {};
    audio_attributes_t attributes = {};
    config.format = static_cast<audio_format_t>(audioFormatToNative(encoding));
    config.sample_rate = static_cast<uint32_t>(sampleRate);
    config.channel_mask = nativeChannelMaskFromJavaChannelMasks(channelMask, channelIndexMask);
    attributes.content_type = static_cast<audio_content_type_t>(contentType);
    attributes.usage = static_cast<audio_usage_t>(usage);
    attributes.flags = static_cast<audio_flags_mask_t>(flags);
    // ignore source and tags attributes as they don't affect querying whether output is supported
    return AudioTrack::isDirectOutputSupported(config, attributes);
}

// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_start(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for start()");
        return;
    }

    lpTrack->start();
}

// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_stop(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for stop()");
        return;
    }

    lpTrack->stop();
}

// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_pause(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for pause()");
        return;
    }

    lpTrack->pause();
}

// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_flush(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for flush()");
        return;
    }

    lpTrack->flush();
}

// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_set_volume(JNIEnv *env, jobject thiz, jfloat leftVol, jfloat rightVol)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for setVolume()");
        return;
    }

    lpTrack->setVolume(leftVol, rightVol);
}

// ----------------------------------------------------------------------------

static void android_media_AudioTrack_release(JNIEnv *env, jobject thiz) {
    setFieldSp(env, thiz, sp<AudioTrack>(nullptr), javaAudioTrackFields.nativeTrackInJavaObj);
    setFieldSp(env, thiz, sp<AudioTrackJniStorage>(nullptr), javaAudioTrackFields.jniData);
}

// ----------------------------------------------------------------------------
static void android_media_AudioTrack_finalize(JNIEnv *env, jobject thiz) {
    //ALOGV("android_media_AudioTrack_finalize jobject: %x\n", (int)thiz);
    android_media_AudioTrack_release(env, thiz);
}

// overloaded JNI array helper functions (same as in android_media_AudioRecord)
static inline
jbyte *envGetArrayElements(JNIEnv *env, jbyteArray array, jboolean *isCopy) {
    return env->GetByteArrayElements(array, isCopy);
}

static inline
void envReleaseArrayElements(JNIEnv *env, jbyteArray array, jbyte *elems, jint mode) {
    env->ReleaseByteArrayElements(array, elems, mode);
}

static inline
jshort *envGetArrayElements(JNIEnv *env, jshortArray array, jboolean *isCopy) {
    return env->GetShortArrayElements(array, isCopy);
}

static inline
void envReleaseArrayElements(JNIEnv *env, jshortArray array, jshort *elems, jint mode) {
    env->ReleaseShortArrayElements(array, elems, mode);
}

static inline
jfloat *envGetArrayElements(JNIEnv *env, jfloatArray array, jboolean *isCopy) {
    return env->GetFloatArrayElements(array, isCopy);
}

static inline
void envReleaseArrayElements(JNIEnv *env, jfloatArray array, jfloat *elems, jint mode) {
    env->ReleaseFloatArrayElements(array, elems, mode);
}

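// Maps a negative result from AudioTrack::write() to the status returned to Java:
// WOULD_BLOCK becomes 0 bytes written, NO_INIT becomes a dead-object error, and
// anything else is converted with nativeToJavaStatus().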
static inline
jint interpretWriteSizeError(ssize_t writeSize) {
    if (writeSize == WOULD_BLOCK) {
        return (jint)0;
    } else if (writeSize == NO_INIT) {
        return AUDIO_JAVA_DEAD_OBJECT;
    } else {
        ALOGE("Error %zd during AudioTrack native write", writeSize);
        return nativeToJavaStatus(writeSize);
    }
}

// ----------------------------------------------------------------------------
template <typename T>
static jint writeToTrack(const sp<AudioTrack>& track, jint audioFormat, const T *data,
                         jint offsetInSamples, jint sizeInSamples, bool blocking) {
    // give the data to the native AudioTrack object (the data starts at the offset)
    ssize_t written = 0;
    // regular write() or copy the data to the AudioTrack's shared memory?
    size_t sizeInBytes = sizeInSamples * sizeof(T);
    if (track->sharedBuffer() == 0) {
        written = track->write(data + offsetInSamples, sizeInBytes, blocking);
        // for compatibility with earlier behavior of write(), return 0 in this case
        if (written == (ssize_t) WOULD_BLOCK) {
            written = 0;
        }
    } else {
        // writing to shared memory, check for capacity
        if ((size_t)sizeInBytes > track->sharedBuffer()->size()) {
            sizeInBytes = track->sharedBuffer()->size();
        }
        memcpy(track->sharedBuffer()->unsecurePointer(), data + offsetInSamples, sizeInBytes);
        written = sizeInBytes;
    }
    if (written >= 0) {
        return written / sizeof(T);
    }
    return interpretWriteSizeError(written);
}

// ----------------------------------------------------------------------------
template <typename T>
static jint android_media_AudioTrack_writeArray(JNIEnv *env, jobject thiz,
                                                T javaAudioData,
                                                jint offsetInSamples, jint sizeInSamples,
                                                jint javaAudioFormat,
                                                jboolean isWriteBlocking) {
    //ALOGV("android_media_AudioTrack_writeArray(offset=%d, sizeInSamples=%d) called",
    //        offsetInSamples, sizeInSamples);
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for write()");
        return (jint)AUDIO_JAVA_INVALID_OPERATION;
    }

    if (javaAudioData == NULL) {
        ALOGE("NULL java array of audio data to play");
        return (jint)AUDIO_JAVA_BAD_VALUE;
    }

    // NOTE: We may use GetPrimitiveArrayCritical() when the JNI implementation changes in such
    // a way that it becomes much more efficient. When doing so, we will have to prevent the
    // AudioSystem callback to be called while in critical section (in case of media server
    // process crash for instance)

    // get the pointer for the audio data from the java array
    auto cAudioData = envGetArrayElements(env, javaAudioData, NULL);
    if (cAudioData == NULL) {
        ALOGE("Error retrieving source of audio data to play");
        return (jint)AUDIO_JAVA_BAD_VALUE; // out of memory or no data to load
    }

    jint samplesWritten = writeToTrack(lpTrack, javaAudioFormat, cAudioData,
            offsetInSamples, sizeInSamples, isWriteBlocking == JNI_TRUE /* blocking */);

    envReleaseArrayElements(env, javaAudioData, cAudioData, 0);

    //ALOGV("write wrote %d (tried %d) samples in the native AudioTrack with offset %d",
    //        (int)samplesWritten, (int)(sizeInSamples), (int)offsetInSamples);
    return samplesWritten;
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_write_native_bytes(JNIEnv *env, jobject thiz,
        jobject javaByteBuffer, jint byteOffset, jint sizeInBytes,
        jint javaAudioFormat, jboolean isWriteBlocking) {
    //ALOGV("android_media_AudioTrack_write_native_bytes(offset=%d, sizeInBytes=%d) called",
    //        offsetInBytes, sizeInBytes);
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for write()");
        return (jint)AUDIO_JAVA_INVALID_OPERATION;
    }

    const jbyte* bytes =
            reinterpret_cast<const jbyte*>(env->GetDirectBufferAddress(javaByteBuffer));
    if (bytes == NULL) {
        ALOGE("Error retrieving source of audio data to play, can't play");
        return (jint)AUDIO_JAVA_BAD_VALUE;
    }

    jint written = writeToTrack(lpTrack, javaAudioFormat, bytes, byteOffset,
            sizeInBytes, isWriteBlocking == JNI_TRUE /* blocking */);

    return written;
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_buffer_size_frames(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for getBufferSizeInFrames()");
        return (jint)AUDIO_JAVA_ERROR;
    }

    ssize_t result = lpTrack->getBufferSizeInFrames();
    if (result < 0) {
        jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
                             "Internal error detected in getBufferSizeInFrames() = %zd", result);
        return (jint)AUDIO_JAVA_ERROR;
    }
    return (jint)result;
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_buffer_size_frames(JNIEnv *env,
        jobject thiz, jint bufferSizeInFrames) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for setBufferSizeInFrames()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    // Value will be coerced into the valid range.
    // But internal values are unsigned, size_t, so we need to clip
    // against zero here where it is signed.
    if (bufferSizeInFrames < 0) {
        bufferSizeInFrames = 0;
    }
    ssize_t result = lpTrack->setBufferSizeInFrames(bufferSizeInFrames);
    if (result < 0) {
        jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
                             "Internal error detected in setBufferSizeInFrames() = %zd", result);
        return (jint)AUDIO_JAVA_ERROR;
    }
    return (jint)result;
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_buffer_capacity_frames(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for getBufferCapacityInFrames()");
        return (jint)AUDIO_JAVA_ERROR;
    }

    return lpTrack->frameCount();
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_playback_rate(JNIEnv *env, jobject thiz,
        jint sampleRateInHz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for setSampleRate()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return nativeToJavaStatus(lpTrack->setSampleRate(sampleRateInHz));
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_playback_rate(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for getSampleRate()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return (jint) lpTrack->getSampleRate();
}

// ----------------------------------------------------------------------------
static void android_media_AudioTrack_set_playback_params(JNIEnv *env, jobject thiz,
        jobject params) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "AudioTrack not initialized");
        return;
    }

    PlaybackParams pbp;
    pbp.fillFromJobject(env, gPlaybackParamsFields, params);

    ALOGV("setPlaybackParams: %d:%f %d:%f %d:%u %d:%u",
          pbp.speedSet, pbp.audioRate.mSpeed,
          pbp.pitchSet, pbp.audioRate.mPitch,
          pbp.audioFallbackModeSet, pbp.audioRate.mFallbackMode,
          pbp.audioStretchModeSet, pbp.audioRate.mStretchMode);

    // to simulate partially set params, we do a read-modify-write.
    // TODO: pass in the valid set mask into AudioTrack.
    AudioPlaybackRate rate = lpTrack->getPlaybackRate();
    bool updatedRate = false;
    if (pbp.speedSet) {
        rate.mSpeed = pbp.audioRate.mSpeed;
        updatedRate = true;
    }
    if (pbp.pitchSet) {
        rate.mPitch = pbp.audioRate.mPitch;
        updatedRate = true;
    }
    if (pbp.audioFallbackModeSet) {
        rate.mFallbackMode = pbp.audioRate.mFallbackMode;
        updatedRate = true;
    }
    if (pbp.audioStretchModeSet) {
        rate.mStretchMode = pbp.audioRate.mStretchMode;
        updatedRate = true;
    }
    if (updatedRate) {
        if (lpTrack->setPlaybackRate(rate) != OK) {
            jniThrowException(env, "java/lang/IllegalArgumentException",
                              "arguments out of range");
        }
    }
}

// ----------------------------------------------------------------------------
static jobject android_media_AudioTrack_get_playback_params(JNIEnv *env, jobject thiz,
        jobject params) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "AudioTrack not initialized");
        return NULL;
    }

    PlaybackParams pbs;
    pbs.audioRate = lpTrack->getPlaybackRate();
    pbs.speedSet = true;
    pbs.pitchSet = true;
    pbs.audioFallbackModeSet = true;
    pbs.audioStretchModeSet = true;
    return pbs.asJobject(env, gPlaybackParamsFields);
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_marker_pos(JNIEnv *env, jobject thiz,
        jint markerPos) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for setMarkerPosition()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return nativeToJavaStatus( lpTrack->setMarkerPosition(markerPos) );
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_marker_pos(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    uint32_t markerPos = 0;

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for getMarkerPosition()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    lpTrack->getMarkerPosition(&markerPos);
    return (jint)markerPos;
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_pos_update_period(JNIEnv *env, jobject thiz,
        jint period) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for setPositionUpdatePeriod()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return nativeToJavaStatus( lpTrack->setPositionUpdatePeriod(period) );
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_pos_update_period(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    uint32_t period = 0;

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for getPositionUpdatePeriod()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    lpTrack->getPositionUpdatePeriod(&period);
    return (jint)period;
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_position(JNIEnv *env, jobject thiz,
        jint position) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for setPosition()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return nativeToJavaStatus( lpTrack->setPosition(position) );
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_position(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    uint32_t position = 0;

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for getPosition()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    lpTrack->getPosition(&position);
    return (jint)position;
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_latency(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for latency()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return (jint)lpTrack->latency();
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_underrun_count(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for getUnderrunCount()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return (jint)lpTrack->getUnderrunCount();
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_flags(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for getFlags()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return (jint)lpTrack->getFlags();
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_timestamp(JNIEnv *env, jobject thiz,
        jlongArray jTimestamp) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);

    if (lpTrack == NULL) {
        ALOGE("Unable to retrieve AudioTrack pointer for getTimestamp()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    AudioTimestamp timestamp;
    status_t status = lpTrack->getTimestamp(timestamp);
    if (status == OK) {
        jlong* nTimestamp = env->GetLongArrayElements(jTimestamp, nullptr /* isCopy */);
        if (nTimestamp == NULL) {
            ALOGE("Unable to get array for getTimestamp()");
            return (jint)AUDIO_JAVA_ERROR;
        }
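        // [0] holds the frame position, [1] the associated system time in nanoseconds.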
        nTimestamp[0] = static_cast<jlong>(timestamp.mPosition);
        nTimestamp[1] = static_cast<jlong>((timestamp.mTime.tv_sec * 1000000000LL) +
                                           timestamp.mTime.tv_nsec);
        env->ReleaseLongArrayElements(jTimestamp, nTimestamp, 0 /* mode */);
    }
    return (jint) nativeToJavaStatus(status);
}

// ----------------------------------------------------------------------------
static jobject
android_media_AudioTrack_native_getMetrics(JNIEnv *env, jobject thiz)
{
    ALOGD("android_media_AudioTrack_native_getMetrics");

    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);

    if (lpTrack == NULL) {
        ALOGE("Unable to retrieve AudioTrack pointer for getMetrics()");
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return (jobject) NULL;
    }

    // get what we have for the metrics from the track
    mediametrics::Item *item = NULL;

    status_t err = lpTrack->getMetrics(item);
    if (err != OK) {
        ALOGE("getMetrics failed");
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return (jobject) NULL;
    }

    jobject mybundle = MediaMetricsJNI::writeMetricsToBundle(env, item, NULL /* mybundle */);

    // housekeeping
    delete item;
    item = NULL;

    return mybundle;
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_loop(JNIEnv *env, jobject thiz,
        jint loopStart, jint loopEnd, jint loopCount) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for setLoop()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return nativeToJavaStatus( lpTrack->setLoop(loopStart, loopEnd, loopCount) );
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_reload(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for reload()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return nativeToJavaStatus( lpTrack->reload() );
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_output_sample_rate(JNIEnv *env, jobject thiz,
        jint javaStreamType) {
    uint32_t afSamplingRate;
    // convert the stream type from Java to native value
    // FIXME: code duplication with android_media_AudioTrack_setup()
    audio_stream_type_t nativeStreamType;
    switch (javaStreamType) {
    case AUDIO_STREAM_VOICE_CALL:
    case AUDIO_STREAM_SYSTEM:
    case AUDIO_STREAM_RING:
    case AUDIO_STREAM_MUSIC:
    case AUDIO_STREAM_ALARM:
    case AUDIO_STREAM_NOTIFICATION:
    case AUDIO_STREAM_BLUETOOTH_SCO:
    case AUDIO_STREAM_DTMF:
        nativeStreamType = (audio_stream_type_t) javaStreamType;
        break;
    default:
        nativeStreamType = AUDIO_STREAM_DEFAULT;
        break;
    }

    status_t status = AudioSystem::getOutputSamplingRate(&afSamplingRate, nativeStreamType);
    if (status != NO_ERROR) {
        ALOGE("Error %d in AudioSystem::getOutputSamplingRate() for stream type %d "
              "in AudioTrack JNI", status, nativeStreamType);
        return DEFAULT_OUTPUT_SAMPLE_RATE;
    } else {
        return afSamplingRate;
    }
}

// ----------------------------------------------------------------------------
// returns the minimum required size for the successful creation of a streaming AudioTrack
// returns -1 if there was an error querying the hardware.
static jint android_media_AudioTrack_get_min_buff_size(JNIEnv *env, jobject thiz,
        jint sampleRateInHertz, jint channelCount, jint audioFormat) {

    size_t frameCount;
    const status_t status = AudioTrack::getMinFrameCount(&frameCount, AUDIO_STREAM_DEFAULT,
            sampleRateInHertz);
    if (status != NO_ERROR) {
        ALOGE("AudioTrack::getMinFrameCount() for sample rate %d failed with status %d",
              sampleRateInHertz, status);
        return -1;
    }
    const audio_format_t format = audioFormatToNative(audioFormat);
    if (audio_has_proportional_frames(format)) {
        const size_t bytesPerSample = audio_bytes_per_sample(format);
        return frameCount * channelCount * bytesPerSample;
    } else {
        return frameCount;
    }
}

// ----------------------------------------------------------------------------
static jint
android_media_AudioTrack_setAuxEffectSendLevel(JNIEnv *env, jobject thiz, jfloat level)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for setAuxEffectSendLevel()");
        return -1;
    }

    status_t status = lpTrack->setAuxEffectSendLevel(level);
    if (status != NO_ERROR) {
        ALOGE("AudioTrack::setAuxEffectSendLevel() for level %g failed with status %d",
              level, status);
    }
    return (jint) status;
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_attachAuxEffect(JNIEnv *env, jobject thiz,
        jint effectId) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for attachAuxEffect()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return nativeToJavaStatus( lpTrack->attachAuxEffect(effectId) );
}

static jboolean android_media_AudioTrack_setOutputDevice(
        JNIEnv *env, jobject thiz, jint device_id) {

    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == 0) {
        return false;
    }
    return lpTrack->setOutputDevice(device_id) == NO_ERROR;
}

static jintArray android_media_AudioTrack_getRoutedDeviceIds(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        return NULL;
    }
    DeviceIdVector deviceIds = lpTrack->getRoutedDeviceIds();
    jintArray result;
    result = env->NewIntArray(deviceIds.size());
    if (result == NULL) {
        return NULL;
    }
    jint *values = env->GetIntArrayElements(result, 0);
    for (unsigned int i = 0; i < deviceIds.size(); i++) {
        values[i] = static_cast<jint>(deviceIds[i]);
1207 }
1208 env->ReleaseIntArrayElements(result, values, 0);
1209 return result;
1210 }
1211
android_media_AudioTrack_enableDeviceCallback(JNIEnv * env,jobject thiz)1212 static void android_media_AudioTrack_enableDeviceCallback(
1213 JNIEnv *env, jobject thiz) {
1214
1215 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1216 if (lpTrack == nullptr) {
1217 return;
1218 }
1219 const auto pJniStorage =
1220 getFieldSp<AudioTrackJniStorage>(env, thiz, javaAudioTrackFields.jniData);
1221 if (pJniStorage == nullptr || pJniStorage->mDeviceCallback != nullptr) {
1222 return;
1223 }
1224
1225 pJniStorage->mDeviceCallback =
1226 sp<JNIDeviceCallback>::make(env, thiz, pJniStorage->getAudioTrackWeakRef(),
1227 javaAudioTrackFields.postNativeEventInJava);
1228 lpTrack->addAudioDeviceCallback(pJniStorage->mDeviceCallback);
1229 }
1230
android_media_AudioTrack_disableDeviceCallback(JNIEnv * env,jobject thiz)1231 static void android_media_AudioTrack_disableDeviceCallback(
1232 JNIEnv *env, jobject thiz) {
1233
1234 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1235 if (lpTrack == nullptr) {
1236 return;
1237 }
1238 const auto pJniStorage =
1239 getFieldSp<AudioTrackJniStorage>(env, thiz, javaAudioTrackFields.jniData);
1240
1241 if (pJniStorage == nullptr || pJniStorage->mDeviceCallback == nullptr) {
1242 return;
1243 }
1244 lpTrack->removeAudioDeviceCallback(pJniStorage->mDeviceCallback);
1245 pJniStorage->mDeviceCallback.clear();
1246 }
1247
1248 // Pass through the arguments to the AudioFlinger track implementation.
android_media_AudioTrack_apply_volume_shaper(JNIEnv * env,jobject thiz,jobject jconfig,jobject joperation)1249 static jint android_media_AudioTrack_apply_volume_shaper(JNIEnv *env, jobject thiz,
1250 jobject jconfig, jobject joperation) {
1251 // NOTE: hard code here to prevent platform issues. Must match VolumeShaper.java
1252 const int VOLUME_SHAPER_INVALID_OPERATION = -38;
1253
1254 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1255 if (lpTrack == nullptr) {
1256 return (jint)VOLUME_SHAPER_INVALID_OPERATION;
1257 }
1258
1259 sp<VolumeShaper::Configuration> configuration;
1260 sp<VolumeShaper::Operation> operation;
1261 if (jconfig != nullptr) {
1262 configuration = VolumeShaperHelper::convertJobjectToConfiguration(
1263 env, gVolumeShaperFields, jconfig);
1264 ALOGV("applyVolumeShaper configuration: %s", configuration->toString().c_str());
1265 }
1266 if (joperation != nullptr) {
1267 operation = VolumeShaperHelper::convertJobjectToOperation(
1268 env, gVolumeShaperFields, joperation);
1269 ALOGV("applyVolumeShaper operation: %s", operation->toString().c_str());
1270 }
1271 VolumeShaper::Status status = lpTrack->applyVolumeShaper(configuration, operation);
1272 if (status == INVALID_OPERATION) {
1273 status = VOLUME_SHAPER_INVALID_OPERATION;
1274 }
1275 return (jint)status; // if status < 0 an error, else a VolumeShaper id
1276 }
1277
1278 // Pass through the arguments to the AudioFlinger track implementation.
android_media_AudioTrack_get_volume_shaper_state(JNIEnv * env,jobject thiz,jint id)1279 static jobject android_media_AudioTrack_get_volume_shaper_state(JNIEnv *env, jobject thiz,
1280 jint id) {
1281 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1282 if (lpTrack == nullptr) {
1283 return (jobject)nullptr;
1284 }
1285
1286 sp<VolumeShaper::State> state = lpTrack->getVolumeShaperState((int)id);
1287 if (state.get() == nullptr) {
1288 return (jobject)nullptr;
1289 }
1290 return VolumeShaperHelper::convertStateToJobject(env, gVolumeShaperFields, state);
1291 }
1292
android_media_AudioTrack_setPresentation(JNIEnv * env,jobject thiz,jint presentationId,jint programId)1293 static int android_media_AudioTrack_setPresentation(
1294 JNIEnv *env, jobject thiz, jint presentationId, jint programId) {
1295 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1296 if (lpTrack == NULL) {
1297 jniThrowException(env, "java/lang/IllegalStateException",
1298 "AudioTrack not initialized");
1299 return (jint)AUDIO_JAVA_ERROR;
1300 }
1301
1302 return (jint)lpTrack->selectPresentation((int)presentationId, (int)programId);
1303 }
1304
1305 // ----------------------------------------------------------------------------
android_media_AudioTrack_get_port_id(JNIEnv * env,jobject thiz)1306 static jint android_media_AudioTrack_get_port_id(JNIEnv *env, jobject thiz) {
1307 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1308 if (lpTrack == NULL) {
1309 jniThrowException(env, "java/lang/IllegalStateException",
1310 "AudioTrack not initialized");
1311 return (jint)AUDIO_PORT_HANDLE_NONE;
1312 }
1313 return (jint)lpTrack->getPortId();
1314 }
1315
1316 // ----------------------------------------------------------------------------
android_media_AudioTrack_set_delay_padding(JNIEnv * env,jobject thiz,jint delayInFrames,jint paddingInFrames)1317 static void android_media_AudioTrack_set_delay_padding(JNIEnv *env, jobject thiz,
1318 jint delayInFrames, jint paddingInFrames) {
1319 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1320 if (lpTrack == NULL) {
1321 jniThrowException(env, "java/lang/IllegalStateException",
1322 "AudioTrack not initialized");
1323 return;
1324 }
1325 AudioParameter param = AudioParameter();
1326 param.addInt(String8(AUDIO_OFFLOAD_CODEC_DELAY_SAMPLES), (int) delayInFrames);
1327 param.addInt(String8(AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES), (int) paddingInFrames);
1328 lpTrack->setParameters(param.toString());
1329 }
1330
android_media_AudioTrack_setAudioDescriptionMixLeveldB(JNIEnv * env,jobject thiz,jfloat level)1331 static jint android_media_AudioTrack_setAudioDescriptionMixLeveldB(JNIEnv *env, jobject thiz,
1332 jfloat level) {
1333 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1334 if (lpTrack == nullptr) {
1335 jniThrowException(env, "java/lang/IllegalStateException", "AudioTrack not initialized");
1336 return (jint)AUDIO_JAVA_ERROR;
1337 }
1338
1339 return nativeToJavaStatus(lpTrack->setAudioDescriptionMixLevel(level));
1340 }
1341
static jint android_media_AudioTrack_getAudioDescriptionMixLeveldB(JNIEnv *env, jobject thiz,
                                                                    jfloatArray level) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == nullptr) {
        ALOGE("%s: AudioTrack not initialized", __func__);
        return (jint)AUDIO_JAVA_ERROR;
    }
    jfloat *nativeLevel = env->GetFloatArrayElements(level, nullptr /* isCopy */);
    if (nativeLevel == nullptr) {
        ALOGE("%s: Cannot retrieve level pointer", __func__);
        return (jint)AUDIO_JAVA_ERROR;
    }

    status_t status = lpTrack->getAudioDescriptionMixLevel(reinterpret_cast<float *>(nativeLevel));
    env->ReleaseFloatArrayElements(level, nativeLevel, 0 /* mode */);

    return nativeToJavaStatus(status);
}

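// Sets the dual mono mode (audio_dual_mono_mode_t) on the native track.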
static jint android_media_AudioTrack_setDualMonoMode(JNIEnv *env, jobject thiz, jint dualMonoMode) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == nullptr) {
        jniThrowException(env, "java/lang/IllegalStateException", "AudioTrack not initialized");
        return (jint)AUDIO_JAVA_ERROR;
    }

    return nativeToJavaStatus(
            lpTrack->setDualMonoMode(static_cast<audio_dual_mono_mode_t>(dualMonoMode)));
}

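// Retrieves the current dual mono mode, writing it into the first element of the
// caller-supplied int array.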
static jint android_media_AudioTrack_getDualMonoMode(JNIEnv *env, jobject thiz,
                                                     jintArray dualMonoMode) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == nullptr) {
        ALOGE("%s: AudioTrack not initialized", __func__);
        return (jint)AUDIO_JAVA_ERROR;
    }
    jint *nativeDualMonoMode = env->GetIntArrayElements(dualMonoMode, nullptr /* isCopy */);
    if (nativeDualMonoMode == nullptr) {
        ALOGE("%s: Cannot retrieve dualMonoMode pointer", __func__);
        return (jint)AUDIO_JAVA_ERROR;
    }

    status_t status = lpTrack->getDualMonoMode(
            reinterpret_cast<audio_dual_mono_mode_t *>(nativeDualMonoMode));
    env->ReleaseIntArrayElements(dualMonoMode, nativeDualMonoMode, 0 /* mode */);

    return nativeToJavaStatus(status);
}

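// Associates a log session id (used for media metrics) with the native track; a null
// jlogSessionId clears the previously set id.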
static void android_media_AudioTrack_setLogSessionId(JNIEnv *env, jobject thiz,
                                                     jstring jlogSessionId) {
    sp<AudioTrack> track = getAudioTrack(env, thiz);
    if (track == nullptr) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for setLogSessionId()");
        return;  // jniThrowException() only pends the exception; don't dereference a null track.
    }
    if (jlogSessionId == nullptr) {
        ALOGV("%s: logSessionId nullptr", __func__);
        track->setLogSessionId(nullptr);
        return;
    }
    ScopedUtfChars logSessionId(env, jlogSessionId);
    ALOGV("%s: logSessionId '%s'", __func__, logSessionId.c_str());
    track->setLogSessionId(logSessionId.c_str());
}

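// Forwards the player id assigned to this AudioTrack to the native layer.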
static void android_media_AudioTrack_setPlayerIId(JNIEnv *env, jobject thiz, jint playerIId) {
    sp<AudioTrack> track = getAudioTrack(env, thiz);
    if (track == nullptr) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for setPlayerIId()");
        return;  // jniThrowException() only pends the exception; don't dereference a null track.
    }
    ALOGV("%s: playerIId %d", __func__, playerIId);
    track->setPlayerIId(playerIId);
}

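// Returns the current start threshold in frames (the amount of buffered audio required before
// the track starts playing); throws IllegalStateException on error.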
static jint android_media_AudioTrack_getStartThresholdInFrames(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == nullptr) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for getStartThresholdInFrames()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    const ssize_t result = lpTrack->getStartThresholdInFrames();
    if (result <= 0) {
        jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
                             "Internal error detected in getStartThresholdInFrames() = %zd",
                             result);
        return (jint)AUDIO_JAVA_ERROR;
    }
    return (jint)result; // this should be a positive value.
}

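// Sets the start threshold in frames and returns the value actually applied by the native
// track; throws IllegalStateException if the track is uninitialized or the result is invalid.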
static jint android_media_AudioTrack_setStartThresholdInFrames(JNIEnv *env, jobject thiz,
                                                               jint startThresholdInFrames) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == nullptr) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for setStartThresholdInFrames()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    // non-positive values of startThresholdInFrames are not allowed by the Java layer.
    const ssize_t result = lpTrack->setStartThresholdInFrames(startThresholdInFrames);
    if (result <= 0) {
        jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
                             "Internal error detected in setStartThresholdInFrames() = %zd",
                             result);
        return (jint)AUDIO_JAVA_ERROR;
    }
    return (jint)result; // this should be a positive value.
}

// ----------------------------------------------------------------------------
// ----------------------------------------------------------------------------
static const JNINativeMethod gMethods[] = {
        // name, signature, funcPtr
        {"native_is_direct_output_supported", "(IIIIIII)Z",
         (void *)android_media_AudioTrack_is_direct_output_supported},
        {"native_start", "()V", (void *)android_media_AudioTrack_start},
        {"native_stop", "()V", (void *)android_media_AudioTrack_stop},
        {"native_pause", "()V", (void *)android_media_AudioTrack_pause},
        {"native_flush", "()V", (void *)android_media_AudioTrack_flush},
        {"native_setup",
         "(Ljava/lang/Object;Ljava/lang/Object;[IIIIII[ILandroid/os/Parcel;"
         "JZILjava/lang/Object;Ljava/lang/String;)I",
         (void *)android_media_AudioTrack_setup},
        {"native_finalize", "()V", (void *)android_media_AudioTrack_finalize},
        {"native_release", "()V", (void *)android_media_AudioTrack_release},
        {"native_write_byte", "([BIIIZ)I", (void *)android_media_AudioTrack_writeArray<jbyteArray>},
        {"native_write_native_bytes", "(Ljava/nio/ByteBuffer;IIIZ)I",
         (void *)android_media_AudioTrack_write_native_bytes},
        {"native_write_short", "([SIIIZ)I",
         (void *)android_media_AudioTrack_writeArray<jshortArray>},
        {"native_write_float", "([FIIIZ)I",
         (void *)android_media_AudioTrack_writeArray<jfloatArray>},
        {"native_setVolume", "(FF)V", (void *)android_media_AudioTrack_set_volume},
        {"native_get_buffer_size_frames", "()I",
         (void *)android_media_AudioTrack_get_buffer_size_frames},
        {"native_set_buffer_size_frames", "(I)I",
         (void *)android_media_AudioTrack_set_buffer_size_frames},
        {"native_get_buffer_capacity_frames", "()I",
         (void *)android_media_AudioTrack_get_buffer_capacity_frames},
        {"native_set_playback_rate", "(I)I", (void *)android_media_AudioTrack_set_playback_rate},
        {"native_get_playback_rate", "()I", (void *)android_media_AudioTrack_get_playback_rate},
        {"native_set_playback_params", "(Landroid/media/PlaybackParams;)V",
         (void *)android_media_AudioTrack_set_playback_params},
        {"native_get_playback_params", "()Landroid/media/PlaybackParams;",
         (void *)android_media_AudioTrack_get_playback_params},
        {"native_set_marker_pos", "(I)I", (void *)android_media_AudioTrack_set_marker_pos},
        {"native_get_marker_pos", "()I", (void *)android_media_AudioTrack_get_marker_pos},
        {"native_set_pos_update_period", "(I)I",
         (void *)android_media_AudioTrack_set_pos_update_period},
        {"native_get_pos_update_period", "()I",
         (void *)android_media_AudioTrack_get_pos_update_period},
        {"native_set_position", "(I)I", (void *)android_media_AudioTrack_set_position},
        {"native_get_position", "()I", (void *)android_media_AudioTrack_get_position},
        {"native_get_latency", "()I", (void *)android_media_AudioTrack_get_latency},
        {"native_get_underrun_count", "()I", (void *)android_media_AudioTrack_get_underrun_count},
        {"native_get_flags", "()I", (void *)android_media_AudioTrack_get_flags},
        {"native_get_timestamp", "([J)I", (void *)android_media_AudioTrack_get_timestamp},
        {"native_getMetrics", "()Landroid/os/PersistableBundle;",
         (void *)android_media_AudioTrack_native_getMetrics},
        {"native_set_loop", "(III)I", (void *)android_media_AudioTrack_set_loop},
        {"native_reload_static", "()I", (void *)android_media_AudioTrack_reload},
        {"native_get_output_sample_rate", "(I)I",
         (void *)android_media_AudioTrack_get_output_sample_rate},
        {"native_get_min_buff_size", "(III)I", (void *)android_media_AudioTrack_get_min_buff_size},
        {"native_setAuxEffectSendLevel", "(F)I",
         (void *)android_media_AudioTrack_setAuxEffectSendLevel},
        {"native_attachAuxEffect", "(I)I", (void *)android_media_AudioTrack_attachAuxEffect},
        {"native_setOutputDevice", "(I)Z", (void *)android_media_AudioTrack_setOutputDevice},
        {"native_getRoutedDeviceIds", "()[I", (void *)android_media_AudioTrack_getRoutedDeviceIds},
        {"native_enableDeviceCallback", "()V",
         (void *)android_media_AudioTrack_enableDeviceCallback},
        {"native_disableDeviceCallback", "()V",
         (void *)android_media_AudioTrack_disableDeviceCallback},
        {"native_applyVolumeShaper",
         "(Landroid/media/VolumeShaper$Configuration;Landroid/media/VolumeShaper$Operation;)I",
         (void *)android_media_AudioTrack_apply_volume_shaper},
        {"native_getVolumeShaperState", "(I)Landroid/media/VolumeShaper$State;",
         (void *)android_media_AudioTrack_get_volume_shaper_state},
        {"native_setPresentation", "(II)I", (void *)android_media_AudioTrack_setPresentation},
        {"native_getPortId", "()I", (void *)android_media_AudioTrack_get_port_id},
        {"native_set_delay_padding", "(II)V", (void *)android_media_AudioTrack_set_delay_padding},
        {"native_set_audio_description_mix_level_db", "(F)I",
         (void *)android_media_AudioTrack_setAudioDescriptionMixLeveldB},
        {"native_get_audio_description_mix_level_db", "([F)I",
         (void *)android_media_AudioTrack_getAudioDescriptionMixLeveldB},
        {"native_set_dual_mono_mode", "(I)I", (void *)android_media_AudioTrack_setDualMonoMode},
        {"native_get_dual_mono_mode", "([I)I", (void *)android_media_AudioTrack_getDualMonoMode},
        {"native_setLogSessionId", "(Ljava/lang/String;)V",
         (void *)android_media_AudioTrack_setLogSessionId},
        {"native_setPlayerIId", "(I)V", (void *)android_media_AudioTrack_setPlayerIId},
        {"native_setStartThresholdInFrames", "(I)I",
         (void *)android_media_AudioTrack_setStartThresholdInFrames},
        {"native_getStartThresholdInFrames", "()I",
         (void *)android_media_AudioTrack_getStartThresholdInFrames},
};

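// Each gMethods entry pairs the name and JNI signature of a native method declared in
// android/media/AudioTrack.java with its implementation above. As an illustration only (the
// authoritative declarations live in AudioTrack.java), the "native_setPresentation" entry with
// signature "(II)I" corresponds to a Java-side declaration roughly of the form:
//
//     private native int native_setPresentation(int presentationId, int programId);
//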
// field names found in android/media/AudioTrack.java
#define JAVA_POSTEVENT_CALLBACK_NAME "postEventFromNative"
#define JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME "mNativeTrackInJavaObj"
#define JAVA_JNIDATA_FIELD_NAME "mJniData"
#define JAVA_STREAMTYPE_FIELD_NAME "mStreamType"

// ----------------------------------------------------------------------------
// preconditions:
//    theClass is valid
bool android_media_getIntConstantFromClass(JNIEnv* pEnv, jclass theClass, const char* className,
                                           const char* constName, int* constVal) {
    jfieldID javaConst = NULL;
    javaConst = pEnv->GetStaticFieldID(theClass, constName, "I");
    if (javaConst != NULL) {
        *constVal = pEnv->GetStaticIntField(theClass, javaConst);
        return true;
    } else {
        ALOGE("Can't find %s.%s", className, constName);
        return false;
    }
}
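
// Illustrative use of android_media_getIntConstantFromClass(); the class and constant names
// below are examples only and are not taken from this file:
//
//     int encoding = 0;
//     if (!android_media_getIntConstantFromClass(env, audioFormatClass,
//             "android/media/AudioFormat", "ENCODING_PCM_16BIT", &encoding)) {
//         // Lookup failed; the helper has already logged the error.
//     }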

// ----------------------------------------------------------------------------
int register_android_media_AudioTrack(JNIEnv *env)
{
    // must be first
    int res = RegisterMethodsOrDie(env, kClassPathName, gMethods, NELEM(gMethods));

    javaAudioTrackFields.nativeTrackInJavaObj = NULL;
    javaAudioTrackFields.postNativeEventInJava = NULL;

    // Get the AudioTrack class
    jclass audioTrackClass = FindClassOrDie(env, kClassPathName);

    // Get the postEvent method
    javaAudioTrackFields.postNativeEventInJava = GetStaticMethodIDOrDie(env,
            audioTrackClass, JAVA_POSTEVENT_CALLBACK_NAME,
            "(Ljava/lang/Object;IIILjava/lang/Object;)V");

    // Get the variable fields
    //      nativeTrackInJavaObj
    javaAudioTrackFields.nativeTrackInJavaObj = GetFieldIDOrDie(env,
            audioTrackClass, JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME, "J");
    //      jniData
    javaAudioTrackFields.jniData = GetFieldIDOrDie(env,
            audioTrackClass, JAVA_JNIDATA_FIELD_NAME, "J");
    //      fieldStreamType
    javaAudioTrackFields.fieldStreamType = GetFieldIDOrDie(env,
            audioTrackClass, JAVA_STREAMTYPE_FIELD_NAME, "I");

    env->DeleteLocalRef(audioTrackClass);

    // initialize PlaybackParams field info
    gPlaybackParamsFields.init(env);

    gVolumeShaperFields.init(env);

    // optional check that the TunerConfiguration class and fields exist.
    TunerConfigurationHelper::initCheckOrDie(env);

    return res;
}


// ----------------------------------------------------------------------------