/frameworks/base/core/java/android/speech/tts/ |
D | FileSynthesisCallback.java |
      102: public int start(int sampleRateInHz, int audioFormat, int channelCount) {    [in start(), argument]
      104: Log.d(TAG, "FileSynthesisRequest.start(" + sampleRateInHz + "," + audioFormat    [in start()]
      107: if (audioFormat != AudioFormat.ENCODING_PCM_8BIT &&    [in start()]
      108: audioFormat != AudioFormat.ENCODING_PCM_16BIT &&    [in start()]
      109: audioFormat != AudioFormat.ENCODING_PCM_FLOAT) {    [in start()]
      110: Log.e(TAG, "Audio format encoding " + audioFormat + " not supported. Please use one " +    [in start()]
      114: mDispatcher.dispatchOnBeginSynthesis(sampleRateInHz, audioFormat, channelCount);    [in start()]
      132: mAudioFormat = audioFormat;    [in start()]
      203: int audioFormat = 0;    [in done(), local]
      228: audioFormat = mAudioFormat;    [in done()]
      [all …]
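
Both TTS synthesis callbacks accept only the three linear PCM encodings checked above (FileSynthesisCallback here, PlaybackSynthesisCallback below). A minimal sketch of the same validation as a standalone helper; isSupportedTtsEncoding is a hypothetical name, not a framework method:

    import android.media.AudioFormat;

    final class TtsEncodings {
        /** Mirrors the encoding check in FileSynthesisCallback/PlaybackSynthesisCallback.start(). */
        static boolean isSupportedTtsEncoding(int audioFormat) {
            return audioFormat == AudioFormat.ENCODING_PCM_8BIT
                    || audioFormat == AudioFormat.ENCODING_PCM_16BIT
                    || audioFormat == AudioFormat.ENCODING_PCM_FLOAT;
        }
    }
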
|
D | PlaybackSynthesisCallback.java |
      125: public int start(int sampleRateInHz, int audioFormat, int channelCount) {    [in start(), argument]
      126: if (DBG) Log.d(TAG, "start(" + sampleRateInHz + "," + audioFormat + "," + channelCount    [in start()]
      128: if (audioFormat != AudioFormat.ENCODING_PCM_8BIT &&    [in start()]
      129: audioFormat != AudioFormat.ENCODING_PCM_16BIT &&    [in start()]
      130: audioFormat != AudioFormat.ENCODING_PCM_FLOAT) {    [in start()]
      131: Log.w(TAG, "Audio format encoding " + audioFormat + " not supported. Please use one " +    [in start()]
      135: mDispatcher.dispatchOnBeginSynthesis(sampleRateInHz, audioFormat, channelCount);    [in start()]
      158: mAudioParams, sampleRateInHz, audioFormat, channelCount,    [in start()]
|
D | BlockingAudioTrack.java |
      79: int audioFormat, int channelCount) {    [in BlockingAudioTrack(), argument]
      82: mAudioFormat = audioFormat;    [in BlockingAudioTrack()]
      217: AudioFormat audioFormat = (new AudioFormat.Builder())    [in createStreamingAudioTrack(), local]
      222: audioFormat, bufferSizeInBytes, AudioTrack.MODE_STREAM,    [in createStreamingAudioTrack()]
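
createStreamingAudioTrack() above turns the engine-reported sampleRate/audioFormat/channelCount into an AudioFormat and a streaming AudioTrack. A rough public-API sketch of the same idea, assuming Builder-based construction (API 23+) and a simple doubled minimum buffer; the class and method names are illustrative:

    import android.media.AudioAttributes;
    import android.media.AudioFormat;
    import android.media.AudioTrack;

    final class TtsTrackFactory {
        /** Builds a streaming AudioTrack roughly the way createStreamingAudioTrack() does. */
        static AudioTrack create(int sampleRateInHz, int audioFormat, int channelMask) {
            AudioFormat format = new AudioFormat.Builder()
                    .setSampleRate(sampleRateInHz)
                    .setEncoding(audioFormat)        // e.g. AudioFormat.ENCODING_PCM_16BIT
                    .setChannelMask(channelMask)     // e.g. AudioFormat.CHANNEL_OUT_MONO
                    .build();
            int minBytes = AudioTrack.getMinBufferSize(sampleRateInHz, channelMask, audioFormat);
            return new AudioTrack.Builder()
                    .setAudioAttributes(new AudioAttributes.Builder()
                            .setUsage(AudioAttributes.USAGE_MEDIA)
                            .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
                            .build())
                    .setAudioFormat(format)
                    .setTransferMode(AudioTrack.MODE_STREAM)
                    .setBufferSizeInBytes(minBytes * 2)
                    .build();
        }
    }
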
|
D | ITextToSpeechCallback.aidl |
      71: … void onBeginSynthesis(String utteranceId, int sampleRateInHz, int audioFormat, int channelCount);    [in onBeginSynthesis(), argument]
|
D | SynthesisPlaybackQueueItem.java |
      80: int audioFormat, int channelCount, UtteranceProgressDispatcher dispatcher,    [in SynthesisPlaybackQueueItem(), argument]
      90: mAudioTrack = new BlockingAudioTrack(audioParams, sampleRate, audioFormat, channelCount);    [in SynthesisPlaybackQueueItem()]
|
D | SynthesisCallback.java |
      74: @SupportedAudioFormat int audioFormat,    [in start(), argument]
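
SynthesisCallback.start() is what a TTS engine calls from TextToSpeechService.onSynthesizeText() before streaming PCM. A minimal engine-side sketch, assuming the PCM data (16 kHz, mono, 16-bit) is already available; EngineSketch is a made-up holder class:

    import android.media.AudioFormat;
    import android.speech.tts.SynthesisCallback;
    import android.speech.tts.TextToSpeech;

    final class EngineSketch {
        /** Would be invoked from TextToSpeechService.onSynthesizeText(request, callback). */
        static void synthesize(SynthesisCallback callback, byte[] pcm16Mono16k) {
            if (callback.start(16000, AudioFormat.ENCODING_PCM_16BIT, 1) != TextToSpeech.SUCCESS) {
                return;
            }
            int max = callback.getMaxBufferSize();
            for (int off = 0; off < pcm16Mono16k.length; off += max) {
                // Feed the synthesized audio in chunks no larger than the callback allows.
                callback.audioAvailable(pcm16Mono16k, off, Math.min(max, pcm16Mono16k.length - off));
            }
            callback.done();
        }
    }
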
|
D | UtteranceProgressListener.java |
      99: …public void onBeginSynthesis(String utteranceId, int sampleRateInHz, int audioFormat, int channelC…    [in onBeginSynthesis(), argument]
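
On the client side the same parameters arrive via UtteranceProgressListener.onBeginSynthesis() (API 24+). A sketch of registering for it, assuming an already-initialized TextToSpeech instance; the log tag, text, and utterance id are placeholders:

    import android.speech.tts.TextToSpeech;
    import android.speech.tts.UtteranceProgressListener;
    import android.util.Log;

    final class TtsProgress {
        /** Registers a listener and queues one utterance so onBeginSynthesis() fires. */
        static void attach(TextToSpeech tts) {
            tts.setOnUtteranceProgressListener(new UtteranceProgressListener() {
                @Override public void onStart(String utteranceId) { }
                @Override public void onDone(String utteranceId) { }
                @Override public void onError(String utteranceId) { }

                @Override
                public void onBeginSynthesis(String utteranceId, int sampleRateInHz,
                        int audioFormat, int channelCount) {
                    // audioFormat is one of the AudioFormat.ENCODING_PCM_* constants.
                    Log.d("TtsProgress", utteranceId + ": " + sampleRateInHz + " Hz, encoding "
                            + audioFormat + ", " + channelCount + " channel(s)");
                }
            });
            tts.speak("Hello", TextToSpeech.QUEUE_ADD, null, "utt-1");
        }
    }
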
|
D | TextToSpeechService.java |
      686: void dispatchOnBeginSynthesis(int sampleRateInHz, int audioFormat, int channelCount);    [in dispatchOnBeginSynthesis(), argument]
      895: … public void dispatchOnBeginSynthesis(int sampleRateInHz, int audioFormat, int channelCount) {    [in dispatchOnBeginSynthesis(), argument]
      898: …patchOnBeginSynthesis(getCallerIdentity(), utteranceId, sampleRateInHz, audioFormat, channelCount);    [in dispatchOnBeginSynthesis(), local]
      1575: …sis(Object callerIdentity, String utteranceId, int sampleRateInHz, int audioFormat, int channelCou…    [in dispatchOnBeginSynthesis(), argument]
      1579: cb.onBeginSynthesis(utteranceId, sampleRateInHz, audioFormat, channelCount);    [in dispatchOnBeginSynthesis()]
|
/frameworks/base/media/java/android/media/ |
D | AudioFormat.java |
      516: public static int getBytesPerSample(int audioFormat)    [in getBytesPerSample(), argument]
      518: switch (audioFormat) {    [in getBytesPerSample()]
      529: throw new IllegalArgumentException("Bad audio format " + audioFormat);    [in getBytesPerSample()]
      534: public static boolean isValidEncoding(int audioFormat)    [in isValidEncoding(), argument]
      536: switch (audioFormat) {    [in isValidEncoding()]
      563: public static boolean isPublicEncoding(int audioFormat)    [in isPublicEncoding(), argument]
      565: switch (audioFormat) {    [in isPublicEncoding()]
      593: public static boolean isEncodingLinearPcm(int audioFormat)    [in isEncodingLinearPcm(), argument]
      595: switch (audioFormat) {    [in isEncodingLinearPcm()]
      620: throw new IllegalArgumentException("Bad audio format " + audioFormat);    [in isEncodingLinearPcm()]
      [all …]
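
getBytesPerSample() above is an internal AudioFormat helper. If the same mapping is needed in app code, it is easy to reproduce for the common PCM encodings; this sketch covers only the three encodings the TTS path accepts and is not framework API:

    import android.media.AudioFormat;

    final class PcmSizes {
        /** Bytes per sample for the three PCM encodings most common in this listing. */
        static int bytesPerSample(int audioFormat) {
            switch (audioFormat) {
                case AudioFormat.ENCODING_PCM_8BIT:  return 1;
                case AudioFormat.ENCODING_PCM_16BIT: return 2;
                case AudioFormat.ENCODING_PCM_FLOAT: return 4;
                default:
                    throw new IllegalArgumentException("Bad audio format " + audioFormat);
            }
        }

        /** Frame size = bytes per sample * channel count (linear PCM only). */
        static int bytesPerFrame(int audioFormat, int channelCount) {
            return bytesPerSample(audioFormat) * channelCount;
        }
    }
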
|
D | AudioRecord.java |
      298: public AudioRecord(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat,    [in AudioRecord(), argument]
      307: .setEncoding(audioFormat)    [in AudioRecord()]
      764: private void audioParamCheck(int audioSource, int sampleRateInHz, int audioFormat)    [in audioParamCheck(), argument]
      790: switch (audioFormat) {    [in audioParamCheck()]
      797: mAudioFormat = audioFormat;    [in audioParamCheck()]
      800: throw new IllegalArgumentException("Unsupported sample encoding " + audioFormat    [in audioParamCheck()]
      1025: static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {    [in getMinBufferSize(), argument]
      1044: int size = native_get_min_buff_size(sampleRateInHz, channelCount, audioFormat);    [in getMinBufferSize()]
      1923: int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat,    [in native_setup(), argument]
      1961: int sampleRateInHz, int channelCount, int audioFormat);    [in native_get_min_buff_size(), argument]
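
getMinBufferSize() and the (audioSource, sampleRateInHz, channelConfig, audioFormat, bufferSizeInBytes) constructor listed above are typically combined like this (RECORD_AUDIO permission assumed; the 44.1 kHz mono 16-bit values are illustrative):

    import android.media.AudioFormat;
    import android.media.AudioRecord;
    import android.media.MediaRecorder;

    final class RecorderFactory {
        static AudioRecord createMicRecorder() {
            int sampleRateInHz = 44100;
            int channelConfig = AudioFormat.CHANNEL_IN_MONO;
            int audioFormat = AudioFormat.ENCODING_PCM_16BIT;

            int minBytes = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
            if (minBytes == AudioRecord.ERROR || minBytes == AudioRecord.ERROR_BAD_VALUE) {
                throw new IllegalStateException("Unsupported capture parameters");
            }
            // Double the minimum buffer to leave headroom for scheduling jitter.
            return new AudioRecord(MediaRecorder.AudioSource.MIC,
                    sampleRateInHz, channelConfig, audioFormat, minBytes * 2);
        }
    }
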
|
D | AudioTrack.java |
      487: public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,    [in AudioTrack(), argument]
      490: this(streamType, sampleRateInHz, channelConfig, audioFormat,    [in AudioTrack()]
      545: public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,    [in AudioTrack(), argument]
      554: .setEncoding(audioFormat)    [in AudioTrack()]
      1245: int audioFormat, int mode) {    [in audioParamCheck(), argument]
      1259: if (audioFormat == AudioFormat.ENCODING_IEC61937    [in audioParamCheck()]
      1312: if (audioFormat == AudioFormat.ENCODING_DEFAULT) {    [in audioParamCheck()]
      1313: audioFormat = AudioFormat.ENCODING_PCM_16BIT;    [in audioParamCheck()]
      1316: if (!AudioFormat.isPublicEncoding(audioFormat)) {    [in audioParamCheck()]
      1319: mAudioFormat = audioFormat;    [in audioParamCheck()]
      [all …]
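
The stream-type constructor at lines 487/545 (deprecated in favor of AudioTrack.Builder, but still present) pairs with getMinBufferSize() the same way; a sketch for 16-bit stereo streaming playback:

    import android.media.AudioFormat;
    import android.media.AudioManager;
    import android.media.AudioTrack;

    final class PlayerFactory {
        static AudioTrack createPcm16StereoTrack(int sampleRateInHz) {
            int channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
            int audioFormat = AudioFormat.ENCODING_PCM_16BIT; // ENCODING_DEFAULT maps to this (line 1313)
            int minBytes = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
            return new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz,
                    channelConfig, audioFormat, minBytes * 2, AudioTrack.MODE_STREAM);
        }
    }
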
|
D | AudioPlaybackCaptureConfiguration.java |
      131: @NonNull AudioMix createAudioMix(@NonNull AudioFormat audioFormat) {    [in createAudioMix(), argument]
      133: .setFormat(audioFormat)    [in createAudioMix()]
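
createAudioMix() is internal plumbing; the public entry point that exercises it is AudioPlaybackCaptureConfiguration.Builder feeding AudioRecord.Builder (API 29+). A sketch, assuming a user-approved MediaProjection is already available and RECORD_AUDIO is granted; the format values are illustrative:

    import android.media.AudioAttributes;
    import android.media.AudioFormat;
    import android.media.AudioPlaybackCaptureConfiguration;
    import android.media.AudioRecord;
    import android.media.projection.MediaProjection;

    final class PlaybackCapture {
        static AudioRecord create(MediaProjection projection) {
            AudioPlaybackCaptureConfiguration config =
                    new AudioPlaybackCaptureConfiguration.Builder(projection)
                            .addMatchingUsage(AudioAttributes.USAGE_MEDIA)
                            .build();
            AudioFormat format = new AudioFormat.Builder()
                    .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                    .setSampleRate(44100)
                    .setChannelMask(AudioFormat.CHANNEL_IN_STEREO)
                    .build();
            // No audio source is set; the capture config routes playback audio instead.
            return new AudioRecord.Builder()
                    .setAudioFormat(format)
                    .setAudioPlaybackCaptureConfig(config)
                    .build();
        }
    }
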
|
D | AudioSystem.java |
      199: public static int audioFormatToBluetoothSourceCodec(int audioFormat) {    [in audioFormatToBluetoothSourceCodec(), argument]
      200: switch (audioFormat) {    [in audioFormatToBluetoothSourceCodec()]
      1314: public static native int setSurroundFormatEnabled(int audioFormat, boolean enabled);    [in setSurroundFormatEnabled(), argument]
|
/frameworks/base/core/java/android/bluetooth/ |
D | BluetoothAudioConfig.java |
      35: public BluetoothAudioConfig(int sampleRate, int channelConfig, int audioFormat) {    [in BluetoothAudioConfig(), argument]
      38: mAudioFormat = audioFormat;    [in BluetoothAudioConfig()]
      72: int audioFormat = in.readInt();
      73: return new BluetoothAudioConfig(sampleRate, channelConfig, audioFormat);
|
/frameworks/av/cmds/stagefright/ |
D | AudioPlayer.cpp |
      148: audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;    [in start(), local]
      151: sAudioFormatToPcmEncoding.map(pcmEncoding, &audioFormat);    [in start()]
      155: if (mapMimeToAudioFormat(audioFormat, mime) != OK) {    [in start()]
      157: audioFormat = AUDIO_FORMAT_INVALID;    [in start()]
      159: ALOGV("Mime type \"%s\" mapped to audio_format 0x%x", mime, audioFormat);    [in start()]
      163: if ((audioFormat == AUDIO_FORMAT_AAC) && format->findInt32(kKeyAACAOT, &aacaot)) {    [in start()]
      165: mapAACProfileToAudioFormat(audioFormat,(OMX_AUDIO_AACPROFILETYPE) aacaot);    [in start()]
      192: offloadInfo.format = audioFormat;    [in start()]
      200: mSampleRate, numChannels, channelMask, audioFormat,    [in start()]
|
/frameworks/av/services/oboeservice/ |
D | AAudioServiceEndpointMMAP.cpp |
      111: audio_format_t audioFormat = getFormat();    [in open(), local]
      112: if (audioFormat == AUDIO_FORMAT_DEFAULT || audioFormat == AUDIO_FORMAT_PCM_FLOAT) {    [in open()]
      113: audioFormat = AUDIO_FORMAT_PCM_16_BIT;    [in open()]
      115: config.format = audioFormat;    [in open()]
|
/frameworks/base/core/jni/ |
D | android_media_AudioFormat.h |
      51: static inline audio_format_t audioFormatToNative(int audioFormat)    [in audioFormatToNative(), argument]
      53: switch (audioFormat) {    [in audioFormatToNative()]
|
D | android_media_AudioRecord.cpp |
      187: jint audioFormat, jint buffSizeInBytes, jintArray jSession, jstring opPackageName,    [in android_media_AudioRecord_setup(), argument]
      250: audio_format_t format = audioFormatToNative(audioFormat);    [in android_media_AudioRecord_setup()]
      252: ALOGE("Error creating AudioRecord: unsupported audio format %d.", audioFormat);    [in android_media_AudioRecord_setup()]
      642: jint sampleRateInHertz, jint channelCount, jint audioFormat) {    [in android_media_AudioRecord_get_min_buff_size(), argument]
      645: sampleRateInHertz, channelCount, audioFormat);    [in android_media_AudioRecord_get_min_buff_size()]
      648: audio_format_t format = audioFormatToNative(audioFormat);    [in android_media_AudioRecord_get_min_buff_size()]
|
D | android_media_AudioTrack.cpp |
      219: jint audioFormat, jint buffSizeInBytes, jint memoryMode, jintArray jSession,    [in android_media_AudioTrack_setup(), argument]
      224: jSampleRate, channelPositionMask, channelIndexMask, audioFormat, buffSizeInBytes,    [in android_media_AudioTrack_setup()]
      279: audio_format_t format = audioFormatToNative(audioFormat);    [in android_media_AudioTrack_setup()]
      281: ALOGE("Error creating AudioTrack: unsupported audio format %d.", audioFormat);    [in android_media_AudioTrack_setup()]
      635: static jint writeToTrack(const sp<AudioTrack>& track, jint audioFormat, const T *data,    [in writeToTrack(), argument]
      1117: jint sampleRateInHertz, jint channelCount, jint audioFormat) {    [in android_media_AudioTrack_get_min_buff_size(), argument]
      1127: const audio_format_t format = audioFormatToNative(audioFormat);    [in android_media_AudioTrack_get_min_buff_size()]
|
/frameworks/av/media/libaaudio/src/core/ |
D | AAudioStreamParameters.h |
      63: void setFormat(audio_format_t audioFormat) {    [in setFormat(), argument]
      64: mAudioFormat = audioFormat;    [in setFormat()]
|
/frameworks/base/media/java/android/media/soundtrigger/ |
D | SoundTriggerDetector.java |
      124: AudioFormat audioFormat, int captureSession, byte[] data) {    [in EventPayload(), argument]
      128: mAudioFormat = audioFormat;    [in EventPayload()]
|
/frameworks/base/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/ |
D | MediaDecoder.java |
      264: MediaFormat audioFormat = mMediaExtractor.getTrackFormat(mAudioTrackIndex);    [in onStart(), local]
      265: mAudioTrackDecoder = new AudioTrackDecoder(mAudioTrackIndex, audioFormat, this);    [in onStart()]
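
MediaDecoder obtains the audio track's MediaFormat from MediaExtractor before wiring up its AudioTrackDecoder. A self-contained sketch of that track lookup; the file path argument and class name are placeholders:

    import android.media.MediaExtractor;
    import android.media.MediaFormat;
    import java.io.IOException;

    final class AudioTrackFinder {
        /** Returns the MediaFormat of the first audio track, or null if there is none. */
        static MediaFormat findAudioFormat(String path) throws IOException {
            MediaExtractor extractor = new MediaExtractor();
            try {
                extractor.setDataSource(path);
                for (int i = 0; i < extractor.getTrackCount(); i++) {
                    MediaFormat format = extractor.getTrackFormat(i);
                    String mime = format.getString(MediaFormat.KEY_MIME);
                    if (mime != null && mime.startsWith("audio/")) {
                        return format; // e.g. query KEY_SAMPLE_RATE / KEY_CHANNEL_COUNT here
                    }
                }
                return null;
            } finally {
                extractor.release();
            }
        }
    }
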
|
/frameworks/av/media/libmediaplayerservice/nuplayer/ |
D | NuPlayerRenderer.cpp |
      1917: audio_format_t audioFormat =    [in onOpenAudioSink(), local]
      1924: status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());    [in onOpenAudioSink()]
      1932: mime.c_str(), audioFormat);    [in onOpenAudioSink()]
      1938: if (audioFormat == AUDIO_FORMAT_AAC    [in onOpenAudioSink()]
      1942: audioFormat,    [in onOpenAudioSink()]
      1952: offloadInfo.format = audioFormat;    [in onOpenAudioSink()]
      1976: audioFormat,    [in onOpenAudioSink()]
      2023: audioFormat,    [in onOpenAudioSink()]
      2062: audioFormat,    [in onOpenAudioSink()]
|
D | NuPlayer.h |
      286: void determineAudioModeChange(const sp<AMessage> &audioFormat);
|
/frameworks/base/core/java/android/service/voice/ |
D | AlwaysOnHotwordDetector.java |
      216: AudioFormat audioFormat, int captureSession, byte[] data) {    [in EventPayload(), argument]
      220: mAudioFormat = audioFormat;    [in EventPayload()]
|