audiooutputaudiotrack.cpp (MythTV master)
#include "config.h"

using namespace std;

#include <QAndroidJniObject>
#include <QAndroidJniEnvironment>
#include <android/log.h>

#include "mythlogging.h"
#include "audiooutputaudiotrack.h"

#define CHANNELS_MIN 1
#define CHANNELS_MAX 8

#define ANDROID_EXCEPTION_CHECK \
    if (env->ExceptionCheck()) { \
        env->ExceptionDescribe(); \
        env->ExceptionClear(); \
        exception=true; \
    } else \
        exception=false;

// clear exception without checking
#define ANDROID_EXCEPTION_CLEAR \
    if (env->ExceptionCheck()) { \
        env->ExceptionDescribe(); \
        env->ExceptionClear(); \
    }
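// Both macros expect a QAndroidJniEnvironment named "env" in the enclosing scope;
// ANDROID_EXCEPTION_CHECK additionally expects a local bool named "exception".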

#define LOC QString("AudioTrack: ")

// Constants from Android Java API
// class android.media.AudioFormat
#define AF_CHANNEL_OUT_MONO 4
#define AF_ENCODING_AC3 5
#define AF_ENCODING_E_AC3 6
#define AF_ENCODING_DTS 7
#define AF_ENCODING_DOLBY_TRUEHD 14
#define AF_ENCODING_PCM_8BIT 3
#define AF_ENCODING_PCM_16BIT 2
#define AF_ENCODING_PCM_FLOAT 4
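// These numeric values mirror the corresponding android.media.AudioFormat
// ENCODING_* and CHANNEL_OUT_* constants and must stay in sync with the platform.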

// for debugging
#include <android/log.h>

AudioOutputAudioTrack::AudioOutputAudioTrack(const AudioSettings &settings) :
    AudioOutputBase(settings)
{
    InitSettings(settings);
    if (settings.m_init)
        Reconfigure(settings);
}

AudioOutputAudioTrack::~AudioOutputAudioTrack()
{
    KillAudio();
}

bool AudioOutputAudioTrack::OpenDevice(void)
{
    bool exception=false;
    QAndroidJniEnvironment env;
    jint encoding = 0;
    jint sampleRate = m_sampleRate;

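    // m_bitsPer10Frames holds the number of output bits carried by 10 audio frames,
    // so m_bitsPer10Frames / 80 is the number of bytes per audio frame.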
    // m_bitsPer10Frames = output bits per 10 frames
    m_bitsPer10Frames = m_outputBytesPerFrame * 80;

    if ((m_passthru || m_enc) && m_sourceBitRate > 0)
        m_bitsPer10Frames = m_sourceBitRate * 10 / m_sampleRate;

    // 50 milliseconds
    m_fragmentSize = m_bitsPer10Frames * m_sampleRate / 1600;

    if (m_fragmentSize < 1536)
        m_fragmentSize = 1536;

    if (m_passthru || m_enc)
    {
        switch (m_codec)
        {
            case AV_CODEC_ID_AC3:
                encoding = AF_ENCODING_AC3;
                break;
            case AV_CODEC_ID_DTS:
                encoding = AF_ENCODING_DTS;
                break;
            case AV_CODEC_ID_EAC3:
                encoding = AF_ENCODING_E_AC3;
                break;
            case AV_CODEC_ID_TRUEHD:
                encoding = AF_ENCODING_DOLBY_TRUEHD;
                break;

            default:
                LOG(VB_GENERAL, LOG_ERR, LOC + __func__
                    + QString(" No support for audio passthru encoding %1").arg(m_codec));
                return false;
        }
    }
    else
    {
        switch (m_outputFormat)
        {
            case FORMAT_U8:
                // This could be used to get the value from java instead
                // of having these constants in our header file.
                // encoding = QAndroidJniObject::getStaticField<jint>
                //     ("android.media.AudioFormat","ENCODING_PCM_8BIT");
                encoding = AF_ENCODING_PCM_8BIT;
                break;
            case FORMAT_S16:
                encoding = AF_ENCODING_PCM_16BIT;
                break;
            case FORMAT_FLT:
                encoding = AF_ENCODING_PCM_FLOAT;
                break;
            default:
                LOG(VB_GENERAL, LOG_ERR, LOC + __func__
                    + QString(" No support for audio format %1").arg(m_outputFormat));
                return false;
        }
    }

    jint minBufferSize = m_fragmentSize * 4;
    m_soundcardBufferSize = minBufferSize;
    jint channels = m_channels;

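    // Construct the Java helper class org.mythtv.audio.AudioOutputAudioTrack;
    // "(IIII)V" is the JNI signature of its four-int constructor, called here
    // with (encoding, sampleRate, minBufferSize, channels).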
    m_audioTrack = new QAndroidJniObject("org/mythtv/audio/AudioOutputAudioTrack",
                        "(IIII)V", encoding, sampleRate, minBufferSize, channels);
    ANDROID_EXCEPTION_CHECK

    if (exception)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + __func__
            + QString(" Java Exception when creating AudioTrack"));
        m_audioTrack = nullptr;
        return false;
    }
    if (!m_passthru && !m_enc)
    {
        jint bitsPer10Frames = m_bitsPer10Frames;
        m_audioTrack->callMethod<void>("setBitsPer10Frames","(I)V",bitsPer10Frames);
    }
    return true;
}

void AudioOutputAudioTrack::CloseDevice(void)
{
    QAndroidJniEnvironment env;
    if (m_audioTrack)
    {
        m_audioTrack->callMethod<void>("release");
        ANDROID_EXCEPTION_CLEAR
        delete m_audioTrack;
        m_audioTrack = nullptr;
    }
}

AudioOutputSettings* AudioOutputAudioTrack::GetOutputSettings(bool digital)
{
    bool exception=false;
    QAndroidJniEnvironment env;
    jint bufsize = 0;

    AudioOutputSettings *settings = new AudioOutputSettings();

    int supportedrate = 0;
    while (int rate = settings->GetNextRate())
    {
        // Checking for valid rates using getMinBufferSize.
        // See https://stackoverflow.com/questions/8043387/android-audiorecord-supported-sampling-rates/22317382
        bufsize = QAndroidJniObject::callStaticMethod<jint>
            ("android/media/AudioTrack", "getMinBufferSize", "(III)I",
             rate, AF_CHANNEL_OUT_MONO, AF_ENCODING_PCM_16BIT);
        ANDROID_EXCEPTION_CHECK
        if (bufsize > 0 && !exception)
        {
            settings->AddSupportedRate(rate);
            // save any supported rate for later
            supportedrate = rate;
        }
    }

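    // getMinBufferSize returns a positive buffer size only for parameter
    // combinations the device supports; an error code (or a Java exception)
    // marks the combination as unsupported.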
    // Checking for valid format using getMinBufferSize.
    bufsize = QAndroidJniObject::callStaticMethod<jint>
        ("android/media/AudioTrack", "getMinBufferSize", "(III)I",
         supportedrate, AF_CHANNEL_OUT_MONO, AF_ENCODING_PCM_8BIT);
    ANDROID_EXCEPTION_CHECK
    if (bufsize > 0 && !exception)
        settings->AddSupportedFormat(FORMAT_U8);
    // 16bit always supported
    settings->AddSupportedFormat(FORMAT_S16);

    bufsize = QAndroidJniObject::callStaticMethod<jint>
        ("android/media/AudioTrack", "getMinBufferSize", "(III)I",
         supportedrate, AF_CHANNEL_OUT_MONO, AF_ENCODING_PCM_FLOAT);
    ANDROID_EXCEPTION_CHECK
    if (bufsize > 0 && !exception)
        settings->AddSupportedFormat(FORMAT_FLT);

    for (uint channels = CHANNELS_MIN; channels <= CHANNELS_MAX; channels++)
    {
        settings->AddSupportedChannels(channels);
    }
    settings->setPassthrough(0);

    return settings;
}

void AudioOutputAudioTrack::WriteAudio(unsigned char* aubuf, int size)
{
    bool exception=false;
    QAndroidJniEnvironment env;
    if (m_actuallyPaused)
    {
        if (m_audioTrack)
        {
            jboolean param = true;
            m_audioTrack->callMethod<void>("pause","(Z)V",param);
            ANDROID_EXCEPTION_CLEAR
        }
        return;
    }
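    // The data is handed to the Java side as a byte[]; the JNI local reference
    // created for it is released with DeleteLocalRef after the write so that
    // references do not accumulate on this thread.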
    // create a java byte array
    jbyteArray arr = env->NewByteArray(size);
    env->SetByteArrayRegion(arr, 0, size, reinterpret_cast<jbyte*>(aubuf));
    jint ret = -99;
    if (m_audioTrack)
    {
        ret = m_audioTrack->callMethod<jint>("write","([BI)I", arr, size);
        ANDROID_EXCEPTION_CHECK
    }
    env->DeleteLocalRef(arr);
    if (ret != size || exception)
        LOG(VB_GENERAL, LOG_ERR, LOC + __func__
            + QString(" Audio Write failed, size %1 return %2 exception %3")
            .arg(size).arg(ret).arg(exception));

    LOG(VB_AUDIO | VB_TIMESTAMP, LOG_INFO, LOC + __func__
        + QString(" WriteAudio size=%1 written=%2")
        .arg(size).arg(ret));
}

int AudioOutputAudioTrack::GetBufferedOnSoundcard(void) const
{
    bool exception=false;
    QAndroidJniEnvironment env;
    int buffered (0);
    if (m_audioTrack)
    {
        // This may return a negative value, because there
        // is data already played that is still in the "Audio circular buffer"
        buffered
            = m_audioTrack->callMethod<jint>("getBufferedBytes");
        ANDROID_EXCEPTION_CHECK
        if (exception)
            buffered = 0;
        int latency
            = m_audioTrack->callMethod<jint>("getLatencyViaHeadPosition");
        ANDROID_EXCEPTION_CHECK
        if (exception)
            latency = 0;
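        // latency is in milliseconds: latency * m_sampleRate / 1000 converts it
        // to frames, and m_bitsPer10Frames / 80 is the number of bytes per frame.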
        buffered += latency * m_sampleRate / 1000 * m_bitsPer10Frames / 80;
    }

    return buffered;
}

bool AudioOutputAudioTrack::AddData(void *in_buffer, int in_len,
                                    int64_t timecode, int in_frames)
{
    bool ret = AudioOutputBase::AddData
        (in_buffer, in_len, timecode, in_frames);

    return ret;
}

void AudioOutputAudioTrack::Pause(bool paused)
{
    AudioOutputBase::Pause(paused);
    if (m_audioTrack)
    {
        jboolean param = paused;
        m_audioTrack->callMethod<void>("pause","(Z)V",param);
    }
}

void AudioOutputAudioTrack::SetSourceBitrate(int rate)
{
    AudioOutputBase::SetSourceBitrate(rate);
    if (m_sourceBitRate > 0
        && (m_passthru || m_enc)
        && m_audioTrack)
    {
        m_bitsPer10Frames = m_sourceBitRate * 10 / m_sampleRate;
        jint bitsPer10Frames = m_bitsPer10Frames;
        m_audioTrack->callMethod<void>("setBitsPer10Frames","(I)V",bitsPer10Frames);
    }
}

bool AudioOutputAudioTrack::StartOutputThread(void)
{
    QAndroidJniEnvironment env;
    if (m_audioTrack)
    {
        m_audioTrack->callMethod<void>("setOutputThread","(Z)V",true);
        ANDROID_EXCEPTION_CLEAR
    }

    return AudioOutputBase::StartOutputThread();
}

void AudioOutputAudioTrack::StopOutputThread(void)
{
    QAndroidJniEnvironment env;
    if (m_audioTrack)
    {
        m_audioTrack->callMethod<void>("setOutputThread","(Z)V",false);
        ANDROID_EXCEPTION_CLEAR
    }

    AudioOutputBase::StopOutputThread();
}