Paradox Game Engine  v1.0.0 beta06
SoundEffectInstance.Android.cs
// Copyright (c) 2014 Silicon Studio Corp. (http://siliconstudio.co.jp)
// This file is distributed under GPL v3. See LICENSE.md for details.
#if SILICONSTUDIO_PLATFORM_ANDROID

using System;
using System.Collections.Generic;
using System.Reflection;
using System.Runtime.InteropServices;

using Android.Media;
using Android.Runtime;

using SiliconStudio.Core.Mathematics;
using SiliconStudio.Paradox.Audio.Wave;

using Math = System.Math;

namespace SiliconStudio.Paradox.Audio
{
    partial class SoundEffectInstance
    {
        private const int MaximumNumberOfTracks = 8;
        private const int NumberOfSubBuffersInAudioTrack = 2;
        protected internal const int SoundEffectInstanceFrameRate = 44100;

        private static readonly Queue<TrackInfo> audioTrackPool = new Queue<TrackInfo>();

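        // Cached JNI handles used to call AudioTrack.write(byte[], int, int) directly; the JNI signature
        // "([BII)I" reads as (byte[] audioData, int offsetInBytes, int sizeInBytes) returning the number of bytes written.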
        private static readonly IntPtr audioTrackClassJavaHandle = JNIEnv.FindClass("android/media/AudioTrack");
        private static readonly IntPtr audioTrackWriteMethodID = JNIEnv.GetMethodID(audioTrackClassJavaHandle, "write", "([BII)I");

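        // Delegate matching the native JNI SetByteArrayRegion entry point (env, array, start, len, source buffer),
        // used to copy wave data from unmanaged memory straight into a Java byte[] without an intermediate managed copy.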
        private delegate void SetJavaByteArrayRegionDelegate(IntPtr handle, IntPtr destination, int offset, int length, IntPtr sourceByteArray);

        private static SetJavaByteArrayRegionDelegate setJavaByteArray;

        private static IntPtr blankJavaDataBuffer;

        private int readBufferPosition;
        private int writeBufferPosition;

        private TrackInfo currentTrack;
        private readonly object currentTrackLock = new object();

        private bool exitLoopRequested;

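        /// <summary>
        /// A pooled Android <see cref="AudioTrack"/> in streaming mode, together with its Java-side staging buffer.
        /// A track is temporarily bound to a <see cref="SoundEffectInstance"/> while that instance is playing.
        /// </summary>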
        private class TrackInfo : IDisposable
        {
            private readonly byte[] dataBuffer;

            private readonly int bufferSize;

            private readonly IntPtr javaDataBuffer;

            private readonly JValue[] javaWriteCallValues;

            public readonly AudioTrack Track;

            public int BuffersToWrite;

            public bool ShouldStop;

            public SoundEffectInstance CurrentInstance;

            public TrackInfo(AudioTrack track, IntPtr javaDataBuffer, byte[] dataBuffer, int bufferSize)
            {
                Track = track;
                this.javaDataBuffer = javaDataBuffer;
                this.dataBuffer = dataBuffer;
                this.bufferSize = bufferSize;

                javaWriteCallValues = new JValue[3];

                // add the callback feeding the audio track and updating the play status
                Track.PeriodicNotification += (sender, args) => OnPeriodFinished();
                var status = Track.SetPositionNotificationPeriod(bufferSize / 4); // in frames (2 channels * 2 bytes per sample = 4 bytes per frame)
                if (status != TrackStatus.Success)
                    throw new AudioSystemInternalException("AudioTrack.SetPositionNotificationPeriod failed and failure was not handled. [error=" + status + "].");
            }

            /// <summary>
            /// Should be called with the <see cref="TrackInfo"/> locked.
            /// </summary>
            public void WriteNextAudioBufferToTrack()
            {
                --BuffersToWrite;

                var instance = CurrentInstance;
                var soundEffect = instance.soundEffect;

                var sizeWritten = 0;
                while (sizeWritten < bufferSize)
                {
                    var sizeToWrite = bufferSize - sizeWritten;
                    var shouldWriteBlank = instance.writeBufferPosition >= soundEffect.WaveDataSize;

                    if (!shouldWriteBlank)
                    {
                        sizeToWrite = Math.Min(sizeToWrite, soundEffect.WaveDataSize - instance.writeBufferPosition);

                        if (setJavaByteArray == null)
                        {
                            Array.Copy(soundEffect.WaveDataArray, instance.writeBufferPosition, dataBuffer, 0, sizeToWrite);
                            JNIEnv.CopyArray(dataBuffer, javaDataBuffer);
                        }
                        else
                        {
                            setJavaByteArray(JNIEnv.Handle, javaDataBuffer, 0, sizeToWrite, soundEffect.WaveDataPtr + instance.writeBufferPosition);
                        }
                    }

                    javaWriteCallValues[0] = new JValue(shouldWriteBlank ? blankJavaDataBuffer : javaDataBuffer);
                    javaWriteCallValues[1] = new JValue(0);
                    javaWriteCallValues[2] = new JValue(sizeToWrite);

                    var writtenSize = JNIEnv.CallIntMethod(Track.Handle, audioTrackWriteMethodID, javaWriteCallValues);

                    sizeWritten += writtenSize;
                    instance.writeBufferPosition += writtenSize;

                    if (instance.writeBufferPosition >= soundEffect.WaveDataSize && instance.IsLooped && !instance.exitLoopRequested)
                        instance.writeBufferPosition = 0;

                    if (writtenSize != sizeToWrite) // could not write all the data, due to a call to pause or stop
                        break; // all subsequent writes will return 0 bytes until the next play
                }
            }

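            // Called every time one sub-buffer worth of frames has been played: advances the read position,
            // refills the track while data remains, and stops the AudioTrack once every sub-buffer is free
            // again (end of data or stop requested).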
            private void OnPeriodFinished()
            {
                int audioDataBufferSize;
                SoundEffectInstance instance;

                lock (this)
                {
                    ++BuffersToWrite;
                    if (BuffersToWrite == NumberOfSubBuffersInAudioTrack)
                        Track.Stop();

                    instance = CurrentInstance;
                    if (instance == null)
                        return;

                    instance.readBufferPosition += bufferSize;
                    audioDataBufferSize = instance.soundEffect.WaveDataSize;

                    while (instance.readBufferPosition >= audioDataBufferSize && instance.IsLooped && !instance.exitLoopRequested)
                        instance.readBufferPosition -= audioDataBufferSize;

                    if (instance.readBufferPosition < audioDataBufferSize && !ShouldStop)
                        WriteNextAudioBufferToTrack();
                }
                if ((instance.readBufferPosition >= audioDataBufferSize || ShouldStop) && instance.PlayState != SoundPlayState.Paused)
                    instance.Stop();
            }

            public void Dispose()
            {
                Track.Release();
                Track.Dispose();
                JNIEnv.DeleteGlobalRef(javaDataBuffer);
            }
        }

        internal static void CreateAudioTracks()
        {
            const int audioMemoryOS = 1024 * 1024; // the audio client has only this amount of memory available for streaming (see: AudioFlinger::Client constructor -> https://android.googlesource.com/platform/frameworks/av/+/126a630/services/audioflinger/AudioFlinger.cpp : line 1153)
            const int memoryDealerHeaderSize = 64; // size taken by the header of each memory section of the MemoryDealer

            GetSetArrayRegionFunctionPointer();

            // the minimum buffer size an audio track can have in streaming mode (with this audio format)
            var minimumBufferSize = AudioTrack.GetMinBufferSize(SoundEffectInstanceFrameRate, ChannelOut.Stereo, Encoding.Pcm16bit);

            // the amount of memory that should be kept free in order to play sound music correctly (note: we need to be able to play 2 musics simultaneously because destruction is asynchronous)
            var memoryNeededForSoundMusic = 2 * (GetUpperPowerOfTwo(minimumBufferSize) + memoryDealerHeaderSize);

            // the size of one of our sub-buffers => at least 2 bytes (16-bit samples) * 2 channels * 8000 frames (~180 ms at 44100 Hz)
            var subBufferSize = Math.Max((int)Math.Ceiling(minimumBufferSize / (float)NumberOfSubBuffersInAudioTrack), 2 * 2 * 8000);

            // the memory taken by the creation of one audio track for sound effects
            var memoryNeededAudioTrack = GetUpperPowerOfTwo(subBufferSize * NumberOfSubBuffersInAudioTrack);

            // the java buffer used to copy blank sound data
            blankJavaDataBuffer = JNIEnv.NewGlobalRef(JNIEnv.NewArray(new byte[subBufferSize]));

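            // Worked example (illustrative only, GetMinBufferSize is device dependent): if minimumBufferSize were 8192 bytes,
            // then subBufferSize = max(4096, 32000) = 32000, memoryNeededAudioTrack = 65536 and memoryNeededForSoundMusic
            // = 2 * (8192 + 64) = 16512, so all MaximumNumberOfTracks (8) tracks fit in the 1 MB budget
            // (1048576 - 8 * 65536 = 524288 >= 16512).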
            // create the pool of audio tracks
            var trackNumber = 0;
            while (trackNumber < MaximumNumberOfTracks && audioMemoryOS - (trackNumber + 1) * memoryNeededAudioTrack >= memoryNeededForSoundMusic)
            {
                // create the audio track
                var audioTrack = new AudioTrack(Stream.Music, SoundEffectInstanceFrameRate, ChannelOut.Stereo, Encoding.Pcm16bit,
                                                NumberOfSubBuffersInAudioTrack * subBufferSize, AudioTrackMode.Stream);

                if (audioTrack.State == AudioTrackState.Uninitialized) // the maximum number of tracks is reached
                    break;

                // create the c# buffer for internal copy
                var dataBuffer = new byte[subBufferSize];

                // create the java buffer
                var javaDataBuffer = JNIEnv.NewGlobalRef(JNIEnv.NewArray(dataBuffer));

                // add the new track to the audio track pool
                var newTrackInfo = new TrackInfo(audioTrack, javaDataBuffer, dataBuffer, subBufferSize) { BuffersToWrite = NumberOfSubBuffersInAudioTrack };
                audioTrackPool.Enqueue(newTrackInfo);

                ++trackNumber;
            }
        }

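        /// <summary>
        /// Returns the smallest power of two greater than or equal to <paramref name="size"/> (and at least 2),
        /// e.g. GetUpperPowerOfTwo(48000) = 65536.
        /// </summary>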
        private static int GetUpperPowerOfTwo(int size)
        {
            var upperPowerOfTwo = 2;
            while (upperPowerOfTwo < size)
                upperPowerOfTwo = upperPowerOfTwo << 1;

            return upperPowerOfTwo;
        }

        /// <summary>
        /// Hack using reflection to get a pointer to the JNI SetByteArrayRegion function.
        /// </summary>
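        /// <remarks>
        /// If the lookup fails (e.g. the Mono runtime internals change), <see cref="setJavaByteArray"/> stays null and
        /// <see cref="TrackInfo.WriteNextAudioBufferToTrack"/> falls back to a managed Array.Copy followed by JNIEnv.CopyArray.
        /// </remarks>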
        private static void GetSetArrayRegionFunctionPointer()
        {
            // ReSharper disable PossibleNullReferenceException
            try
            {
                var jniEnvGetter = typeof(JNIEnv).GetMethod("get_Env", BindingFlags.Static | BindingFlags.NonPublic);
                var jniEnvInstanceField = jniEnvGetter.ReturnType.GetField("JniEnv", BindingFlags.NonPublic | BindingFlags.Instance);
                var setByteArrayFunctionField = jniEnvInstanceField.FieldType.GetField("SetByteArrayRegion", BindingFlags.Public | BindingFlags.Instance);

                var jniEnvInstance = jniEnvInstanceField.GetValue(jniEnvGetter.Invoke(null, null));
                var pointerToSetByteArrayFunction = (IntPtr)setByteArrayFunctionField.GetValue(jniEnvInstance);

                setJavaByteArray = Marshal.GetDelegateForFunctionPointer<SetJavaByteArrayRegionDelegate>(pointerToSetByteArrayFunction);
            }
            catch (Exception)
            {
                setJavaByteArray = null;
            }
            // ReSharper restore PossibleNullReferenceException
        }

        internal static void StaticDestroy()
        {
            JNIEnv.DeleteGlobalRef(blankJavaDataBuffer);

            // release the created audio tracks and java buffers.
            foreach (var trackInfo in audioTrackPool)
                trackInfo.Dispose();

            audioTrackPool.Clear();
        }

        internal void UpdateStereoVolumes()
        {
            lock (currentTrackLock)
            {
                if (currentTrack == null) // did not manage to obtain a track
                    return;

                // Volume, panChannelVolumes and localizationChannelVolumes are all in [0,1], so their product is too; no clamp is needed.
                var status = currentTrack.Track.SetStereoVolume(Volume * panChannelVolumes[0] * localizationChannelVolumes[0], Volume * panChannelVolumes[1] * localizationChannelVolumes[1]);
                if (status != TrackStatus.Success)
                    throw new AudioSystemInternalException("AudioTrack.SetStereoVolume failed and failure was not handled. [error:" + status + "]");
            }
        }

        internal override void UpdateLooping()
        {
        }

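        // Play: acquire a track from the pool if the instance is stopped (possibly stealing one from the least
        // significant playing effect), re-apply looping/pitch/volume, then start the AudioTrack and fill any free sub-buffers.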
        internal override void PlayImpl()
        {
            lock (currentTrackLock)
            {
                if (currentTrack != null && PlayState != SoundPlayState.Paused)
                    throw new AudioSystemInternalException("SoundEffectInstance.PlayImpl was called with play state '" + PlayState + "' and currentTrack not null.");

                if (currentTrack == null) // the audio instance is stopped.
                {
                    currentTrack = TryGetAudioTrack();
                    if (currentTrack == null) // could not obtain a track -> give up and return early
                    {
                        AudioEngine.Logger.Info("Failed to obtain an audio track for SoundEffectInstance '{0}'. The Play call will be ignored.", Name);
                        return;
                    }

                    // Update the track state
                    UpdateLooping();
                    UpdatePitch();
                    UpdateStereoVolumes();
                }

                lock (currentTrack)
                {
                    currentTrack.CurrentInstance = this;
                    currentTrack.ShouldStop = false;

                    currentTrack.Track.Play();

                    while (currentTrack.BuffersToWrite > 0)
                        currentTrack.WriteNextAudioBufferToTrack();
                }
            }
        }

        internal override void PauseImpl()
        {
            lock (currentTrackLock)
            {
                if (currentTrack == null) // did not manage to obtain a track
                    return;

                currentTrack.ShouldStop = true;
            }
        }

        internal override void StopImpl()
        {
            exitLoopRequested = false;

            lock (currentTrackLock)
            {
                if (currentTrack == null) // did not manage to obtain a track
                    return;

                // update the track info
                lock (currentTrack)
                {
                    currentTrack.ShouldStop = true;
                    currentTrack.CurrentInstance = null;
                }

                // reset the playback position
                readBufferPosition = 0;
                writeBufferPosition = 0;

                // add the track back to the track pool
                lock (audioTrackPool)
                    audioTrackPool.Enqueue(currentTrack);

                // avoid concurrency problems with the EndOfTrack callback
                lock (currentTrackLock)
                    currentTrack = null;
            }
        }

        internal override void ExitLoopImpl()
        {
            exitLoopRequested = true;
        }

        internal virtual void CreateVoice(WaveFormat format)
        {
            // nothing to do here
        }

        private TrackInfo TryGetAudioTrack()
        {
            // try to get a track from the pool
            lock (audioTrackPool)
            {
                if (audioTrackPool.Count > 0)
                    return audioTrackPool.Dequeue();
            }

            // the pool was empty -> try to stop irrelevant instances to free a track
            var soundEffectToStop = AudioEngine.GetLeastSignificativeSoundEffect();
            if (soundEffectToStop == null)
                return null;

            // stop the sound effect instances and retry to get a track
            soundEffectToStop.StopAllInstances();

            lock (audioTrackPool)
            {
                if (audioTrackPool.Count > 0)
                    return audioTrackPool.Dequeue();
            }

            return null;
        }

        internal override void LoadBuffer()
        {
        }

        internal virtual void DestroyVoice()
        {
            lock (currentTrackLock)
            {
                if (currentTrack != null) // the voice was not destroyed by the previous stop, or the instance has not been stopped
                    throw new AudioSystemInternalException("The AudioTrackInfo was not null when destroying the SoundEffectInstance.");
            }
        }

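        // Pitch is expressed in octaves, so 2^Pitch converts it to a frequency ratio: for example Pitch = 1 gives
        // 2 * 44100 = 88200 Hz and Pitch = -1 gives 22050 Hz; the combined ratio (including the Doppler factor) is clamped to [0.5, 2].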
        internal void UpdatePitch()
        {
            lock (currentTrackLock)
            {
                if (currentTrack == null) // did not manage to obtain a track
                    return;

                var status = currentTrack.Track.SetPlaybackRate((int)(MathUtil.Clamp((float)Math.Pow(2, Pitch) * dopplerPitchFactor, 0.5f, 2f) * SoundEffectInstanceFrameRate)); // conversion from octaves to frequency
                if (status != (int)TrackStatus.Success)
                    throw new AudioSystemInternalException("AudioTrack.SetPlaybackRate failed and failure was not handled. [error:" + status + "]");
            }
        }

        internal virtual void PlatformSpecificDisposeImpl()
        {
            DestroyVoice();
        }

        private void Apply3DImpl(AudioListener listener, AudioEmitter emitter)
        {
            // Since Android has no built-in function to perform 3D sound localization, we try to mimic the behaviour of XAudio2 here.

            // An analysis of the XAudio2 left/right stereo balance with respect to the 3D world position showed
            // that the volume distribution is symmetric with respect to the Up/Down and Front/Back planes.
            // Moreover, the left/right distribution seems to follow a third degree polynomial:
            // Volume_left(a) = 2(c-1)*a^3 - 3(c-1)*a^2 + c*a, where c is a constant close to c = 1.45f and a is the angle normalized to [0,1]
            // Volume_right(a) = 1 - Volume_left(a)

            // As for the signal attenuation with respect to distance, the model follows a simple inverse-distance law, as explained in the XAudio2 documentation
            // ( http://msdn.microsoft.com/en-us/library/windows/desktop/microsoft.directx_sdk.x3daudio.x3daudio_emitter(v=vs.85).aspx )
            // Volume(d) = 1 , if d <= ScaleDistance, where d is the distance to the listener
            // Volume(d) = ScaleDistance / d , if d >= ScaleDistance

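            // For example, with emitter.DistanceScale = 1 an emitter 2 units away from the listener is attenuated
            // to half volume (attenuationFactor = 0.5), and any emitter closer than 1 unit plays at full volume.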
            // 1. Attenuation due to distance.
            var vecListEmit = emitter.Position - listener.Position;
            var distListEmit = vecListEmit.Length();
            var attenuationFactor = distListEmit <= emitter.DistanceScale ? 1f : emitter.DistanceScale / distListEmit;

            // 2. Left/Right balance.
            var repartRight = 0.5f;
            var worldToList = Matrix.Identity;
            var rightVec = Vector3.Cross(listener.Forward, listener.Up);
            worldToList.Column1 = new Vector4(rightVec, 0);
            worldToList.Column2 = new Vector4(listener.Forward, 0);
            worldToList.Column3 = new Vector4(listener.Up, 0);
            var vecListEmitListBase = Vector3.TransformNormal(vecListEmit, worldToList);
            var vecListEmitListBase2 = (Vector2)vecListEmitListBase;
            if (vecListEmitListBase2.Length() > 0)
            {
                const float c = 1.45f;
                var absAlpha = Math.Abs(Math.Atan2(vecListEmitListBase2.Y, vecListEmitListBase2.X));
                var normAlpha = (float)(absAlpha / (Math.PI / 2));
                if (absAlpha > Math.PI / 2) normAlpha = 2 - normAlpha;
                repartRight = 0.5f * (2 * (c - 1) * normAlpha * normAlpha * normAlpha - 3 * (c - 1) * normAlpha * normAlpha + c * normAlpha);
                if (absAlpha > Math.PI / 2) repartRight = 1 - repartRight;
            }

            // Set the volumes.
            localizationChannelVolumes = new[] { attenuationFactor * (1f - repartRight), attenuationFactor * repartRight };
            UpdateStereoVolumes();

            // 3. Calculation of the Doppler effect.
            ComputeDopplerFactor(listener, emitter);
            UpdatePitch();
        }

        private void Reset3DImpl()
        {
            // nothing to do here.
        }

        internal override void UpdateVolume()
        {
            UpdateStereoVolumes();
        }

        private void UpdatePan()
        {
            UpdateStereoVolumes();
        }
    }
}

#endif