diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index d922db6c..00000000
--- a/.travis.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-# runs on mono, so we can't build the Win 8 or Universal projects
-# docs here: https://docs.travis-ci.com/user/languages/csharp/
-language: csharp
-solution: NAudio.sln
-script:
- - xbuild /p:Configuration=Release NAudio.sln
- - mono ./packages/NUnit.*/tools/nunit-console.exe /exclude=IntegrationTest ./NAudioTests/bin/Release/NAudioTests.dll
diff --git a/NAudio.Universal/NAudio.Universal.csproj b/NAudio.Universal/NAudio.Universal.csproj
index 95a543fd..e32400fb 100644
--- a/NAudio.Universal/NAudio.Universal.csproj
+++ b/NAudio.Universal/NAudio.Universal.csproj
@@ -26,6 +26,7 @@
<DefineConstants>DEBUG;TRACE;NETFX_CORE;WINDOWS_UAP</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
+ <DocumentationFile>bin\Debug\NAudio.Universal.XML</DocumentationFile>
AnyCPU
@@ -35,6 +36,7 @@
<DefineConstants>TRACE;NETFX_CORE;WINDOWS_UAP</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
+ <DocumentationFile>bin\Release\NAudio.Universal.XML</DocumentationFile>
ARM
@@ -114,15 +116,6 @@
- <Compile Include="..\NAudio.Win8\Wave\WaveInputs\WasapiCaptureRT.cs">
- <Link>Wave\WaveInputs\WasapiCaptureRT.cs</Link>
- </Compile>
- <Compile Include="..\NAudio.Win8\Wave\WaveOutputs\WasapiOutRT.cs">
- <Link>Wave\WaveOutputs\WasapiOutRT.cs</Link>
- </Compile>
- <Compile Include="..\NAudio.Win8\Wave\WaveOutputs\WaveFileWriterRT.cs">
- <Link>Wave\WaveOutputs\WaveFileWriterRT.cs</Link>
- </Compile>
<Link>Codecs\ALawDecoder.cs</Link>
@@ -828,7 +821,10 @@
+ <Compile Include="Wave\WaveInputs\WasapiCaptureRT.cs" />
+ <Compile Include="Wave\WaveOutputs\WasapiOutRT.cs" />
+ <Compile Include="Wave\WaveOutputs\WaveFileWriterRT.cs" />
diff --git a/NAudio.Universal/Properties/AssemblyInfo.cs b/NAudio.Universal/Properties/AssemblyInfo.cs
index 7627889a..b23c0f55 100644
--- a/NAudio.Universal/Properties/AssemblyInfo.cs
+++ b/NAudio.Universal/Properties/AssemblyInfo.cs
@@ -10,7 +10,7 @@
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("NAudio.Universal")]
-[assembly: AssemblyCopyright("Copyright © Mark Heath 2015")]
+[assembly: AssemblyCopyright("Copyright © Mark Heath 2017")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
diff --git a/NAudio.Win8/Wave/WaveInputs/WasapiCaptureRT.cs b/NAudio.Universal/Wave/WaveInputs/WasapiCaptureRT.cs
similarity index 84%
rename from NAudio.Win8/Wave/WaveInputs/WasapiCaptureRT.cs
rename to NAudio.Universal/Wave/WaveInputs/WasapiCaptureRT.cs
index bee798b4..6aa1aa37 100644
--- a/NAudio.Win8/Wave/WaveInputs/WasapiCaptureRT.cs
+++ b/NAudio.Universal/Wave/WaveInputs/WasapiCaptureRT.cs
@@ -1,381 +1,372 @@
-using System;
-using System.Collections.Generic;
-using System.Text;
-using System.Threading.Tasks;
-using NAudio.CoreAudioApi;
-using NAudio.CoreAudioApi.Interfaces;
-using NAudio.Wave;
-using System.Threading;
-using System.Diagnostics;
-using System.Runtime.InteropServices;
-using NAudio.Win8.Wave.WaveOutputs;
-using Windows.Devices.Enumeration;
-using Windows.Media.Devices;
-
-namespace NAudio.Wave
-{
- enum WasapiCaptureState
- {
- Uninitialized,
- Stopped,
- Recording,
- Disposed
- }
-
- ///
- /// Audio Capture using Wasapi
- /// See http://msdn.microsoft.com/en-us/library/dd370800%28VS.85%29.aspx
- ///
- public class WasapiCaptureRT : IWaveIn
- {
- static readonly Guid IID_IAudioClient2 = new Guid("726778CD-F60A-4eda-82DE-E47610CD78AA");
- private const long REFTIMES_PER_SEC = 10000000;
- private const long REFTIMES_PER_MILLISEC = 10000;
- private volatile WasapiCaptureState captureState;
- private byte[] recordBuffer;
- private readonly string device;
- private int bytesPerFrame;
- private WaveFormat waveFormat;
- private AudioClient audioClient;
- private IntPtr hEvent;
- private Task captureTask;
- private SynchronizationContext syncContext;
-
- ///
- /// Indicates recorded data is available
- ///
- public event EventHandler<WaveInEventArgs> DataAvailable;
-
- ///
- /// Indicates that all recorded data has now been received.
- ///
- public event EventHandler<StoppedEventArgs> RecordingStopped;
- private int latencyMilliseconds;
-
- ///
- /// Initialises a new instance of the WASAPI capture class
- ///
- public WasapiCaptureRT() :
- this(GetDefaultCaptureDevice())
- {
- }
-
- ///
- /// Initialises a new instance of the WASAPI capture class
- ///
- /// Capture device to use
- public WasapiCaptureRT(string device)
- {
- this.device = device;
- this.syncContext = SynchronizationContext.Current;
- //this.waveFormat = audioClient.MixFormat;
- }
-
- ///
- /// Recording wave format
- ///
- public virtual WaveFormat WaveFormat
- {
- get
- {
- // for convenience, return a WAVEFORMATEX, instead of the real
- // WAVEFORMATEXTENSIBLE being used
- var wfe = waveFormat as WaveFormatExtensible;
- if (wfe != null)
- {
- try
- {
- return wfe.ToStandardWaveFormat();
- }
- catch (InvalidOperationException)
- {
- // couldn't convert to a standard format
- }
- }
- return waveFormat;
- }
- set { waveFormat = value; }
- }
-
- ///
- /// Way of enumerating all the audio capture devices available on the system
- ///
- ///
- public async static Task<IEnumerable<DeviceInformation>> GetCaptureDevices()
- {
- var audioCaptureSelector = MediaDevice.GetAudioCaptureSelector();
-
- // (a PropertyKey)
- var supportsEventDrivenMode = "{1da5d803-d492-4edd-8c23-e0c0ffee7f0e} 7";
-
- var captureDevices = await DeviceInformation.FindAllAsync(audioCaptureSelector, new[] { supportsEventDrivenMode } );
- return captureDevices;
- }
-
- ///
- /// Gets the default audio capture device
- ///
- /// The default audio capture device
- public static string GetDefaultCaptureDevice()
- {
- var defaultCaptureDeviceId = MediaDevice.GetDefaultAudioCaptureId(AudioDeviceRole.Default);
- return defaultCaptureDeviceId;
- }
-
- ///
- /// Initializes the capture device. Must be called on the UI (STA) thread.
- /// If not called manually then StartRecording() will call it internally.
- ///
- public async Task InitAsync()
- {
- if (captureState == WasapiCaptureState.Disposed) throw new ObjectDisposedException(nameof(WasapiCaptureRT));
- if (captureState != WasapiCaptureState.Uninitialized) throw new InvalidOperationException("Already initialized");
-
- var icbh = new ActivateAudioInterfaceCompletionHandler(ac2 => InitializeCaptureDevice((IAudioClient)ac2));
- IActivateAudioInterfaceAsyncOperation activationOperation;
- // must be called on UI thread
- NativeMethods.ActivateAudioInterfaceAsync(device, IID_IAudioClient2, IntPtr.Zero, icbh, out activationOperation);
- audioClient = new AudioClient((IAudioClient)(await icbh));
-
- hEvent = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
- audioClient.SetEventHandle(hEvent);
-
- captureState = WasapiCaptureState.Stopped;
- }
-
- private void InitializeCaptureDevice(IAudioClient audioClientInterface)
- {
- var audioClient = new AudioClient((IAudioClient)audioClientInterface);
- if (waveFormat == null)
- {
- waveFormat = audioClient.MixFormat;
- }
-
- long requestedDuration = REFTIMES_PER_MILLISEC * 100;
-
-
- if (!audioClient.IsFormatSupported(AudioClientShareMode.Shared, waveFormat))
- {
- throw new ArgumentException("Unsupported Wave Format");
- }
-
- var streamFlags = GetAudioClientStreamFlags();
-
- audioClient.Initialize(AudioClientShareMode.Shared,
- streamFlags,
- requestedDuration,
- 0,
- waveFormat,
- Guid.Empty);
-
-
- int bufferFrameCount = audioClient.BufferSize;
- this.bytesPerFrame = this.waveFormat.Channels * this.waveFormat.BitsPerSample / 8;
- this.recordBuffer = new byte[bufferFrameCount * bytesPerFrame];
- Debug.WriteLine(string.Format("record buffer size = {0}", this.recordBuffer.Length));
-
- // Get back the effective latency from AudioClient
- latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
- }
-
- ///
- /// To allow overrides to specify different flags (e.g. loopback)
- ///
- protected virtual AudioClientStreamFlags GetAudioClientStreamFlags()
- {
- return AudioClientStreamFlags.EventCallback;
- }
-
- ///
- /// Start Recording
- ///
- public async void StartRecording()
- {
- if (captureState == WasapiCaptureState.Disposed) throw new ObjectDisposedException(nameof(WasapiCaptureRT));
- if (captureState == WasapiCaptureState.Uninitialized) await InitAsync();
-
- captureState = WasapiCaptureState.Recording;
-
- captureTask = Task.Run(() => DoRecording());
-
- Debug.WriteLine("Recording...");
- }
-
- ///
- /// Stop Recording
- ///
- public void StopRecording()
- {
- if (captureState == WasapiCaptureState.Disposed) throw new ObjectDisposedException(nameof(WasapiCaptureRT));
- if (captureState != WasapiCaptureState.Recording) return;
-
- captureState = WasapiCaptureState.Stopped;
- captureTask?.Wait(5000);
- Debug.WriteLine("WasapiCaptureRT stopped");
- }
-
- private void DoRecording()
- {
- Debug.WriteLine("Recording buffer size: " + audioClient.BufferSize);
-
- var buf = new Byte[audioClient.BufferSize * bytesPerFrame];
-
- int bufLength = 0;
- int minPacketSize = waveFormat.AverageBytesPerSecond / 100; //100ms
-
- try
- {
- AudioCaptureClient capture = audioClient.AudioCaptureClient;
- audioClient.Start();
-
- int packetSize = capture.GetNextPacketSize();
-
- while (captureState == WasapiCaptureState.Recording)
- {
- IntPtr pData = IntPtr.Zero;
- int numFramesToRead = 0;
- AudioClientBufferFlags dwFlags = 0;
-
- if (packetSize == 0)
- {
- if (NativeMethods.WaitForSingleObjectEx(hEvent, 100, true) != 0)
- {
- throw new Exception("Capture event timeout");
- }
- }
-
- pData = capture.GetBuffer(out numFramesToRead, out dwFlags);
-
- if ((int)(dwFlags & AudioClientBufferFlags.Silent) > 0)
- {
- pData = IntPtr.Zero;
- }
-
- if (numFramesToRead == 0) { continue; }
-
- int capturedBytes = numFramesToRead * bytesPerFrame;
-
- if (pData == IntPtr.Zero)
- {
- Array.Clear(buf, bufLength, capturedBytes);
- }
- else
- {
- Marshal.Copy(pData, buf, bufLength, capturedBytes);
- }
-
- bufLength += capturedBytes;
-
- capture.ReleaseBuffer(numFramesToRead);
-
- if (bufLength >= minPacketSize)
- {
- if (DataAvailable != null)
- {
- DataAvailable(this, new WaveInEventArgs(buf, bufLength));
- }
- bufLength = 0;
- }
-
- packetSize = capture.GetNextPacketSize();
- }
- }
- catch (Exception ex)
- {
- RaiseRecordingStopped(ex);
- Debug.WriteLine("stop wasapi");
- }
- finally
- {
- RaiseRecordingStopped(null);
-
- audioClient.Stop();
- }
- Debug.WriteLine("stop wasapi");
- }
-
- private void RaiseRecordingStopped(Exception exception)
- {
- var handler = RecordingStopped;
- if (handler != null)
- {
- if (this.syncContext == null)
- {
- handler(this, new StoppedEventArgs(exception));
- }
- else
- {
- syncContext.Post(state => handler(this, new StoppedEventArgs(exception)), null);
- }
- }
- }
-
- private void ReadNextPacket(AudioCaptureClient capture)
- {
- IntPtr buffer;
- int framesAvailable;
- AudioClientBufferFlags flags;
- int packetSize = capture.GetNextPacketSize();
- int recordBufferOffset = 0;
- //Debug.WriteLine(string.Format("packet size: {0} samples", packetSize / 4));
-
- while (packetSize != 0)
- {
- buffer = capture.GetBuffer(out framesAvailable, out flags);
-
- int bytesAvailable = framesAvailable * bytesPerFrame;
-
- // apparently it is sometimes possible to read more frames than we were expecting?
- // fix suggested by Michael Feld:
- int spaceRemaining = Math.Max(0, recordBuffer.Length - recordBufferOffset);
- if (spaceRemaining < bytesAvailable && recordBufferOffset > 0)
- {
- if (DataAvailable != null) DataAvailable(this, new WaveInEventArgs(recordBuffer, recordBufferOffset));
- recordBufferOffset = 0;
- }
-
- // if not silence...
- if ((flags & AudioClientBufferFlags.Silent) != AudioClientBufferFlags.Silent)
- {
- Marshal.Copy(buffer, recordBuffer, recordBufferOffset, bytesAvailable);
- }
- else
- {
- Array.Clear(recordBuffer, recordBufferOffset, bytesAvailable);
- }
- recordBufferOffset += bytesAvailable;
- capture.ReleaseBuffer(framesAvailable);
- packetSize = capture.GetNextPacketSize();
- }
- if (DataAvailable != null)
- {
- DataAvailable(this, new WaveInEventArgs(recordBuffer, recordBufferOffset));
- }
- }
-
- ///
- /// Dispose
- ///
- public void Dispose()
- {
- if (captureState == WasapiCaptureState.Disposed) return;
-
- try
- {
- StopRecording();
-
- NativeMethods.CloseHandle(hEvent);
- audioClient?.Dispose();
- }
- catch (Exception ex)
- {
- Debug.WriteLine("Exception disposing WasapiCaptureRT: " + ex.ToString());
- }
-
- hEvent = IntPtr.Zero;
- audioClient = null;
-
- captureState = WasapiCaptureState.Disposed;
- }
- }
-}
+using System;
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using NAudio.CoreAudioApi;
+using NAudio.CoreAudioApi.Interfaces;
+using System.Threading;
+using System.Diagnostics;
+using System.Runtime.InteropServices;
+using NAudio.Win8.Wave.WaveOutputs;
+using Windows.Devices.Enumeration;
+using Windows.Media.Devices;
+
+namespace NAudio.Wave
+{
+ enum WasapiCaptureState
+ {
+ Uninitialized,
+ Stopped,
+ Recording,
+ Disposed
+ }
+
+ /// <summary>
+ /// Audio Capture using Wasapi
+ /// See http://msdn.microsoft.com/en-us/library/dd370800%28VS.85%29.aspx
+ /// </summary>
+ public class WasapiCaptureRT : IWaveIn
+ {
+ static readonly Guid IID_IAudioClient2 = new Guid("726778CD-F60A-4eda-82DE-E47610CD78AA");
+ private const long REFTIMES_PER_SEC = 10000000;
+ private const long REFTIMES_PER_MILLISEC = 10000;
+ private volatile WasapiCaptureState captureState;
+ private byte[] recordBuffer;
+ private readonly string device;
+ private int bytesPerFrame;
+ private WaveFormat waveFormat;
+ private AudioClient audioClient;
+ private IntPtr hEvent;
+ private Task captureTask;
+ private readonly SynchronizationContext syncContext;
+
+ /// <summary>
+ /// Indicates recorded data is available
+ /// </summary>
+ public event EventHandler<WaveInEventArgs> DataAvailable;
+
+ /// <summary>
+ /// Indicates that all recorded data has now been received.
+ /// </summary>
+ public event EventHandler<StoppedEventArgs> RecordingStopped;
+
+ /// <summary>
+ /// The effective latency in milliseconds
+ /// </summary>
+ public int LatencyMilliseconds { get; private set; }
+
+ /// <summary>
+ /// Initialises a new instance of the WASAPI capture class
+ /// </summary>
+ public WasapiCaptureRT() :
+ this(GetDefaultCaptureDevice())
+ {
+ }
+
+ /// <summary>
+ /// Initialises a new instance of the WASAPI capture class
+ /// </summary>
+ /// <param name="device">Capture device to use</param>
+ public WasapiCaptureRT(string device)
+ {
+ this.device = device;
+ syncContext = SynchronizationContext.Current;
+ //this.waveFormat = audioClient.MixFormat;
+ }
+
+ /// <summary>
+ /// Recording wave format
+ /// </summary>
+ public virtual WaveFormat WaveFormat
+ {
+ get
+ {
+ // for convenience, return a WAVEFORMATEX, instead of the real
+ // WAVEFORMATEXTENSIBLE being used
+ if (waveFormat is WaveFormatExtensible wfe)
+ {
+ try
+ {
+ return wfe.ToStandardWaveFormat();
+ }
+ catch (InvalidOperationException)
+ {
+ // couldn't convert to a standard format
+ }
+ }
+ return waveFormat;
+ }
+ set => waveFormat = value;
+ }
+
+ /// <summary>
+ /// Way of enumerating all the audio capture devices available on the system
+ /// </summary>
+ /// <returns></returns>
+ public static async Task<IEnumerable<DeviceInformation>> GetCaptureDevices()
+ {
+ var audioCaptureSelector = MediaDevice.GetAudioCaptureSelector();
+
+ // (a PropertyKey)
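+ // The key below uses the canonical "{fmtid} pid" string form that DeviceInformation.FindAllAsync
+ // accepts for requested properties; it identifies endpoints that support event-driven capture.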
+ var supportsEventDrivenMode = "{1da5d803-d492-4edd-8c23-e0c0ffee7f0e} 7";
+
+ var captureDevices = await DeviceInformation.FindAllAsync(audioCaptureSelector, new[] { supportsEventDrivenMode } );
+ return captureDevices;
+ }
+
+ /// <summary>
+ /// Gets the default audio capture device
+ /// </summary>
+ /// <returns>The default audio capture device</returns>
+ public static string GetDefaultCaptureDevice()
+ {
+ var defaultCaptureDeviceId = MediaDevice.GetDefaultAudioCaptureId(AudioDeviceRole.Default);
+ return defaultCaptureDeviceId;
+ }
+
+ /// <summary>
+ /// Initializes the capture device. Must be called on the UI (STA) thread.
+ /// If not called manually then StartRecording() will call it internally.
+ /// </summary>
+ public async Task InitAsync()
+ {
+ if (captureState == WasapiCaptureState.Disposed) throw new ObjectDisposedException(nameof(WasapiCaptureRT));
+ if (captureState != WasapiCaptureState.Uninitialized) throw new InvalidOperationException("Already initialized");
+
+ var icbh = new ActivateAudioInterfaceCompletionHandler(ac2 => InitializeCaptureDevice((IAudioClient)ac2));
+ // must be called on UI thread
+ NativeMethods.ActivateAudioInterfaceAsync(device, IID_IAudioClient2, IntPtr.Zero, icbh, out var activationOperation);
+ audioClient = new AudioClient((IAudioClient)(await icbh));
+
+ hEvent = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
+ audioClient.SetEventHandle(hEvent);
+
+ captureState = WasapiCaptureState.Stopped;
+ }
+
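+ // A minimal usage sketch (the 'waveFile' sink below is illustrative, not part of this class):
+ //   var capture = new WasapiCaptureRT();            // default capture device
+ //   await capture.InitAsync();                      // call on the UI thread
+ //   capture.DataAvailable += (s, a) => waveFile.Write(a.Buffer, 0, a.BytesRecorded);
+ //   capture.RecordingStopped += (s, a) => waveFile.Dispose();
+ //   capture.StartRecording();
+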
+ private void InitializeCaptureDevice(IAudioClient audioClientInterface)
+ {
+ var audioClient = new AudioClient((IAudioClient)audioClientInterface);
+ if (waveFormat == null)
+ {
+ waveFormat = audioClient.MixFormat;
+ }
+
+ long requestedDuration = REFTIMES_PER_MILLISEC * 100;
+
+ if (!audioClient.IsFormatSupported(AudioClientShareMode.Shared, waveFormat))
+ {
+ throw new ArgumentException("Unsupported Wave Format");
+ }
+
+ var streamFlags = GetAudioClientStreamFlags();
+
+ audioClient.Initialize(AudioClientShareMode.Shared,
+ streamFlags,
+ requestedDuration,
+ 0,
+ waveFormat,
+ Guid.Empty);
+
+ int bufferFrameCount = audioClient.BufferSize;
+ bytesPerFrame = waveFormat.Channels * waveFormat.BitsPerSample / 8;
+ recordBuffer = new byte[bufferFrameCount * bytesPerFrame];
+ //Debug.WriteLine("record buffer size = {0}", this.recordBuffer.Length);
+
+ // Get back the effective latency from AudioClient
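+ // StreamLatency is expressed in 100-nanosecond REFERENCE_TIME units, so dividing by 10,000 gives milliseconds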
+ LatencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
+ }
+
+ /// <summary>
+ /// To allow overrides to specify different flags (e.g. loopback)
+ /// </summary>
+ protected virtual AudioClientStreamFlags GetAudioClientStreamFlags()
+ {
+ return AudioClientStreamFlags.EventCallback;
+ }
+
+ /// <summary>
+ /// Start Recording
+ /// </summary>
+ public async void StartRecording()
+ {
+ if (captureState == WasapiCaptureState.Disposed) throw new ObjectDisposedException(nameof(WasapiCaptureRT));
+ if (captureState == WasapiCaptureState.Uninitialized) await InitAsync();
+
+ captureState = WasapiCaptureState.Recording;
+
+ captureTask = Task.Run(() => DoRecording());
+
+ Debug.WriteLine("Recording...");
+ }
+
+ /// <summary>
+ /// Stop Recording
+ /// </summary>
+ public void StopRecording()
+ {
+ if (captureState == WasapiCaptureState.Disposed) throw new ObjectDisposedException(nameof(WasapiCaptureRT));
+ if (captureState != WasapiCaptureState.Recording) return;
+
+ captureState = WasapiCaptureState.Stopped;
+ captureTask?.Wait(5000);
+ //Debug.WriteLine("WasapiCaptureRT stopped");
+ }
+
+ private void DoRecording()
+ {
+ //Debug.WriteLine("Recording buffer size: " + audioClient.BufferSize);
+
+ var buf = new byte[audioClient.BufferSize * bytesPerFrame];
+
+ int bufLength = 0;
+ int minPacketSize = waveFormat.AverageBytesPerSecond / 100; //100ms
+
+ try
+ {
+ AudioCaptureClient capture = audioClient.AudioCaptureClient;
+ audioClient.Start();
+
+ int packetSize = capture.GetNextPacketSize();
+
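+ // Event-driven capture loop: wait on the event the audio engine signals each period when no
+ // packet is ready, copy the captured frames into buf, and raise DataAvailable once roughly
+ // 100ms of audio (minPacketSize bytes) has accumulated.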
+ while (captureState == WasapiCaptureState.Recording)
+ {
+ if (packetSize == 0)
+ {
+ if (NativeMethods.WaitForSingleObjectEx(hEvent, 100, true) != 0)
+ {
+ throw new Exception("Capture event timeout");
+ }
+ }
+
+ var pData = capture.GetBuffer(out var numFramesToRead, out var dwFlags);
+
+ if ((int)(dwFlags & AudioClientBufferFlags.Silent) > 0)
+ {
+ pData = IntPtr.Zero;
+ }
+
+ if (numFramesToRead == 0) { continue; }
+
+ int capturedBytes = numFramesToRead * bytesPerFrame;
+
+ if (pData == IntPtr.Zero)
+ {
+ Array.Clear(buf, bufLength, capturedBytes);
+ }
+ else
+ {
+ Marshal.Copy(pData, buf, bufLength, capturedBytes);
+ }
+
+ bufLength += capturedBytes;
+
+ capture.ReleaseBuffer(numFramesToRead);
+
+ if (bufLength >= minPacketSize)
+ {
+ DataAvailable?.Invoke(this, new WaveInEventArgs(buf, bufLength));
+ bufLength = 0;
+ }
+
+ packetSize = capture.GetNextPacketSize();
+ }
+ }
+ catch (Exception ex)
+ {
+ RaiseRecordingStopped(ex);
+ Debug.WriteLine("stop wasapi");
+ }
+ finally
+ {
+ RaiseRecordingStopped(null);
+
+ audioClient.Stop();
+ }
+ Debug.WriteLine("stop wasapi");
+ }
+
+ private void RaiseRecordingStopped(Exception exception)
+ {
+ var handler = RecordingStopped;
+ if (handler != null)
+ {
+ if (syncContext == null)
+ {
+ handler(this, new StoppedEventArgs(exception));
+ }
+ else
+ {
+ syncContext.Post(state => handler(this, new StoppedEventArgs(exception)), null);
+ }
+ }
+ }
+
+ private void ReadNextPacket(AudioCaptureClient capture)
+ {
+ IntPtr buffer;
+ int framesAvailable;
+ AudioClientBufferFlags flags;
+ int packetSize = capture.GetNextPacketSize();
+ int recordBufferOffset = 0;
+ //Debug.WriteLine(string.Format("packet size: {0} samples", packetSize / 4));
+
+ while (packetSize != 0)
+ {
+ buffer = capture.GetBuffer(out framesAvailable, out flags);
+
+ int bytesAvailable = framesAvailable * bytesPerFrame;
+
+ // apparently it is sometimes possible to read more frames than we were expecting?
+ // fix suggested by Michael Feld:
+ int spaceRemaining = Math.Max(0, recordBuffer.Length - recordBufferOffset);
+ if (spaceRemaining < bytesAvailable && recordBufferOffset > 0)
+ {
+ if (DataAvailable != null) DataAvailable(this, new WaveInEventArgs(recordBuffer, recordBufferOffset));
+ recordBufferOffset = 0;
+ }
+
+ // if not silence...
+ if ((flags & AudioClientBufferFlags.Silent) != AudioClientBufferFlags.Silent)
+ {
+ Marshal.Copy(buffer, recordBuffer, recordBufferOffset, bytesAvailable);
+ }
+ else
+ {
+ Array.Clear(recordBuffer, recordBufferOffset, bytesAvailable);
+ }
+ recordBufferOffset += bytesAvailable;
+ capture.ReleaseBuffer(framesAvailable);
+ packetSize = capture.GetNextPacketSize();
+ }
+ if (DataAvailable != null)
+ {
+ DataAvailable(this, new WaveInEventArgs(recordBuffer, recordBufferOffset));
+ }
+ }
+
+ /// <summary>
+ /// Dispose
+ /// </summary>
+ public void Dispose()
+ {
+ if (captureState == WasapiCaptureState.Disposed) return;
+
+ try
+ {
+ StopRecording();
+
+ NativeMethods.CloseHandle(hEvent);
+ audioClient?.Dispose();
+ }
+ catch (Exception ex)
+ {
+ Debug.WriteLine("Exception disposing WasapiCaptureRT: " + ex.ToString());
+ }
+
+ hEvent = IntPtr.Zero;
+ audioClient = null;
+
+ captureState = WasapiCaptureState.Disposed;
+ }
+ }
+}
diff --git a/NAudio.Win8/Wave/WaveOutputs/WasapiOutRT.cs b/NAudio.Universal/Wave/WaveOutputs/WasapiOutRT.cs
similarity index 97%
rename from NAudio.Win8/Wave/WaveOutputs/WasapiOutRT.cs
rename to NAudio.Universal/Wave/WaveOutputs/WasapiOutRT.cs
index afe89868..9f993450 100644
--- a/NAudio.Win8/Wave/WaveOutputs/WasapiOutRT.cs
+++ b/NAudio.Universal/Wave/WaveOutputs/WasapiOutRT.cs
@@ -1,654 +1,654 @@
-using System;
-using System.Linq;
-using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
-using System.Threading;
-using System.Threading.Tasks;
-using Windows.System.Threading;
-using NAudio.CoreAudioApi;
-using NAudio.CoreAudioApi.Interfaces;
-using NAudio.Dsp;
-using NAudio.Wave;
-using Windows.Media.Devices;
-using NAudio.Utils;
-using NAudio.Wave.SampleProviders;
-
-namespace NAudio.Win8.Wave.WaveOutputs
-{
- enum WasapiOutState
- {
- Uninitialized,
- Stopped,
- Paused,
- Playing,
- Stopping,
- Disposing,
- Disposed
- }
-
- ///
- /// WASAPI Out for Windows RT
- ///
- public class WasapiOutRT : IWavePlayer
- {
- private AudioClient audioClient;
- private readonly string device;
- private readonly AudioClientShareMode shareMode;
- private AudioRenderClient renderClient;
- private int latencyMilliseconds;
- private int bufferFrameCount;
- private int bytesPerFrame;
- private byte[] readBuffer;
- private volatile WasapiOutState playbackState;
- private WaveFormat outputFormat;
- private bool resamplerNeeded;
- private IntPtr frameEventWaitHandle;
- private readonly SynchronizationContext syncContext;
- private bool isInitialized;
- private readonly AutoResetEvent playThreadEvent;
-
- ///
- /// Playback Stopped
- ///
- public event EventHandler<StoppedEventArgs> PlaybackStopped;
-
- ///
- /// WASAPI Out using default audio endpoint
- ///
- /// ShareMode - shared or exclusive
- /// Desired latency in milliseconds
- public WasapiOutRT(AudioClientShareMode shareMode, int latency) :
- this(GetDefaultAudioEndpoint(), shareMode, latency)
- {
-
- }
-
- ///
- /// Creates a new WASAPI Output
- ///
- /// Device to use
- ///
- ///
- public WasapiOutRT(string device, AudioClientShareMode shareMode, int latency)
- {
- this.device = device;
- this.shareMode = shareMode;
- this.latencyMilliseconds = latency;
- this.syncContext = SynchronizationContext.Current;
- playThreadEvent = new AutoResetEvent(false);
- }
-
- ///
- /// Properties of the client's audio stream.
- /// Set before calling init
- ///
- private AudioClientProperties? audioClientProperties = null;
-
- private Func<IWaveProvider> waveProviderFunc;
-
- ///
- /// Sets the parameters that describe the properties of the client's audio stream.
- ///
- /// Boolean value to indicate whether or not the audio stream is hardware-offloaded.
- /// An enumeration that is used to specify the category of the audio stream.
- /// A bit-field describing the characteristics of the stream. Supported in Windows 8.1 and later.
- public void SetClientProperties(bool useHardwareOffload, AudioStreamCategory category, AudioClientStreamOptions options)
- {
- audioClientProperties = new AudioClientProperties()
- {
- cbSize = (uint) MarshalHelpers.SizeOf<AudioClientProperties>(),
- bIsOffload = Convert.ToInt32(useHardwareOffload),
- eCategory = category,
- Options = options
- };
- }
-
- private async Task Activate()
- {
- var icbh = new ActivateAudioInterfaceCompletionHandler(
- ac2 =>
- {
-
- if (this.audioClientProperties != null)
- {
- IntPtr p = Marshal.AllocHGlobal(Marshal.SizeOf(this.audioClientProperties.Value));
- Marshal.StructureToPtr(this.audioClientProperties.Value, p, false);
- ac2.SetClientProperties(p);
- Marshal.FreeHGlobal(p);
- // TODO: consider whether we can marshal this without the need for AllocHGlobal
- }
-
- /*var wfx = new WaveFormat(44100, 16, 2);
- int hr = ac2.Initialize(AudioClientShareMode.Shared,
- AudioClientStreamFlags.EventCallback | AudioClientStreamFlags.NoPersist,
- 10000000, 0, wfx, IntPtr.Zero);*/
- });
- var IID_IAudioClient2 = new Guid("726778CD-F60A-4eda-82DE-E47610CD78AA");
- IActivateAudioInterfaceAsyncOperation activationOperation;
- NativeMethods.ActivateAudioInterfaceAsync(device, IID_IAudioClient2, IntPtr.Zero, icbh, out activationOperation);
- var audioClient2 = await icbh;
- this.audioClient = new AudioClient((IAudioClient)audioClient2);
- }
-
- private static string GetDefaultAudioEndpoint()
- {
- // can't use the MMDeviceEnumerator in WinRT
-
- return MediaDevice.GetDefaultAudioRenderId(AudioDeviceRole.Default);
- }
-
- private async void PlayThread()
- {
- await Activate();
- var playbackProvider = Init();
- bool isClientRunning = false;
- try
- {
- if (this.resamplerNeeded)
- {
- var resampler = new WdlResamplingSampleProvider(playbackProvider.ToSampleProvider(), outputFormat.SampleRate);
- playbackProvider = new SampleToWaveProvider(resampler);
- }
-
- // fill a whole buffer
- bufferFrameCount = audioClient.BufferSize;
- bytesPerFrame = outputFormat.Channels*outputFormat.BitsPerSample/8;
- readBuffer = new byte[bufferFrameCount*bytesPerFrame];
- FillBuffer(playbackProvider, bufferFrameCount);
- int timeout = 3 * latencyMilliseconds;
-
- while (playbackState != WasapiOutState.Disposed)
- {
- if (playbackState != WasapiOutState.Playing)
- {
- playThreadEvent.WaitOne(500);
- }
-
- // If still playing and notification is ok
- if (playbackState == WasapiOutState.Playing)
- {
- if (!isClientRunning)
- {
- audioClient.Start();
- isClientRunning = true;
- }
- // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
- var r = NativeMethods.WaitForSingleObjectEx(frameEventWaitHandle, timeout, true);
- if (r != 0) throw new InvalidOperationException("Timed out waiting for event");
- // See how much buffer space is available.
- int numFramesPadding = 0;
- // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
- numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;
-
- int numFramesAvailable = bufferFrameCount - numFramesPadding;
- if (numFramesAvailable > 0)
- {
- FillBuffer(playbackProvider, numFramesAvailable);
- }
- }
-
- if (playbackState == WasapiOutState.Stopping)
- {
- // play the buffer out
- while (audioClient.CurrentPadding > 0)
- {
- await Task.Delay(latencyMilliseconds / 2);
- }
- audioClient.Stop();
- isClientRunning = false;
- audioClient.Reset();
- playbackState = WasapiOutState.Stopped;
- RaisePlaybackStopped(null);
- }
- if (playbackState == WasapiOutState.Disposing)
- {
- audioClient.Stop();
- isClientRunning = false;
- audioClient.Reset();
- playbackState = WasapiOutState.Disposed;
- var disposablePlaybackProvider = playbackProvider as IDisposable;
- if (disposablePlaybackProvider!=null)
- disposablePlaybackProvider.Dispose(); // do everything on this thread, even dispose in case it is Media Foundation
- RaisePlaybackStopped(null);
-
- }
-
- }
- }
- catch (Exception e)
- {
- RaisePlaybackStopped(e);
- }
- finally
- {
- audioClient.Dispose();
- audioClient = null;
- renderClient = null;
- NativeMethods.CloseHandle(frameEventWaitHandle);
-
- }
- }
-
- private void RaisePlaybackStopped(Exception e)
- {
- var handler = PlaybackStopped;
- if (handler != null)
- {
- if (this.syncContext == null)
- {
- handler(this, new StoppedEventArgs(e));
- }
- else
- {
- syncContext.Post(state => handler(this, new StoppedEventArgs(e)), null);
- }
- }
- }
-
- private void FillBuffer(IWaveProvider playbackProvider, int frameCount)
- {
- IntPtr buffer = renderClient.GetBuffer(frameCount);
- int readLength = frameCount*bytesPerFrame;
- int read = playbackProvider.Read(readBuffer, 0, readLength);
- if (read == 0)
- {
- playbackState = WasapiOutState.Stopping;
- }
- Marshal.Copy(readBuffer, 0, buffer, read);
- int actualFrameCount = read/bytesPerFrame;
- /*if (actualFrameCount != frameCount)
- {
- Debug.WriteLine(String.Format("WASAPI wanted {0} frames, supplied {1}", frameCount, actualFrameCount ));
- }*/
- renderClient.ReleaseBuffer(actualFrameCount, AudioClientBufferFlags.None);
- }
-
- #region IWavePlayer Members
-
- ///
- /// Begin Playback
- ///
- public void Play()
- {
- if (playbackState != WasapiOutState.Playing)
- {
- playbackState = WasapiOutState.Playing;
- playThreadEvent.Set();
- }
- }
-
- ///
- /// Stop playback and flush buffers
- ///
- public void Stop()
- {
- if (playbackState == WasapiOutState.Playing || playbackState == WasapiOutState.Paused)
- {
- playbackState = WasapiOutState.Stopping;
- playThreadEvent.Set();
- }
- }
-
- ///
- /// Stop playback without flushing buffers
- ///
- public void Pause()
- {
- if (playbackState == WasapiOutState.Playing)
- {
- playbackState = WasapiOutState.Paused;
- playThreadEvent.Set();
- }
- }
-
- ///
- /// Old init implementation. Use the func one
- ///
- ///
- ///
- [Obsolete]
- public async Task Init(IWaveProvider provider)
- {
- Init(() => provider);
- }
-
- ///
- /// Initializes with a function to create the provider that is made on the playback thread
- ///
- /// Creates the wave provider
- public void Init(Func<IWaveProvider> waveProviderFunc)
- {
- if (isInitialized) throw new InvalidOperationException("Already Initialized");
- isInitialized = true;
- this.waveProviderFunc = waveProviderFunc;
- Task.Factory.StartNew(() =>
- {
- PlayThread();
- });
- }
-
- ///
- /// Initialize for playing the specified wave stream
- ///
- private IWaveProvider Init()
- {
- var waveProvider = waveProviderFunc();
- long latencyRefTimes = latencyMilliseconds*10000;
- outputFormat = waveProvider.WaveFormat;
- // first attempt uses the WaveFormat from the WaveStream
- WaveFormatExtensible closestSampleRateFormat;
- if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
- {
- // Use closesSampleRateFormat (in sharedMode, it equals usualy to the audioClient.MixFormat)
- // See documentation : http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
- // They say : "In shared mode, the audio engine always supports the mix format"
- // The MixFormat is more likely to be a WaveFormatExtensible.
- if (closestSampleRateFormat == null)
- {
- WaveFormat correctSampleRateFormat = audioClient.MixFormat;
- /*WaveFormat.CreateIeeeFloatWaveFormat(
- audioClient.MixFormat.SampleRate,
- audioClient.MixFormat.Channels);*/
-
- if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
- {
- // Iterate from Worst to Best Format
- WaveFormatExtensible[] bestToWorstFormats =
- {
- new WaveFormatExtensible(
- outputFormat.SampleRate, 32,
- outputFormat.Channels),
- new WaveFormatExtensible(
- outputFormat.SampleRate, 24,
- outputFormat.Channels),
- new WaveFormatExtensible(
- outputFormat.SampleRate, 16,
- outputFormat.Channels),
- };
-
- // Check from best Format to worst format ( Float32, Int24, Int16 )
- for (int i = 0; i < bestToWorstFormats.Length; i++)
- {
- correctSampleRateFormat = bestToWorstFormats[i];
- if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
- {
- break;
- }
- correctSampleRateFormat = null;
- }
-
- // If still null, then test on the PCM16, 2 channels
- if (correctSampleRateFormat == null)
- {
- // Last Last Last Chance (Thanks WASAPI)
- correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
- if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
- {
- throw new NotSupportedException("Can't find a supported format to use");
- }
- }
- }
- outputFormat = correctSampleRateFormat;
- }
- else
- {
- outputFormat = closestSampleRateFormat;
- }
-
- // just check that we can make it.
- //using (new MediaFoundationResampler(waveProvider, outputFormat))
- {
- }
- this.resamplerNeeded = true;
- }
- else
- {
- resamplerNeeded = false;
- }
-
- // Init Shared or Exclusive
- if (shareMode == AudioClientShareMode.Shared)
- {
- // With EventCallBack and Shared,
- audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
- outputFormat, Guid.Empty);
-
- // Get back the effective latency from AudioClient. On Windows 10 it can be 0
- if (audioClient.StreamLatency > 0)
- latencyMilliseconds = (int) (audioClient.StreamLatency/10000);
- }
- else
- {
- // With EventCallBack and Exclusive, both latencies must equals
- audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
- outputFormat, Guid.Empty);
- }
-
- // Create the Wait Event Handle
- frameEventWaitHandle = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
- audioClient.SetEventHandle(frameEventWaitHandle);
-
- // Get the RenderClient
- renderClient = audioClient.AudioRenderClient;
- return waveProvider;
- }
-
- ///
- /// Playback State
- ///
- public PlaybackState PlaybackState
- {
- get
- {
- switch (playbackState)
- {
- case WasapiOutState.Playing:
- return PlaybackState.Playing;
- case WasapiOutState.Paused:
- return PlaybackState.Paused;
- default:
- return PlaybackState.Stopped;
- }
- }
- }
-
- #endregion
-
- ///
- /// Dispose
- ///
- public void Dispose()
- {
- if (audioClient != null)
- {
- playbackState = WasapiOutState.Disposing;
- playThreadEvent.Set();
- }
- }
- }
-
- ///
- /// Some useful native methods for Windows 8/10 support ( https://msdn.microsoft.com/en-us/library/windows/desktop/hh802935(v=vs.85).aspx )
- ///
- class NativeMethods
- {
- [DllImport("api-ms-win-core-synch-l1-2-0.dll", CharSet = CharSet.Unicode, ExactSpelling = false,
- PreserveSig = true, SetLastError = true)]
- internal static extern IntPtr CreateEventExW(IntPtr lpEventAttributes, IntPtr lpName, int dwFlags,
- EventAccess dwDesiredAccess);
-
-
- [DllImport("api-ms-win-core-handle-l1-1-0.dll", ExactSpelling = true, PreserveSig = true, SetLastError = true)]
- public static extern bool CloseHandle(IntPtr hObject);
-
- [DllImport("api-ms-win-core-synch-l1-2-0.dll", ExactSpelling = true, PreserveSig = true, SetLastError = true)]
- public static extern int WaitForSingleObjectEx(IntPtr hEvent, int milliseconds, bool bAlertable);
-
- ///
- /// Enables Windows Store apps to access preexisting Component Object Model (COM) interfaces in the WASAPI family.
- ///
- /// A device interface ID for an audio device. This is normally retrieved from a DeviceInformation object or one of the methods of the MediaDevice class.
- /// The IID of a COM interface in the WASAPI family, such as IAudioClient.
- /// Interface-specific activation parameters. For more information, see the pActivationParams parameter in IMMDevice::Activate.
- ///
- ///
- [DllImport("Mmdevapi.dll", ExactSpelling = true, PreserveSig = false)]
- public static extern void ActivateAudioInterfaceAsync(
- [In, MarshalAs(UnmanagedType.LPWStr)] string deviceInterfacePath,
- [In, MarshalAs(UnmanagedType.LPStruct)] Guid riid,
- [In] IntPtr activationParams, // n.b. is actually a pointer to a PropVariant, but we never need to pass anything but null
- [In] IActivateAudioInterfaceCompletionHandler completionHandler,
- out IActivateAudioInterfaceAsyncOperation activationOperation);
- }
-
- // trying some ideas from Lucian Wischik (ljw1004):
- // http://www.codeproject.com/Articles/460145/Recording-and-playing-PCM-audio-on-Windows-8-VB
-
- [Flags]
- internal enum EventAccess
- {
- STANDARD_RIGHTS_REQUIRED = 0xF0000,
- SYNCHRONIZE = 0x100000,
- EVENT_ALL_ACCESS = STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0x3
- }
-
- internal class ActivateAudioInterfaceCompletionHandler :
- IActivateAudioInterfaceCompletionHandler, IAgileObject
- {
- private Action<IAudioClient2> initializeAction;
- private TaskCompletionSource<IAudioClient2> tcs = new TaskCompletionSource<IAudioClient2>();
-
- public ActivateAudioInterfaceCompletionHandler(
- Action<IAudioClient2> initializeAction)
- {
- this.initializeAction = initializeAction;
- }
-
- public void ActivateCompleted(IActivateAudioInterfaceAsyncOperation activateOperation)
- {
- // First get the activation results, and see if anything bad happened then
- int hr = 0;
- object unk = null;
- activateOperation.GetActivateResult(out hr, out unk);
- if (hr != 0)
- {
- tcs.TrySetException(Marshal.GetExceptionForHR(hr, new IntPtr(-1)));
- return;
- }
-
- var pAudioClient = (IAudioClient2) unk;
-
- // Next try to call the client's (synchronous, blocking) initialization method.
- try
- {
- initializeAction(pAudioClient);
- tcs.SetResult(pAudioClient);
- }
- catch (Exception ex)
- {
- tcs.TrySetException(ex);
- }
-
-
- }
-
-
- public TaskAwaiter<IAudioClient2> GetAwaiter()
- {
- return tcs.Task.GetAwaiter();
- }
- }
-
- [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("41D949AB-9862-444A-80F6-C261334DA5EB")]
- interface IActivateAudioInterfaceCompletionHandler
- {
- //virtual HRESULT STDMETHODCALLTYPE ActivateCompleted(/*[in]*/ _In_
- // IActivateAudioInterfaceAsyncOperation *activateOperation) = 0;
- void ActivateCompleted(IActivateAudioInterfaceAsyncOperation activateOperation);
- }
-
-
- [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("72A22D78-CDE4-431D-B8CC-843A71199B6D")]
- interface IActivateAudioInterfaceAsyncOperation
- {
- //virtual HRESULT STDMETHODCALLTYPE GetActivateResult(/*[out]*/ _Out_
- // HRESULT *activateResult, /*[out]*/ _Outptr_result_maybenull_ IUnknown **activatedInterface) = 0;
- void GetActivateResult([Out] out int activateResult,
- [Out, MarshalAs(UnmanagedType.IUnknown)] out object activateInterface);
- }
-
-
- [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("726778CD-F60A-4eda-82DE-E47610CD78AA")]
- interface IAudioClient2
- {
- [PreserveSig]
- int Initialize(AudioClientShareMode shareMode,
- AudioClientStreamFlags streamFlags,
- long hnsBufferDuration, // REFERENCE_TIME
- long hnsPeriodicity, // REFERENCE_TIME
- [In] WaveFormat pFormat,
- [In] IntPtr audioSessionGuid);
-
- // ref Guid AudioSessionGuid
-
- ///
- /// The GetBufferSize method retrieves the size (maximum capacity) of the endpoint buffer.
- ///
- int GetBufferSize(out uint bufferSize);
-
- [return: MarshalAs(UnmanagedType.I8)]
- long GetStreamLatency();
-
- int GetCurrentPadding(out int currentPadding);
-
- [PreserveSig]
- int IsFormatSupported(
- AudioClientShareMode shareMode,
- [In] WaveFormat pFormat,
- out IntPtr closestMatchFormat);
-
- int GetMixFormat(out IntPtr deviceFormatPointer);
-
- // REFERENCE_TIME is 64 bit int
- int GetDevicePeriod(out long defaultDevicePeriod, out long minimumDevicePeriod);
-
- int Start();
-
- int Stop();
-
- int Reset();
-
- int SetEventHandle(IntPtr eventHandle);
-
- ///
- /// The GetService method accesses additional services from the audio client object.
- ///
- /// The interface ID for the requested service.
- /// Pointer to a pointer variable into which the method writes the address of an instance of the requested interface.
- [PreserveSig]
- int GetService([In, MarshalAs(UnmanagedType.LPStruct)] Guid interfaceId,
- [Out, MarshalAs(UnmanagedType.IUnknown)] out object interfacePointer);
-
- //virtual HRESULT STDMETHODCALLTYPE IsOffloadCapable(/*[in]*/ _In_
- // AUDIO_STREAM_CATEGORY Category, /*[in]*/ _Out_ BOOL *pbOffloadCapable) = 0;
- void IsOffloadCapable(int category, out bool pbOffloadCapable);
- //virtual HRESULT STDMETHODCALLTYPE SetClientProperties(/*[in]*/ _In_
- // const AudioClientProperties *pProperties) = 0;
- void SetClientProperties([In] IntPtr pProperties);
- // TODO: try this: void SetClientProperties([In, MarshalAs(UnmanagedType.LPStruct)] AudioClientProperties pProperties);
- //virtual HRESULT STDMETHODCALLTYPE GetBufferSizeLimits(/*[in]*/ _In_
- // const WAVEFORMATEX *pFormat, /*[in]*/ _In_ BOOL bEventDriven, /*[in]*/
- // _Out_ REFERENCE_TIME *phnsMinBufferDuration, /*[in]*/ _Out_
- // REFERENCE_TIME *phnsMaxBufferDuration) = 0;
- void GetBufferSizeLimits(IntPtr pFormat, bool bEventDriven,
- out long phnsMinBufferDuration, out long phnsMaxBufferDuration);
- }
-
- [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("94ea2b94-e9cc-49e0-c0ff-ee64ca8f5b90")]
- interface IAgileObject
- {
-
- }
-
-
-}
+using System;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+using System.Threading;
+using System.Threading.Tasks;
+using Windows.System.Threading;
+using NAudio.CoreAudioApi;
+using NAudio.CoreAudioApi.Interfaces;
+using NAudio.Dsp;
+using NAudio.Wave;
+using Windows.Media.Devices;
+using NAudio.Utils;
+using NAudio.Wave.SampleProviders;
+
+namespace NAudio.Win8.Wave.WaveOutputs
+{
+ enum WasapiOutState
+ {
+ Uninitialized,
+ Stopped,
+ Paused,
+ Playing,
+ Stopping,
+ Disposing,
+ Disposed
+ }
+
+ /// <summary>
+ /// WASAPI Out for Windows RT
+ /// </summary>
+ public class WasapiOutRT : IWavePlayer
+ {
+ private AudioClient audioClient;
+ private readonly string device;
+ private readonly AudioClientShareMode shareMode;
+ private AudioRenderClient renderClient;
+ private int latencyMilliseconds;
+ private int bufferFrameCount;
+ private int bytesPerFrame;
+ private byte[] readBuffer;
+ private volatile WasapiOutState playbackState;
+ private WaveFormat outputFormat;
+ private bool resamplerNeeded;
+ private IntPtr frameEventWaitHandle;
+ private readonly SynchronizationContext syncContext;
+ private bool isInitialized;
+ private readonly AutoResetEvent playThreadEvent;
+
+ /// <summary>
+ /// Playback Stopped
+ /// </summary>
+ public event EventHandler<StoppedEventArgs> PlaybackStopped;
+
+ /// <summary>
+ /// WASAPI Out using default audio endpoint
+ /// </summary>
+ /// <param name="shareMode">ShareMode - shared or exclusive</param>
+ /// <param name="latency">Desired latency in milliseconds</param>
+ public WasapiOutRT(AudioClientShareMode shareMode, int latency) :
+ this(GetDefaultAudioEndpoint(), shareMode, latency)
+ {
+
+ }
+
+ /// <summary>
+ /// Creates a new WASAPI Output
+ /// </summary>
+ /// <param name="device">Device to use</param>
+ /// <param name="shareMode"></param>
+ /// <param name="latency"></param>
+ public WasapiOutRT(string device, AudioClientShareMode shareMode, int latency)
+ {
+ this.device = device;
+ this.shareMode = shareMode;
+ this.latencyMilliseconds = latency;
+ this.syncContext = SynchronizationContext.Current;
+ playThreadEvent = new AutoResetEvent(false);
+ }
+
+ /// <summary>
+ /// Properties of the client's audio stream.
+ /// Set before calling init
+ /// </summary>
+ private AudioClientProperties? audioClientProperties = null;
+
+ private Func<IWaveProvider> waveProviderFunc;
+
+ /// <summary>
+ /// Sets the parameters that describe the properties of the client's audio stream.
+ /// </summary>
+ /// <param name="useHardwareOffload">Boolean value to indicate whether or not the audio stream is hardware-offloaded.</param>
+ /// <param name="category">An enumeration that is used to specify the category of the audio stream.</param>
+ /// <param name="options">A bit-field describing the characteristics of the stream. Supported in Windows 8.1 and later.</param>
+ public void SetClientProperties(bool useHardwareOffload, AudioStreamCategory category, AudioClientStreamOptions options)
+ {
+ audioClientProperties = new AudioClientProperties()
+ {
+ cbSize = (uint) MarshalHelpers.SizeOf<AudioClientProperties>(),
+ bIsOffload = Convert.ToInt32(useHardwareOffload),
+ eCategory = category,
+ Options = options
+ };
+ }
+
+ private async Task Activate()
+ {
+ var icbh = new ActivateAudioInterfaceCompletionHandler(
+ ac2 =>
+ {
+
+ if (this.audioClientProperties != null)
+ {
+ IntPtr p = Marshal.AllocHGlobal(Marshal.SizeOf(this.audioClientProperties.Value));
+ Marshal.StructureToPtr(this.audioClientProperties.Value, p, false);
+ ac2.SetClientProperties(p);
+ Marshal.FreeHGlobal(p);
+ // TODO: consider whether we can marshal this without the need for AllocHGlobal
+ }
+
+ /*var wfx = new WaveFormat(44100, 16, 2);
+ int hr = ac2.Initialize(AudioClientShareMode.Shared,
+ AudioClientStreamFlags.EventCallback | AudioClientStreamFlags.NoPersist,
+ 10000000, 0, wfx, IntPtr.Zero);*/
+ });
+ var IID_IAudioClient2 = new Guid("726778CD-F60A-4eda-82DE-E47610CD78AA");
+ IActivateAudioInterfaceAsyncOperation activationOperation;
+ NativeMethods.ActivateAudioInterfaceAsync(device, IID_IAudioClient2, IntPtr.Zero, icbh, out activationOperation);
+ var audioClient2 = await icbh;
+ this.audioClient = new AudioClient((IAudioClient)audioClient2);
+ }
+
+ private static string GetDefaultAudioEndpoint()
+ {
+ // can't use the MMDeviceEnumerator in WinRT
+
+ return MediaDevice.GetDefaultAudioRenderId(AudioDeviceRole.Default);
+ }
+
+ private async void PlayThread()
+ {
+ await Activate();
+ var playbackProvider = Init();
+ bool isClientRunning = false;
+ try
+ {
+ if (this.resamplerNeeded)
+ {
+ var resampler = new WdlResamplingSampleProvider(playbackProvider.ToSampleProvider(), outputFormat.SampleRate);
+ playbackProvider = new SampleToWaveProvider(resampler);
+ }
+
+ // fill a whole buffer
+ bufferFrameCount = audioClient.BufferSize;
+ bytesPerFrame = outputFormat.Channels*outputFormat.BitsPerSample/8;
+ readBuffer = new byte[bufferFrameCount*bytesPerFrame];
+ FillBuffer(playbackProvider, bufferFrameCount);
+ int timeout = 3 * latencyMilliseconds;
+
+ while (playbackState != WasapiOutState.Disposed)
+ {
+ if (playbackState != WasapiOutState.Playing)
+ {
+ playThreadEvent.WaitOne(500);
+ }
+
+ // If still playing and notification is ok
+ if (playbackState == WasapiOutState.Playing)
+ {
+ if (!isClientRunning)
+ {
+ audioClient.Start();
+ isClientRunning = true;
+ }
+ // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
+ var r = NativeMethods.WaitForSingleObjectEx(frameEventWaitHandle, timeout, true);
+ if (r != 0) throw new InvalidOperationException("Timed out waiting for event");
+ // See how much buffer space is available.
+ int numFramesPadding = 0;
+ // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
+ numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;
+
+ int numFramesAvailable = bufferFrameCount - numFramesPadding;
+ if (numFramesAvailable > 0)
+ {
+ FillBuffer(playbackProvider, numFramesAvailable);
+ }
+ }
+
+ if (playbackState == WasapiOutState.Stopping)
+ {
+ // play the buffer out
+ while (audioClient.CurrentPadding > 0)
+ {
+ await Task.Delay(latencyMilliseconds / 2);
+ }
+ audioClient.Stop();
+ isClientRunning = false;
+ audioClient.Reset();
+ playbackState = WasapiOutState.Stopped;
+ RaisePlaybackStopped(null);
+ }
+ if (playbackState == WasapiOutState.Disposing)
+ {
+ audioClient.Stop();
+ isClientRunning = false;
+ audioClient.Reset();
+ playbackState = WasapiOutState.Disposed;
+ var disposablePlaybackProvider = playbackProvider as IDisposable;
+ if (disposablePlaybackProvider!=null)
+ disposablePlaybackProvider.Dispose(); // do everything on this thread, even dispose in case it is Media Foundation
+ RaisePlaybackStopped(null);
+
+ }
+
+ }
+ }
+ catch (Exception e)
+ {
+ RaisePlaybackStopped(e);
+ }
+ finally
+ {
+ audioClient.Dispose();
+ audioClient = null;
+ renderClient = null;
+ NativeMethods.CloseHandle(frameEventWaitHandle);
+
+ }
+ }
+
+ private void RaisePlaybackStopped(Exception e)
+ {
+ var handler = PlaybackStopped;
+ if (handler != null)
+ {
+ if (this.syncContext == null)
+ {
+ handler(this, new StoppedEventArgs(e));
+ }
+ else
+ {
+ syncContext.Post(state => handler(this, new StoppedEventArgs(e)), null);
+ }
+ }
+ }
+
+ private void FillBuffer(IWaveProvider playbackProvider, int frameCount)
+ {
+ IntPtr buffer = renderClient.GetBuffer(frameCount);
+ int readLength = frameCount*bytesPerFrame;
+ int read = playbackProvider.Read(readBuffer, 0, readLength);
+ if (read == 0)
+ {
+ playbackState = WasapiOutState.Stopping;
+ }
+ Marshal.Copy(readBuffer, 0, buffer, read);
+ int actualFrameCount = read/bytesPerFrame;
+ /*if (actualFrameCount != frameCount)
+ {
+ Debug.WriteLine(String.Format("WASAPI wanted {0} frames, supplied {1}", frameCount, actualFrameCount ));
+ }*/
+ renderClient.ReleaseBuffer(actualFrameCount, AudioClientBufferFlags.None);
+ }
+
+ #region IWavePlayer Members
+
+ /// <summary>
+ /// Begin Playback
+ /// </summary>
+ public void Play()
+ {
+ if (playbackState != WasapiOutState.Playing)
+ {
+ playbackState = WasapiOutState.Playing;
+ playThreadEvent.Set();
+ }
+ }
+
+ /// <summary>
+ /// Stop playback and flush buffers
+ /// </summary>
+ public void Stop()
+ {
+ if (playbackState == WasapiOutState.Playing || playbackState == WasapiOutState.Paused)
+ {
+ playbackState = WasapiOutState.Stopping;
+ playThreadEvent.Set();
+ }
+ }
+
+ /// <summary>
+ /// Stop playback without flushing buffers
+ /// </summary>
+ public void Pause()
+ {
+ if (playbackState == WasapiOutState.Playing)
+ {
+ playbackState = WasapiOutState.Paused;
+ playThreadEvent.Set();
+ }
+ }
+
+ /// <summary>
+ /// Old init implementation. Use the func one
+ /// </summary>
+ /// <param name="provider"></param>
+ /// <returns></returns>
+ [Obsolete]
+ public async Task Init(IWaveProvider provider)
+ {
+ Init(() => provider);
+ }
+
+ /// <summary>
+ /// Initializes with a function to create the provider that is made on the playback thread
+ /// </summary>
+ /// <param name="waveProviderFunc">Creates the wave provider</param>
+ public void Init(Func<IWaveProvider> waveProviderFunc)
+ {
+ if (isInitialized) throw new InvalidOperationException("Already Initialized");
+ isInitialized = true;
+ this.waveProviderFunc = waveProviderFunc;
+ Task.Factory.StartNew(() =>
+ {
+ PlayThread();
+ });
+ }
+
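+ // A minimal usage sketch ('sourceProvider' stands for any IWaveProvider supplied by the caller):
+ //   var player = new WasapiOutRT(AudioClientShareMode.Shared, 200);
+ //   player.Init(() => sourceProvider);  // the provider is created and read on the playback thread
+ //   player.Play();
+ //   // ... later
+ //   player.Stop();
+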
+ /// <summary>
+ /// Initialize for playing the specified wave stream
+ /// </summary>
+ private IWaveProvider Init()
+ {
+ var waveProvider = waveProviderFunc();
+ long latencyRefTimes = latencyMilliseconds*10000;
+ outputFormat = waveProvider.WaveFormat;
+ // first attempt uses the WaveFormat from the WaveStream
+ WaveFormatExtensible closestSampleRateFormat;
+ if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
+ {
+ // Use closestSampleRateFormat (in shared mode, it is usually equal to audioClient.MixFormat)
+ // See documentation : http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
+ // They say : "In shared mode, the audio engine always supports the mix format"
+ // The MixFormat is more likely to be a WaveFormatExtensible.
+ if (closestSampleRateFormat == null)
+ {
+ WaveFormat correctSampleRateFormat = audioClient.MixFormat;
+ /*WaveFormat.CreateIeeeFloatWaveFormat(
+ audioClient.MixFormat.SampleRate,
+ audioClient.MixFormat.Channels);*/
+
+ if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
+ {
+ // Iterate from Worst to Best Format
+ WaveFormatExtensible[] bestToWorstFormats =
+ {
+ new WaveFormatExtensible(
+ outputFormat.SampleRate, 32,
+ outputFormat.Channels),
+ new WaveFormatExtensible(
+ outputFormat.SampleRate, 24,
+ outputFormat.Channels),
+ new WaveFormatExtensible(
+ outputFormat.SampleRate, 16,
+ outputFormat.Channels),
+ };
+
+ // Check from best Format to worst format ( Float32, Int24, Int16 )
+ for (int i = 0; i < bestToWorstFormats.Length; i++)
+ {
+ correctSampleRateFormat = bestToWorstFormats[i];
+ if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
+ {
+ break;
+ }
+ correctSampleRateFormat = null;
+ }
+
+ // If still null, then test on the PCM16, 2 channels
+ if (correctSampleRateFormat == null)
+ {
+ // Last Last Last Chance (Thanks WASAPI)
+ correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
+ if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
+ {
+ throw new NotSupportedException("Can't find a supported format to use");
+ }
+ }
+ }
+ outputFormat = correctSampleRateFormat;
+ }
+ else
+ {
+ outputFormat = closestSampleRateFormat;
+ }
+
+ // just check that we can make it.
+ //using (new MediaFoundationResampler(waveProvider, outputFormat))
+ {
+ }
+ this.resamplerNeeded = true;
+ }
+ else
+ {
+ resamplerNeeded = false;
+ }
+
+ // Init Shared or Exclusive
+ if (shareMode == AudioClientShareMode.Shared)
+ {
+ // With EventCallBack and Shared,
+ audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
+ outputFormat, Guid.Empty);
+
+ // Get back the effective latency from AudioClient. On Windows 10 it can be 0
+ if (audioClient.StreamLatency > 0)
+ latencyMilliseconds = (int) (audioClient.StreamLatency/10000);
+ }
+ else
+ {
+ // With EventCallback and Exclusive mode, both latencies must be equal
+ audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
+ outputFormat, Guid.Empty);
+ }
+
+ // Create the Wait Event Handle
+ frameEventWaitHandle = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
+ audioClient.SetEventHandle(frameEventWaitHandle);
+
+ // Get the RenderClient
+ renderClient = audioClient.AudioRenderClient;
+ return waveProvider;
+ }
+
+ /// <summary>
+ /// Playback State
+ /// </summary>
+ public PlaybackState PlaybackState
+ {
+ get
+ {
+ switch (playbackState)
+ {
+ case WasapiOutState.Playing:
+ return PlaybackState.Playing;
+ case WasapiOutState.Paused:
+ return PlaybackState.Paused;
+ default:
+ return PlaybackState.Stopped;
+ }
+ }
+ }
+
+ #endregion
+
+ /// <summary>
+ /// Dispose
+ /// </summary>
+ public void Dispose()
+ {
+ if (audioClient != null)
+ {
+ playbackState = WasapiOutState.Disposing;
+ playThreadEvent.Set();
+ }
+ }
+ }
+
+ /// <summary>
+ /// Some useful native methods for Windows 8/10 support ( https://msdn.microsoft.com/en-us/library/windows/desktop/hh802935(v=vs.85).aspx )
+ /// </summary>
+ class NativeMethods
+ {
+ [DllImport("api-ms-win-core-synch-l1-2-0.dll", CharSet = CharSet.Unicode, ExactSpelling = false,
+ PreserveSig = true, SetLastError = true)]
+ internal static extern IntPtr CreateEventExW(IntPtr lpEventAttributes, IntPtr lpName, int dwFlags,
+ EventAccess dwDesiredAccess);
+
+
+ [DllImport("api-ms-win-core-handle-l1-1-0.dll", ExactSpelling = true, PreserveSig = true, SetLastError = true)]
+ public static extern bool CloseHandle(IntPtr hObject);
+
+ [DllImport("api-ms-win-core-synch-l1-2-0.dll", ExactSpelling = true, PreserveSig = true, SetLastError = true)]
+ public static extern int WaitForSingleObjectEx(IntPtr hEvent, int milliseconds, bool bAlertable);
+
+ /// <summary>
+ /// Enables Windows Store apps to access preexisting Component Object Model (COM) interfaces in the WASAPI family.
+ /// </summary>
+ /// <param name="deviceInterfacePath">A device interface ID for an audio device. This is normally retrieved from a DeviceInformation object or one of the methods of the MediaDevice class.</param>
+ /// <param name="riid">The IID of a COM interface in the WASAPI family, such as IAudioClient.</param>
+ /// <param name="activationParams">Interface-specific activation parameters. For more information, see the pActivationParams parameter in IMMDevice::Activate.</param>
+ /// <param name="completionHandler"></param>
+ /// <param name="activationOperation"></param>
+ [DllImport("Mmdevapi.dll", ExactSpelling = true, PreserveSig = false)]
+ public static extern void ActivateAudioInterfaceAsync(
+ [In, MarshalAs(UnmanagedType.LPWStr)] string deviceInterfacePath,
+ [In, MarshalAs(UnmanagedType.LPStruct)] Guid riid,
+ [In] IntPtr activationParams, // n.b. is actually a pointer to a PropVariant, but we never need to pass anything but null
+ [In] IActivateAudioInterfaceCompletionHandler completionHandler,
+ out IActivateAudioInterfaceAsyncOperation activationOperation);
+ }
+
+ // trying some ideas from Lucian Wischik (ljw1004):
+ // http://www.codeproject.com/Articles/460145/Recording-and-playing-PCM-audio-on-Windows-8-VB
+
+ [Flags]
+ internal enum EventAccess
+ {
+ STANDARD_RIGHTS_REQUIRED = 0xF0000,
+ SYNCHRONIZE = 0x100000,
+ EVENT_ALL_ACCESS = STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0x3
+ }
+
+ internal class ActivateAudioInterfaceCompletionHandler :
+ IActivateAudioInterfaceCompletionHandler, IAgileObject
+ {
+ private Action<IAudioClient2> initializeAction;
+ private TaskCompletionSource<IAudioClient2> tcs = new TaskCompletionSource<IAudioClient2>();
+
+ public ActivateAudioInterfaceCompletionHandler(
+ Action<IAudioClient2> initializeAction)
+ {
+ this.initializeAction = initializeAction;
+ }
+
+ public void ActivateCompleted(IActivateAudioInterfaceAsyncOperation activateOperation)
+ {
+ // First get the activation results, and see if anything bad happened then
+ int hr = 0;
+ object unk = null;
+ activateOperation.GetActivateResult(out hr, out unk);
+ if (hr != 0)
+ {
+ tcs.TrySetException(Marshal.GetExceptionForHR(hr, new IntPtr(-1)));
+ return;
+ }
+
+ var pAudioClient = (IAudioClient2) unk;
+
+ // Next try to call the client's (synchronous, blocking) initialization method.
+ try
+ {
+ initializeAction(pAudioClient);
+ tcs.SetResult(pAudioClient);
+ }
+ catch (Exception ex)
+ {
+ tcs.TrySetException(ex);
+ }
+
+
+ }
+
+
+ public TaskAwaiter<IAudioClient2> GetAwaiter()
+ {
+ return tcs.Task.GetAwaiter();
+ }
+ }
+
+ [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("41D949AB-9862-444A-80F6-C261334DA5EB")]
+ interface IActivateAudioInterfaceCompletionHandler
+ {
+ //virtual HRESULT STDMETHODCALLTYPE ActivateCompleted(/*[in]*/ _In_
+ // IActivateAudioInterfaceAsyncOperation *activateOperation) = 0;
+ void ActivateCompleted(IActivateAudioInterfaceAsyncOperation activateOperation);
+ }
+
+
+ [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("72A22D78-CDE4-431D-B8CC-843A71199B6D")]
+ interface IActivateAudioInterfaceAsyncOperation
+ {
+ //virtual HRESULT STDMETHODCALLTYPE GetActivateResult(/*[out]*/ _Out_
+ // HRESULT *activateResult, /*[out]*/ _Outptr_result_maybenull_ IUnknown **activatedInterface) = 0;
+ void GetActivateResult([Out] out int activateResult,
+ [Out, MarshalAs(UnmanagedType.IUnknown)] out object activateInterface);
+ }
+
+
+ [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("726778CD-F60A-4eda-82DE-E47610CD78AA")]
+ interface IAudioClient2
+ {
+ [PreserveSig]
+ int Initialize(AudioClientShareMode shareMode,
+ AudioClientStreamFlags streamFlags,
+ long hnsBufferDuration, // REFERENCE_TIME
+ long hnsPeriodicity, // REFERENCE_TIME
+ [In] WaveFormat pFormat,
+ [In] IntPtr audioSessionGuid);
+
+ // ref Guid AudioSessionGuid
+
+ /// <summary>
+ /// The GetBufferSize method retrieves the size (maximum capacity) of the endpoint buffer.
+ /// </summary>
+ int GetBufferSize(out uint bufferSize);
+
+ [return: MarshalAs(UnmanagedType.I8)]
+ long GetStreamLatency();
+
+ int GetCurrentPadding(out int currentPadding);
+
+ [PreserveSig]
+ int IsFormatSupported(
+ AudioClientShareMode shareMode,
+ [In] WaveFormat pFormat,
+ out IntPtr closestMatchFormat);
+
+ int GetMixFormat(out IntPtr deviceFormatPointer);
+
+ // REFERENCE_TIME is 64 bit int
+ int GetDevicePeriod(out long defaultDevicePeriod, out long minimumDevicePeriod);
+
+ int Start();
+
+ int Stop();
+
+ int Reset();
+
+ int SetEventHandle(IntPtr eventHandle);
+
+ /// <summary>
+ /// The GetService method accesses additional services from the audio client object.
+ /// </summary>
+ /// <param name="interfaceId">The interface ID for the requested service.</param>
+ /// <param name="interfacePointer">Pointer to a pointer variable into which the method writes the address of an instance of the requested interface.</param>
+ [PreserveSig]
+ int GetService([In, MarshalAs(UnmanagedType.LPStruct)] Guid interfaceId,
+ [Out, MarshalAs(UnmanagedType.IUnknown)] out object interfacePointer);
+
+ //virtual HRESULT STDMETHODCALLTYPE IsOffloadCapable(/*[in]*/ _In_
+ // AUDIO_STREAM_CATEGORY Category, /*[in]*/ _Out_ BOOL *pbOffloadCapable) = 0;
+ void IsOffloadCapable(int category, out bool pbOffloadCapable);
+ //virtual HRESULT STDMETHODCALLTYPE SetClientProperties(/*[in]*/ _In_
+ // const AudioClientProperties *pProperties) = 0;
+ void SetClientProperties([In] IntPtr pProperties);
+ // TODO: try this: void SetClientProperties([In, MarshalAs(UnmanagedType.LPStruct)] AudioClientProperties pProperties);
+ //virtual HRESULT STDMETHODCALLTYPE GetBufferSizeLimits(/*[in]*/ _In_
+ // const WAVEFORMATEX *pFormat, /*[in]*/ _In_ BOOL bEventDriven, /*[in]*/
+ // _Out_ REFERENCE_TIME *phnsMinBufferDuration, /*[in]*/ _Out_
+ // REFERENCE_TIME *phnsMaxBufferDuration) = 0;
+ void GetBufferSizeLimits(IntPtr pFormat, bool bEventDriven,
+ out long phnsMinBufferDuration, out long phnsMaxBufferDuration);
+ }
+
+ [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("94ea2b94-e9cc-49e0-c0ff-ee64ca8f5b90")]
+ interface IAgileObject
+ {
+
+ }
+
+
+}
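
The interop pieces above compose into an awaitable activation call: ActivateAudioInterfaceAsync is passed an ActivateAudioInterfaceCompletionHandler, and the handler's GetAwaiter lets the caller simply await the COM callback. A minimal sketch (not part of the patch), assuming it lives inside the NAudio.Wave namespace of NAudio.Universal so the internal types are visible, and that the device interface path comes from Windows.Media.Devices.MediaDevice:

    using System;
    using System.Threading.Tasks;

    static class ActivationSketch
    {
        // Sketch only: activate IAudioClient2 for a device interface path using the
        // NativeMethods and completion handler introduced in this patch.
        public static async Task<IAudioClient2> ActivateAsync(string deviceInterfacePath)
        {
            // Same IID as the IAudioClient2 interface declared above
            var iidAudioClient2 = new Guid("726778CD-F60A-4eda-82DE-E47610CD78AA");

            var completionHandler = new ActivateAudioInterfaceCompletionHandler(client =>
            {
                // client.SetClientProperties(...) / client.Initialize(...) would run here,
                // on the thread that services the activation callback
            });

            IActivateAudioInterfaceAsyncOperation activationOperation;
            NativeMethods.ActivateAudioInterfaceAsync(
                deviceInterfacePath,   // e.g. MediaDevice.GetDefaultAudioRenderId(AudioDeviceRole.Default)
                iidAudioClient2,
                IntPtr.Zero,           // no activation parameters needed
                completionHandler,
                out activationOperation);

            // GetAwaiter() on the handler turns the COM callback into an awaitable
            return await completionHandler;
        }
    }
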
diff --git a/NAudio.Win8/Wave/WaveOutputs/WaveFileWriterRT.cs b/NAudio.Universal/Wave/WaveOutputs/WaveFileWriterRT.cs
similarity index 97%
rename from NAudio.Win8/Wave/WaveOutputs/WaveFileWriterRT.cs
rename to NAudio.Universal/Wave/WaveOutputs/WaveFileWriterRT.cs
index 7f839638..df62e554 100644
--- a/NAudio.Win8/Wave/WaveOutputs/WaveFileWriterRT.cs
+++ b/NAudio.Universal/Wave/WaveOutputs/WaveFileWriterRT.cs
@@ -1,489 +1,489 @@
-using NAudio.Wave.SampleProviders;
-using System;
-using System.IO;
-using System.Threading;
-using System.Threading.Tasks;
-using Windows.Storage;
-
-namespace NAudio.Wave
-{
- ///
- /// This class writes WAV data to a .wav file on disk
- ///
- public class WaveFileWriterRT : Stream
- {
- private Stream outStream;
- private readonly BinaryWriter writer;
- private long dataSizePos;
- private long factSampleCountPos;
- private long dataChunkSize;
- private readonly WaveFormat format;
- private string filename;
-
- // Protects WriteAsync and FlushAsync from overlapping
- private readonly Semaphore asyncOperationsLock = new Semaphore(1, 100);
-
- ///
- /// Creates a 16 bit Wave File from an ISampleProvider
- /// BEWARE: the source provider must not return data indefinitely
- ///
- /// The filename to write to
- /// The source sample provider
- public static Task CreateWaveFile16Async(string filename, ISampleProvider sourceProvider)
- {
- return CreateWaveFileAsync(filename, new SampleToWaveProvider16(sourceProvider));
- }
-
- ///
- /// Creates a Wave file by reading all the data from a WaveProvider
- /// BEWARE: the WaveProvider MUST return 0 from its Read method when it is finished,
- /// or the Wave File will grow indefinitely.
- ///
- /// The filename to use
- /// The source WaveProvider
- public static async Task CreateWaveFileAsync(string filename, IWaveProvider sourceProvider)
- {
- StorageFile fileOperation = await StorageFile.GetFileFromPathAsync(filename);
- Stream fileStream = await fileOperation.OpenStreamForWriteAsync();
-
- using (var writer = new WaveFileWriterRT(fileStream, sourceProvider.WaveFormat))
- {
- writer.filename = filename;
- long outputLength = 0;
- var buffer = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond * 4];
- while (true)
- {
- int bytesRead = sourceProvider.Read(buffer, 0, buffer.Length);
- if (bytesRead == 0)
- {
- // end of source provider
- break;
- }
- outputLength += bytesRead;
- // Write will throw exception if WAV file becomes too large
- writer.Write(buffer, 0, bytesRead);
- }
- }
- }
-
- ///
- /// WaveFileWriterRT that actually writes to a stream
- ///
- /// Stream to be written to
- /// Wave format to use
- public WaveFileWriterRT(Stream outStream, WaveFormat format)
- {
- this.outStream = outStream;
- this.format = format;
- this.writer = new BinaryWriter(outStream, System.Text.Encoding.UTF8);
- this.writer.Write(System.Text.Encoding.UTF8.GetBytes("RIFF"));
- this.writer.Write((int)0); // placeholder
- this.writer.Write(System.Text.Encoding.UTF8.GetBytes("WAVE"));
-
- this.writer.Write(System.Text.Encoding.UTF8.GetBytes("fmt "));
- format.Serialize(this.writer);
-
- CreateFactChunk();
- WriteDataChunkHeader();
- }
-
- private void WriteDataChunkHeader()
- {
- this.writer.Write(System.Text.Encoding.UTF8.GetBytes("data"));
- dataSizePos = this.outStream.Position;
- this.writer.Write((int)0); // placeholder
- }
-
- private void CreateFactChunk()
- {
- if (HasFactChunk())
- {
- this.writer.Write(System.Text.Encoding.UTF8.GetBytes("fact"));
- this.writer.Write((int)4);
- factSampleCountPos = this.outStream.Position;
- this.writer.Write((int)0); // number of samples
- }
- }
-
- private bool HasFactChunk()
- {
- return format.Encoding != WaveFormatEncoding.Pcm &&
- format.BitsPerSample != 0;
- }
-
- ///
- /// The wave file name or null if not applicable
- ///
- public string Filename
- {
- get { return filename; }
- }
-
- ///
- /// Number of bytes of audio in the data chunk
- ///
- public override long Length
- {
- get { return dataChunkSize; }
- }
-
- ///
- /// WaveFormat of this wave file
- ///
- public WaveFormat WaveFormat
- {
- get { return format; }
- }
-
- ///
- /// Returns false: Cannot read from a WaveFileWriterRT
- ///
- public override bool CanRead
- {
- get { return false; }
- }
-
- ///
- /// Returns true: Can write to a WaveFileWriterRT
- ///
- public override bool CanWrite
- {
- get { return true; }
- }
-
- ///
- /// Returns false: Cannot seek within a WaveFileWriterRT
- ///
- public override bool CanSeek
- {
- get { return false; }
- }
-
- ///
- /// Returns false: Cannot timeout within a WaveFileWriterRT
- ///
- public override bool CanTimeout
- {
- get { return false; }
- }
-
- ///
- /// CopyToAsync is not supported for a WaveFileWriterRT
- ///
- public override Task CopyToAsync(Stream destination, int bufferSize, CancellationToken cancellationToken)
- {
- throw new NotImplementedException("Cannot copy from a WaveFileWriterRT");
- }
-
- ///
- /// Read is not supported for a WaveFileWriterRT
- ///
- public override int Read(byte[] buffer, int offset, int count)
- {
- throw new InvalidOperationException("Cannot read from a WaveFileWriterRT");
- }
-
- ///
- /// ReadAsync is not supported for a WaveFileWriterRT
- ///
- public override Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
- {
- throw new InvalidOperationException("Cannot read from a WaveFileWriterRT");
- }
-
- ///
- /// ReadByte is not supported for a WaveFileWriterRT
- ///
- public override int ReadByte()
- {
- throw new InvalidOperationException("Cannot read from a WaveFileWriterRT");
- }
-
- ///
- /// Seek is not supported for a WaveFileWriterRT
- ///
- public override long Seek(long offset, SeekOrigin origin)
- {
- throw new InvalidOperationException("Cannot seek within a WaveFileWriterRT");
- }
-
- ///
- /// SetLength is not supported for WaveFileWriterRT
- ///
- ///
- public override void SetLength(long value)
- {
- throw new InvalidOperationException("Cannot set length of a WaveFileWriterRT");
- }
-
- ///
- /// Gets the Position in the WaveFile (i.e. number of bytes written so far)
- ///
- public override long Position
- {
- get { return dataChunkSize; }
- set { throw new InvalidOperationException("Repositioning a WaveFileWriterRT is not supported"); }
- }
-
- ///
- /// Appends bytes to the WaveFile (assumes they are already in the correct format)
- ///
- /// the buffer containing the wave data
- /// the offset from which to start writing
- /// the number of bytes to write
- public override void Write(byte[] data, int offset, int count)
- {
- if (outStream.Length + count > UInt32.MaxValue)
- throw new ArgumentException("WAV file too large", "count");
- outStream.Write(data, offset, count);
- dataChunkSize += count;
- }
-
- ///
- /// Appends bytes to the WaveFile (assumes they are already in the correct format)
- ///
- /// the buffer containing the wave data
- /// the offset from which to start writing
- /// the number of bytes to write
- /// Cancellation token
- public override Task WriteAsync(byte[] buffer, int offset, int count,
- CancellationToken cancellationToken)
- {
- return Task.Run(() =>
- {
- try
- {
- asyncOperationsLock.WaitOne();
- Write(buffer, offset, count);
- }
- finally
- {
- asyncOperationsLock.Release();
- }
- });
- }
-
- ///
- /// WriteByte is not supported for a WaveFileWriterRT
- /// Use instead
- ///
- /// value to write
- public override void WriteByte(byte value)
- {
- throw new NotImplementedException();
- }
-
- private readonly byte[] value24 = new byte[3]; // keep this around to save us creating it every time
-
- ///
- /// Writes a single sample to the Wave file
- ///
- /// the sample to write (assumed floating point with 1.0f as max value)
- public void WriteSample(float sample)
- {
- if (WaveFormat.BitsPerSample == 16)
- {
- writer.Write((Int16)(Int16.MaxValue * sample));
- dataChunkSize += 2;
- }
- else if (WaveFormat.BitsPerSample == 24)
- {
- var value = BitConverter.GetBytes((Int32)(Int32.MaxValue * sample));
- value24[0] = value[1];
- value24[1] = value[2];
- value24[2] = value[3];
- writer.Write(value24);
- dataChunkSize += 3;
- }
- else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.Extensible)
- {
- writer.Write(UInt16.MaxValue * (Int32)sample);
- dataChunkSize += 4;
- }
- else if (WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
- {
- writer.Write(sample);
- dataChunkSize += 4;
- }
- else
- {
- throw new InvalidOperationException("Only 16, 24 or 32 bit PCM or IEEE float audio data supported");
- }
- }
-
- ///
- /// Writes 32 bit floating point samples to the Wave file
- /// They will be converted to the appropriate bit depth depending on the WaveFormat of the WAV file
- ///
- /// The buffer containing the floating point samples
- /// The offset from which to start writing
- /// The number of floating point samples to write
- public void WriteSamples(float[] samples, int offset, int count)
- {
- for (int n = 0; n < count; n++)
- {
- WriteSample(samples[offset + n]);
- }
- }
-
- ///
- /// Writes 16 bit samples to the Wave file
- ///
- /// The buffer containing the 16 bit samples
- /// The offset from which to start writing
- /// The number of 16 bit samples to write
- public void WriteSamples(short[] samples, int offset, int count)
- {
- // 16 bit PCM data
- if (WaveFormat.BitsPerSample == 16)
- {
- for (int sample = 0; sample < count; sample++)
- {
- writer.Write(samples[sample + offset]);
- }
- dataChunkSize += (count * 2);
- }
- // 24 bit PCM data
- else if (WaveFormat.BitsPerSample == 24)
- {
- byte[] value;
- for (int sample = 0; sample < count; sample++)
- {
- value = BitConverter.GetBytes(UInt16.MaxValue * (Int32)samples[sample + offset]);
- value24[0] = value[1];
- value24[1] = value[2];
- value24[2] = value[3];
- writer.Write(value24);
- }
- dataChunkSize += (count * 3);
- }
- // 32 bit PCM data
- else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.Extensible)
- {
- for (int sample = 0; sample < count; sample++)
- {
- writer.Write(UInt16.MaxValue * (Int32)samples[sample + offset]);
- }
- dataChunkSize += (count * 4);
- }
- // IEEE float data
- else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
- {
- for (int sample = 0; sample < count; sample++)
- {
- writer.Write((float)samples[sample + offset] / (float)(Int16.MaxValue + 1));
- }
- dataChunkSize += (count * 4);
- }
- else
- {
- throw new InvalidOperationException("Only 16, 24 or 32 bit PCM or IEEE float audio data supported");
- }
- }
-
- ///
- /// Ensures data is written to disk
- ///
- public override void Flush()
- {
- var pos = writer.BaseStream.Position;
- UpdateHeader(writer);
- writer.BaseStream.Position = pos;
- }
-
- ///
- /// Ensures data is written to disk
- ///
- public override Task FlushAsync(CancellationToken cancellationToken)
- {
- return Task.Run(() =>
- {
- try
- {
- asyncOperationsLock.WaitOne();
- Flush();
- }
- finally
- {
- asyncOperationsLock.Release();
- }
- });
- }
-
- #region IDisposable Members
-
- ///
- /// Actually performs the close,making sure the header contains the correct data
- ///
- /// True if called from Dispose
- protected override void Dispose(bool disposing)
- {
- if (disposing)
- {
- if (outStream != null)
- {
- try
- {
- UpdateHeader(writer);
- }
- finally
- {
- // in a finally block as we don't want the FileStream to run its disposer in
- // the GC thread if the code above caused an IOException (e.g. due to disk full)
- outStream.Dispose(); // will close the underlying base stream
- outStream = null;
- asyncOperationsLock.Dispose();
- }
- }
- }
- }
-
- ///
- /// Updates the header with file size information
- ///
- protected virtual void UpdateHeader(BinaryWriter writer)
- {
- writer.Flush();
- UpdateRiffChunk(writer);
- UpdateFactChunk(writer);
- UpdateDataChunk(writer);
- }
-
- private void UpdateDataChunk(BinaryWriter writer)
- {
- writer.Seek((int)dataSizePos, SeekOrigin.Begin);
- writer.Write((UInt32)dataChunkSize);
- }
-
- private void UpdateRiffChunk(BinaryWriter writer)
- {
- writer.Seek(4, SeekOrigin.Begin);
- writer.Write((UInt32)(outStream.Length - 8));
- }
-
- private void UpdateFactChunk(BinaryWriter writer)
- {
- if (HasFactChunk())
- {
- int bitsPerSample = (format.BitsPerSample * format.Channels);
- if (bitsPerSample != 0)
- {
- writer.Seek((int)factSampleCountPos, SeekOrigin.Begin);
-
- writer.Write((int)((dataChunkSize * 8) / bitsPerSample));
- }
- }
- }
-
- ///
- /// Finaliser - should only be called if the user forgot to close this WaveFileWriterRT
- ///
- ~WaveFileWriterRT()
- {
- System.Diagnostics.Debug.Assert(false, "WaveFileWriterRT was not disposed");
- Dispose(false);
- }
-
- #endregion
- }
-}
+using NAudio.Wave.SampleProviders;
+using System;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+using Windows.Storage;
+
+namespace NAudio.Wave
+{
+ ///
+ /// This class writes WAV data to a .wav file on disk
+ ///
+ public class WaveFileWriterRT : Stream
+ {
+ private Stream outStream;
+ private readonly BinaryWriter writer;
+ private long dataSizePos;
+ private long factSampleCountPos;
+ private long dataChunkSize;
+ private readonly WaveFormat format;
+ private string filename;
+
+ // Protects WriteAsync and FlushAsync from overlapping
+ private readonly Semaphore asyncOperationsLock = new Semaphore(1, 100);
+
+ /// <summary>
+ /// Creates a 16 bit Wave File from an ISampleProvider
+ /// BEWARE: the source provider must not return data indefinitely
+ /// </summary>
+ /// <param name="filename">The filename to write to</param>
+ /// <param name="sourceProvider">The source sample provider</param>
+ public static Task CreateWaveFile16Async(string filename, ISampleProvider sourceProvider)
+ {
+ return CreateWaveFileAsync(filename, new SampleToWaveProvider16(sourceProvider));
+ }
+
+ /// <summary>
+ /// Creates a Wave file by reading all the data from a WaveProvider
+ /// BEWARE: the WaveProvider MUST return 0 from its Read method when it is finished,
+ /// or the Wave File will grow indefinitely.
+ /// </summary>
+ /// <param name="filename">The filename to use</param>
+ /// <param name="sourceProvider">The source WaveProvider</param>
+ public static async Task CreateWaveFileAsync(string filename, IWaveProvider sourceProvider)
+ {
+ StorageFile fileOperation = await StorageFile.GetFileFromPathAsync(filename);
+ Stream fileStream = await fileOperation.OpenStreamForWriteAsync();
+
+ using (var writer = new WaveFileWriterRT(fileStream, sourceProvider.WaveFormat))
+ {
+ writer.filename = filename;
+ long outputLength = 0;
+ var buffer = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond * 4];
+ while (true)
+ {
+ int bytesRead = sourceProvider.Read(buffer, 0, buffer.Length);
+ if (bytesRead == 0)
+ {
+ // end of source provider
+ break;
+ }
+ outputLength += bytesRead;
+ // Write will throw exception if WAV file becomes too large
+ writer.Write(buffer, 0, bytesRead);
+ }
+ }
+ }
+
+ /// <summary>
+ /// WaveFileWriterRT that actually writes to a stream
+ /// </summary>
+ /// <param name="outStream">Stream to be written to</param>
+ /// <param name="format">Wave format to use</param>
+ public WaveFileWriterRT(Stream outStream, WaveFormat format)
+ {
+ this.outStream = outStream;
+ this.format = format;
+ this.writer = new BinaryWriter(outStream, System.Text.Encoding.UTF8);
+ this.writer.Write(System.Text.Encoding.UTF8.GetBytes("RIFF"));
+ this.writer.Write((int)0); // placeholder
+ this.writer.Write(System.Text.Encoding.UTF8.GetBytes("WAVE"));
+
+ this.writer.Write(System.Text.Encoding.UTF8.GetBytes("fmt "));
+ format.Serialize(this.writer);
+
+ CreateFactChunk();
+ WriteDataChunkHeader();
+ }
+
+ private void WriteDataChunkHeader()
+ {
+ this.writer.Write(System.Text.Encoding.UTF8.GetBytes("data"));
+ dataSizePos = this.outStream.Position;
+ this.writer.Write((int)0); // placeholder
+ }
+
+ private void CreateFactChunk()
+ {
+ if (HasFactChunk())
+ {
+ this.writer.Write(System.Text.Encoding.UTF8.GetBytes("fact"));
+ this.writer.Write((int)4);
+ factSampleCountPos = this.outStream.Position;
+ this.writer.Write((int)0); // number of samples
+ }
+ }
+
+ private bool HasFactChunk()
+ {
+ return format.Encoding != WaveFormatEncoding.Pcm &&
+ format.BitsPerSample != 0;
+ }
+
+ ///
+ /// The wave file name or null if not applicable
+ ///
+ public string Filename
+ {
+ get { return filename; }
+ }
+
+ ///
+ /// Number of bytes of audio in the data chunk
+ ///
+ public override long Length
+ {
+ get { return dataChunkSize; }
+ }
+
+ ///
+ /// WaveFormat of this wave file
+ ///
+ public WaveFormat WaveFormat
+ {
+ get { return format; }
+ }
+
+ ///
+ /// Returns false: Cannot read from a WaveFileWriterRT
+ ///
+ public override bool CanRead
+ {
+ get { return false; }
+ }
+
+ ///
+ /// Returns true: Can write to a WaveFileWriterRT
+ ///
+ public override bool CanWrite
+ {
+ get { return true; }
+ }
+
+ ///
+ /// Returns false: Cannot seek within a WaveFileWriterRT
+ ///
+ public override bool CanSeek
+ {
+ get { return false; }
+ }
+
+ ///
+ /// Returns false: Cannot timeout within a WaveFileWriterRT
+ ///
+ public override bool CanTimeout
+ {
+ get { return false; }
+ }
+
+ ///
+ /// CopyToAsync is not supported for a WaveFileWriterRT
+ ///
+ public override Task CopyToAsync(Stream destination, int bufferSize, CancellationToken cancellationToken)
+ {
+ throw new NotImplementedException("Cannot copy from a WaveFileWriterRT");
+ }
+
+ ///
+ /// Read is not supported for a WaveFileWriterRT
+ ///
+ public override int Read(byte[] buffer, int offset, int count)
+ {
+ throw new InvalidOperationException("Cannot read from a WaveFileWriterRT");
+ }
+
+ ///
+ /// ReadAsync is not supported for a WaveFileWriterRT
+ ///
+ public override Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
+ {
+ throw new InvalidOperationException("Cannot read from a WaveFileWriterRT");
+ }
+
+ ///
+ /// ReadByte is not supported for a WaveFileWriterRT
+ ///
+ public override int ReadByte()
+ {
+ throw new InvalidOperationException("Cannot read from a WaveFileWriterRT");
+ }
+
+ ///
+ /// Seek is not supported for a WaveFileWriterRT
+ ///
+ public override long Seek(long offset, SeekOrigin origin)
+ {
+ throw new InvalidOperationException("Cannot seek within a WaveFileWriterRT");
+ }
+
+ /// <summary>
+ /// SetLength is not supported for WaveFileWriterRT
+ /// </summary>
+ /// <param name="value"></param>
+ public override void SetLength(long value)
+ {
+ throw new InvalidOperationException("Cannot set length of a WaveFileWriterRT");
+ }
+
+ ///
+ /// Gets the Position in the WaveFile (i.e. number of bytes written so far)
+ ///
+ public override long Position
+ {
+ get { return dataChunkSize; }
+ set { throw new InvalidOperationException("Repositioning a WaveFileWriterRT is not supported"); }
+ }
+
+ /// <summary>
+ /// Appends bytes to the WaveFile (assumes they are already in the correct format)
+ /// </summary>
+ /// <param name="data">the buffer containing the wave data</param>
+ /// <param name="offset">the offset from which to start writing</param>
+ /// <param name="count">the number of bytes to write</param>
+ public override void Write(byte[] data, int offset, int count)
+ {
+ if (outStream.Length + count > UInt32.MaxValue)
+ throw new ArgumentException("WAV file too large", "count");
+ outStream.Write(data, offset, count);
+ dataChunkSize += count;
+ }
+
+ /// <summary>
+ /// Appends bytes to the WaveFile (assumes they are already in the correct format)
+ /// </summary>
+ /// <param name="buffer">the buffer containing the wave data</param>
+ /// <param name="offset">the offset from which to start writing</param>
+ /// <param name="count">the number of bytes to write</param>
+ /// <param name="cancellationToken">Cancellation token</param>
+ public override Task WriteAsync(byte[] buffer, int offset, int count,
+ CancellationToken cancellationToken)
+ {
+ return Task.Run(() =>
+ {
+ try
+ {
+ asyncOperationsLock.WaitOne();
+ Write(buffer, offset, count);
+ }
+ finally
+ {
+ asyncOperationsLock.Release();
+ }
+ });
+ }
+
+ /// <summary>
+ /// WriteByte is not supported for a WaveFileWriterRT
+ /// Use Write instead
+ /// </summary>
+ /// <param name="value">value to write</param>
+ public override void WriteByte(byte value)
+ {
+ throw new NotImplementedException();
+ }
+
+ private readonly byte[] value24 = new byte[3]; // keep this around to save us creating it every time
+
+ /// <summary>
+ /// Writes a single sample to the Wave file
+ /// </summary>
+ /// <param name="sample">the sample to write (assumed floating point with 1.0f as max value)</param>
+ public void WriteSample(float sample)
+ {
+ if (WaveFormat.BitsPerSample == 16)
+ {
+ writer.Write((Int16)(Int16.MaxValue * sample));
+ dataChunkSize += 2;
+ }
+ else if (WaveFormat.BitsPerSample == 24)
+ {
+ var value = BitConverter.GetBytes((Int32)(Int32.MaxValue * sample));
+ value24[0] = value[1];
+ value24[1] = value[2];
+ value24[2] = value[3];
+ writer.Write(value24);
+ dataChunkSize += 3;
+ }
+ else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.Extensible)
+ {
+ writer.Write(UInt16.MaxValue * (Int32)sample);
+ dataChunkSize += 4;
+ }
+ else if (WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
+ {
+ writer.Write(sample);
+ dataChunkSize += 4;
+ }
+ else
+ {
+ throw new InvalidOperationException("Only 16, 24 or 32 bit PCM or IEEE float audio data supported");
+ }
+ }
+
+ /// <summary>
+ /// Writes 32 bit floating point samples to the Wave file
+ /// They will be converted to the appropriate bit depth depending on the WaveFormat of the WAV file
+ /// </summary>
+ /// <param name="samples">The buffer containing the floating point samples</param>
+ /// <param name="offset">The offset from which to start writing</param>
+ /// <param name="count">The number of floating point samples to write</param>
+ public void WriteSamples(float[] samples, int offset, int count)
+ {
+ for (int n = 0; n < count; n++)
+ {
+ WriteSample(samples[offset + n]);
+ }
+ }
+
+ /// <summary>
+ /// Writes 16 bit samples to the Wave file
+ /// </summary>
+ /// <param name="samples">The buffer containing the 16 bit samples</param>
+ /// <param name="offset">The offset from which to start writing</param>
+ /// <param name="count">The number of 16 bit samples to write</param>
+ public void WriteSamples(short[] samples, int offset, int count)
+ {
+ // 16 bit PCM data
+ if (WaveFormat.BitsPerSample == 16)
+ {
+ for (int sample = 0; sample < count; sample++)
+ {
+ writer.Write(samples[sample + offset]);
+ }
+ dataChunkSize += (count * 2);
+ }
+ // 24 bit PCM data
+ else if (WaveFormat.BitsPerSample == 24)
+ {
+ byte[] value;
+ for (int sample = 0; sample < count; sample++)
+ {
+ value = BitConverter.GetBytes(UInt16.MaxValue * (Int32)samples[sample + offset]);
+ value24[0] = value[1];
+ value24[1] = value[2];
+ value24[2] = value[3];
+ writer.Write(value24);
+ }
+ dataChunkSize += (count * 3);
+ }
+ // 32 bit PCM data
+ else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.Extensible)
+ {
+ for (int sample = 0; sample < count; sample++)
+ {
+ writer.Write(UInt16.MaxValue * (Int32)samples[sample + offset]);
+ }
+ dataChunkSize += (count * 4);
+ }
+ // IEEE float data
+ else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
+ {
+ for (int sample = 0; sample < count; sample++)
+ {
+ writer.Write((float)samples[sample + offset] / (float)(Int16.MaxValue + 1));
+ }
+ dataChunkSize += (count * 4);
+ }
+ else
+ {
+ throw new InvalidOperationException("Only 16, 24 or 32 bit PCM or IEEE float audio data supported");
+ }
+ }
+
+ ///
+ /// Ensures data is written to disk
+ ///
+ public override void Flush()
+ {
+ var pos = writer.BaseStream.Position;
+ UpdateHeader(writer);
+ writer.BaseStream.Position = pos;
+ }
+
+ ///
+ /// Ensures data is written to disk
+ ///
+ public override Task FlushAsync(CancellationToken cancellationToken)
+ {
+ return Task.Run(() =>
+ {
+ try
+ {
+ asyncOperationsLock.WaitOne();
+ Flush();
+ }
+ finally
+ {
+ asyncOperationsLock.Release();
+ }
+ });
+ }
+
+ #region IDisposable Members
+
+ /// <summary>
+ /// Actually performs the close, making sure the header contains the correct data
+ /// </summary>
+ /// <param name="disposing">True if called from Dispose</param>
+ protected override void Dispose(bool disposing)
+ {
+ if (disposing)
+ {
+ if (outStream != null)
+ {
+ try
+ {
+ UpdateHeader(writer);
+ }
+ finally
+ {
+ // in a finally block as we don't want the FileStream to run its disposer in
+ // the GC thread if the code above caused an IOException (e.g. due to disk full)
+ outStream.Dispose(); // will close the underlying base stream
+ outStream = null;
+ asyncOperationsLock.Dispose();
+ }
+ }
+ }
+ }
+
+ ///
+ /// Updates the header with file size information
+ ///
+ protected virtual void UpdateHeader(BinaryWriter writer)
+ {
+ writer.Flush();
+ UpdateRiffChunk(writer);
+ UpdateFactChunk(writer);
+ UpdateDataChunk(writer);
+ }
+
+ private void UpdateDataChunk(BinaryWriter writer)
+ {
+ writer.Seek((int)dataSizePos, SeekOrigin.Begin);
+ writer.Write((UInt32)dataChunkSize);
+ }
+
+ private void UpdateRiffChunk(BinaryWriter writer)
+ {
+ writer.Seek(4, SeekOrigin.Begin);
+ writer.Write((UInt32)(outStream.Length - 8));
+ }
+
+ private void UpdateFactChunk(BinaryWriter writer)
+ {
+ if (HasFactChunk())
+ {
+ int bitsPerSample = (format.BitsPerSample * format.Channels);
+ if (bitsPerSample != 0)
+ {
+ writer.Seek((int)factSampleCountPos, SeekOrigin.Begin);
+
+ writer.Write((int)((dataChunkSize * 8) / bitsPerSample));
+ }
+ }
+ }
+
+ ///
+ /// Finaliser - should only be called if the user forgot to close this WaveFileWriterRT
+ ///
+ ~WaveFileWriterRT()
+ {
+ System.Diagnostics.Debug.Assert(false, "WaveFileWriterRT was not disposed");
+ Dispose(false);
+ }
+
+ #endregion
+ }
+}
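
A minimal usage sketch for the renamed writer (not part of the patch), assuming the usual NAudio.Wave, System.IO, System.Threading.Tasks and Windows.Storage usings; it writes one second of 16-bit mono silence through the float-sample path shown above:

    static async Task WriteSilenceAsync(StorageFile file)
    {
        var format = new WaveFormat(44100, 16, 1);            // 44.1 kHz, 16 bit, mono
        using (var stream = await file.OpenStreamForWriteAsync())
        using (var writer = new WaveFileWriterRT(stream, format))
        {
            var silence = new float[44100];                   // one second of zero samples
            writer.WriteSamples(silence, 0, silence.Length);  // converted to 16-bit PCM by WriteSample
            writer.Flush();                                   // rewrites the RIFF and data chunk sizes, then restores the position
        }                                                     // Dispose updates the header again and closes the stream
    }
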
diff --git a/NAudio.Win8.sln b/NAudio.Win8.sln
deleted file mode 100644
index cf2de0a1..00000000
--- a/NAudio.Win8.sln
+++ /dev/null
@@ -1,68 +0,0 @@
-
-Microsoft Visual Studio Solution File, Format Version 12.00
-# Visual Studio 14
-VisualStudioVersion = 14.0.25420.1
-MinimumVisualStudioVersion = 10.0.40219.1
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{F91DCE20-90A9-49A8-8773-CACECE651F37}"
-EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "NAudio.Win8", "NAudio.Win8\NAudio.Win8.csproj", "{90543F38-E793-40C3-972D-3271EBF1DEF4}"
-EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "NAudioWin8Demo", "NAudioWin8Demo\NAudioWin8Demo.csproj", "{03A0E22E-5B00-4B87-9CDF-20CC121DCF03}"
-EndProject
-Global
- GlobalSection(SolutionConfigurationPlatforms) = preSolution
- Debug|Any CPU = Debug|Any CPU
- Debug|ARM = Debug|ARM
- Debug|x64 = Debug|x64
- Debug|x86 = Debug|x86
- Release|Any CPU = Release|Any CPU
- Release|ARM = Release|ARM
- Release|x64 = Release|x64
- Release|x86 = Release|x86
- EndGlobalSection
- GlobalSection(ProjectConfigurationPlatforms) = postSolution
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|Any CPU.Build.0 = Debug|Any CPU
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|ARM.ActiveCfg = Debug|ARM
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|ARM.Build.0 = Debug|ARM
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|x64.ActiveCfg = Debug|x64
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|x64.Build.0 = Debug|x64
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|x86.ActiveCfg = Debug|x86
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|x86.Build.0 = Debug|x86
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|Any CPU.ActiveCfg = Release|Any CPU
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|Any CPU.Build.0 = Release|Any CPU
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|ARM.ActiveCfg = Release|ARM
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|ARM.Build.0 = Release|ARM
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|x64.ActiveCfg = Release|x64
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|x64.Build.0 = Release|x64
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|x86.ActiveCfg = Release|x86
- {90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|x86.Build.0 = Release|x86
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|Any CPU.Build.0 = Debug|Any CPU
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|Any CPU.Deploy.0 = Debug|Any CPU
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|ARM.ActiveCfg = Debug|ARM
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|ARM.Build.0 = Debug|ARM
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|ARM.Deploy.0 = Debug|ARM
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|x64.ActiveCfg = Debug|x64
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|x64.Build.0 = Debug|x64
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|x64.Deploy.0 = Debug|x64
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|x86.ActiveCfg = Debug|x86
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|x86.Build.0 = Debug|x86
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|x86.Deploy.0 = Debug|x86
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|Any CPU.ActiveCfg = Release|Any CPU
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|Any CPU.Build.0 = Release|Any CPU
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|Any CPU.Deploy.0 = Release|Any CPU
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|ARM.ActiveCfg = Release|ARM
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|ARM.Build.0 = Release|ARM
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|ARM.Deploy.0 = Release|ARM
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|x64.ActiveCfg = Release|x64
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|x64.Build.0 = Release|x64
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|x64.Deploy.0 = Release|x64
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|x86.ActiveCfg = Release|x86
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|x86.Build.0 = Release|x86
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|x86.Deploy.0 = Release|x86
- EndGlobalSection
- GlobalSection(SolutionProperties) = preSolution
- HideSolutionNode = FALSE
- EndGlobalSection
-EndGlobal
diff --git a/NAudio.Win8.sln.DotSettings b/NAudio.Win8.sln.DotSettings
deleted file mode 100644
index e3bb2d5c..00000000
--- a/NAudio.Win8.sln.DotSettings
+++ /dev/null
@@ -1,6 +0,0 @@
-
- System
- System.Linq
- <Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" />
- <Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" />
- <Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" />
\ No newline at end of file
diff --git a/NAudio.Win8/NAudio.Win8.csproj b/NAudio.Win8/NAudio.Win8.csproj
deleted file mode 100644
index 035339eb..00000000
--- a/NAudio.Win8/NAudio.Win8.csproj
+++ /dev/null
@@ -1,837 +0,0 @@
-
-
-
-
- Debug
- AnyCPU
- 8.0.30703
- 2.0
- {90543F38-E793-40C3-972D-3271EBF1DEF4}
- Library
- Properties
- NAudio.Win8
- NAudio.Win8
- en-US
- 512
- {BC8A1FFA-BEE3-4634-8014-F334798102B3};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}
- 8.1
- 12
-
-
-
- true
- full
- false
- bin\Debug\
- DEBUG;TRACE;NETFX_CORE
- prompt
- 4
- false
- bin\Debug\NAudio.Win8.XML
-
-
- pdbonly
- true
- bin\Release\
- TRACE;NETFX_CORE
- prompt
- 4
- false
- bin\Release\NAudio.Win8.XML
-
-
- true
- bin\ARM\Debug\
- DEBUG;TRACE;NETFX_CORE
- ;2008
- full
- ARM
- false
- prompt
- true
-
-
- bin\ARM\Release\
- TRACE;NETFX_CORE
- true
- ;2008
- pdbonly
- ARM
- false
- prompt
- true
-
-
- true
- bin\x64\Debug\
- DEBUG;TRACE;NETFX_CORE
- ;2008
- full
- x64
- false
- prompt
- true
-
-
- bin\x64\Release\
- TRACE;NETFX_CORE
- true
- ;2008
- pdbonly
- x64
- false
- prompt
- true
-
-
- true
- bin\x86\Debug\
- DEBUG;TRACE;NETFX_CORE
- ;2008
- full
- x86
- false
- prompt
- true
-
-
- bin\x86\Release\
- TRACE;NETFX_CORE
- true
- ;2008
- pdbonly
- x86
- false
- prompt
- true
-
-
-
-
-
-
- Codecs\ALawDecoder.cs
-
-
- Codecs\ALawEncoder.cs
-
-
- Codecs\G722Codec.cs
-
-
- Codecs\MuLawDecoder.cs
-
-
- Codecs\MuLawEncoder.cs
-
-
- CoreAudioApi\AudioCaptureClient.cs
-
-
- CoreAudioApi\AudioClient.cs
-
-
- CoreAudioApi\AudioClientBufferFlags.cs
-
-
- CoreAudioApi\AudioClientProperties.cs
-
-
- CoreAudioApi\AudioClientShareMode.cs
-
-
- CoreAudioApi\AudioClientStreamFlags.cs
-
-
- CoreAudioApi\AudioClientStreamOptions.cs
-
-
- CoreAudioApi\AudioClockClient.cs
-
-
- CoreAudioApi\AudioEndpointVolume.cs
-
-
- CoreAudioApi\AudioEndpointVolumeCallback.cs
-
-
- CoreAudioApi\AudioEndpointVolumeChannel.cs
-
-
- CoreAudioApi\AudioEndpointVolumeChannels.cs
-
-
- CoreAudioApi\AudioEndpointVolumeNotificationDelegate.cs
-
-
- CoreAudioApi\AudioEndpointVolumeStepInformation.cs
-
-
- CoreAudioApi\AudioEndpointVolumeVolumeRange.cs
-
-
- CoreAudioApi\AudioMeterInformation.cs
-
-
- CoreAudioApi\AudioMeterInformationChannels.cs
-
-
- CoreAudioApi\AudioRenderClient.cs
-
-
- CoreAudioApi\AudioSessionControl.cs
-
-
- CoreAudioApi\AudioSessionEventsCallback.cs
-
-
- CoreAudioApi\AudioSessionManager.cs
-
-
- CoreAudioApi\AudioSessionNotification.cs
-
-
- CoreAudioApi\AudioStreamCategory.cs
-
-
- CoreAudioApi\AudioStreamVolume.cs
-
-
- CoreAudioApi\AudioVolumeNotificationData.cs
-
-
- CoreAudioApi\DataFlow.cs
-
-
- CoreAudioApi\DeviceState.cs
-
-
- CoreAudioApi\EEndpointHardwareSupport.cs
-
-
- CoreAudioApi\Interfaces\AudioVolumeNotificationDataStruct.cs
-
-
- CoreAudioApi\Interfaces\Blob.cs
-
-
- CoreAudioApi\Interfaces\ClsCtx.cs
-
-
- CoreAudioApi\Interfaces\ErrorCodes.cs
-
-
- CoreAudioApi\Interfaces\IAudioCaptureClient.cs
-
-
- CoreAudioApi\Interfaces\IAudioClient.cs
-
-
- CoreAudioApi\Interfaces\IAudioClock2.cs
-
-
- CoreAudioApi\Interfaces\IAudioEndpointVolume.cs
-
-
- CoreAudioApi\Interfaces\IAudioEndpointVolumeCallback.cs
-
-
- CoreAudioApi\Interfaces\IAudioMeterInformation.cs
-
-
- CoreAudioApi\Interfaces\IAudioRenderClient.cs
-
-
- CoreAudioApi\Interfaces\IAudioSessionControl.cs
-
-
- CoreAudioApi\Interfaces\IAudioSessionEnumerator.cs
-
-
- CoreAudioApi\Interfaces\IAudioSessionEvents.cs
-
-
- CoreAudioApi\Interfaces\IAudioSessionEventsHandler.cs
-
-
- CoreAudioApi\Interfaces\IAudioSessionManager.cs
-
-
- CoreAudioApi\Interfaces\IAudioSessionNotification.cs
-
-
- CoreAudioApi\Interfaces\IAudioStreamVolume.cs
-
-
- CoreAudioApi\Interfaces\IMMDevice.cs
-
-
- CoreAudioApi\Interfaces\IMMDeviceCollection.cs
-
-
- CoreAudioApi\Interfaces\IMMDeviceEnumerator.cs
-
-
- CoreAudioApi\Interfaces\IMMEndpoint.cs
-
-
- CoreAudioApi\Interfaces\IMMNotificationClient.cs
-
-
- CoreAudioApi\Interfaces\IPropertyStore.cs
-
-
- CoreAudioApi\Interfaces\ISimpleAudioVolume.cs
-
-
- CoreAudioApi\Interfaces\MMDeviceEnumeratorComObject.cs
-
-
- CoreAudioApi\Interfaces\StorageAccessMode.cs
-
-
- CoreAudioApi\MMDevice.cs
-
-
- CoreAudioApi\MMDeviceCollection.cs
-
-
- CoreAudioApi\MMDeviceEnumerator.cs
-
-
- CoreAudioApi\PropertyKey.cs
-
-
- CoreAudioApi\PropertyKeys.cs
-
-
- CoreAudioApi\PropertyStore.cs
-
-
- CoreAudioApi\PropertyStoreProperty.cs
-
-
- CoreAudioApi\PropVariant.cs
-
-
- CoreAudioApi\PropVariantNative.cs
-
-
- CoreAudioApi\Role.cs
-
-
- CoreAudioApi\SessionCollection.cs
-
-
- CoreAudioApi\SimpleAudioVolume.cs
-
-
- Dmo\AudioMediaSubtypes.cs
-
-
- Dmo\IWMResamplerProps.cs
-
-
- Dsp\BiQuadFilter.cs
-
-
- Dsp\Complex.cs
-
-
- Dsp\EnvelopeDetector.cs
-
-
- Dsp\EnvelopeGenerator.cs
-
-
- Dsp\FastFourierTransform.cs
-
-
- Dsp\ImpulseResponseConvolution.cs
-
-
- Dsp\SimpleCompressor.cs
-
-
- Dsp\SimpleGate.cs
-
-
- Dsp\WdlResampler.cs
-
-
- FileFormats\Mp3\ChannelMode.cs
-
-
- FileFormats\Mp3\Id3v2Tag.cs
-
-
- FileFormats\Mp3\IMp3FrameDecompressor.cs
-
-
- FileFormats\Mp3\Mp3Frame.cs
-
-
- FileFormats\Mp3\MpegLayer.cs
-
-
- FileFormats\Mp3\MpegVersion.cs
-
-
- FileFormats\Mp3\XingHeader.cs
-
-
- FileFormats\SoundFont\Generator.cs
-
-
- FileFormats\SoundFont\GeneratorBuilder.cs
-
-
- FileFormats\SoundFont\GeneratorEnum.cs
-
-
- FileFormats\SoundFont\InfoChunk.cs
-
-
- FileFormats\SoundFont\Instrument.cs
-
-
- FileFormats\SoundFont\InstrumentBuilder.cs
-
-
- FileFormats\SoundFont\Modulator.cs
-
-
- FileFormats\SoundFont\ModulatorBuilder.cs
-
-
- FileFormats\SoundFont\ModulatorType.cs
-
-
- FileFormats\SoundFont\Preset.cs
-
-
- FileFormats\SoundFont\PresetBuilder.cs
-
-
- FileFormats\SoundFont\PresetsChunk.cs
-
-
- FileFormats\SoundFont\RiffChunk.cs
-
-
- FileFormats\SoundFont\SampleDataChunk.cs
-
-
- FileFormats\SoundFont\SampleHeader.cs
-
-
- FileFormats\SoundFont\SampleHeaderBuilder.cs
-
-
- FileFormats\SoundFont\SampleMode.cs
-
-
- FileFormats\SoundFont\SFSampleLink.cs
-
-
- FileFormats\SoundFont\SFVersion.cs
-
-
- FileFormats\SoundFont\SFVersionBuilder.cs
-
-
- FileFormats\SoundFont\SoundFont.cs
-
-
- FileFormats\SoundFont\StructureBuilder.cs
-
-
- FileFormats\SoundFont\Zone.cs
-
-
- FileFormats\SoundFont\ZoneBuilder.cs
-
-
- MediaFoundation\AudioSubtypes.cs
-
-
- MediaFoundation\IMFActivate.cs
-
-
- MediaFoundation\IMFAttributes.cs
-
-
- MediaFoundation\IMFByteStream.cs
-
-
- MediaFoundation\IMFCollection.cs
-
-
- MediaFoundation\IMFMediaBuffer.cs
-
-
- MediaFoundation\IMFMediaEvent.cs
-
-
- MediaFoundation\IMFMediaType.cs
-
-
- MediaFoundation\IMFReadWriteClassFactory.cs
-
-
- MediaFoundation\IMFSample.cs
-
-
- MediaFoundation\IMFSinkWriter.cs
-
-
- MediaFoundation\IMFSourceReader.cs
-
-
- MediaFoundation\IMFTransform.cs
-
-
- MediaFoundation\MediaEventType.cs
-
-
- MediaFoundation\MediaFoundationAttributes.cs
-
-
- MediaFoundation\MediaFoundationErrors.cs
-
-
- MediaFoundation\MediaFoundationHelpers.cs
-
-
- MediaFoundation\MediaFoundationInterop.cs
-
-
- MediaFoundation\MediaFoundationTransform.cs
-
-
- MediaFoundation\MediaFoundationTransformCategories.cs
-
-
- MediaFoundation\MediaType.cs
-
-
- MediaFoundation\MediaTypes.cs
-
-
- MediaFoundation\MFT_INPUT_STREAM_INFO.cs
-
-
- MediaFoundation\MFT_MESSAGE_TYPE.cs
-
-
- MediaFoundation\MFT_OUTPUT_DATA_BUFFER.cs
-
-
- MediaFoundation\MFT_OUTPUT_STREAM_INFO.cs
-
-
- MediaFoundation\MFT_REGISTER_TYPE_INFO.cs
-
-
- MediaFoundation\MF_SINK_WRITER_STATISTICS.cs
-
-
- MediaFoundation\_MFT_ENUM_FLAG.cs
-
-
- MediaFoundation\_MFT_INPUT_STATUS_FLAGS.cs
-
-
- MediaFoundation\_MFT_INPUT_STREAM_INFO_FLAGS.cs
-
-
- MediaFoundation\_MFT_OUTPUT_DATA_BUFFER_FLAGS.cs
-
-
- MediaFoundation\_MFT_OUTPUT_STATUS_FLAGS.cs
-
-
- MediaFoundation\_MFT_OUTPUT_STREAM_INFO_FLAGS.cs
-
-
- MediaFoundation\_MFT_PROCESS_OUTPUT_FLAGS.cs
-
-
- MediaFoundation\_MFT_PROCESS_OUTPUT_STATUS.cs
-
-
- MediaFoundation\_MFT_SET_TYPE_FLAGS.cs
-
-
- Utils\BufferHelpers.cs
-
-
- Utils\ByteArrayExtensions.cs
-
-
- Utils\ByteEncoding.cs
-
-
- Utils\CircularBuffer.cs
-
-
- Utils\Decibels.cs
-
-
- Utils\FieldDescriptionAttribute.cs
-
-
- Utils\FieldDescriptionHelper.cs
-
-
- Utils\HResult.cs
-
-
- Utils\IEEE.cs
-
-
- Utils\IgnoreDisposeStream.cs
-
-
- Utils\MarshalHelpers.cs
-
-
- Utils\MergeSort.cs
-
-
- Utils\NativeMethods.cs
-
-
- Wave\WaveInputs\WaveInEventArgs.cs
-
-
- Wave\SampleChunkConverters\ISampleChunkConverter.cs
-
-
- Wave\SampleChunkConverters\Mono16SampleChunkConverter.cs
-
-
- Wave\SampleChunkConverters\Mono24SampleChunkConverter.cs
-
-
- Wave\SampleChunkConverters\Mono8SampleChunkConverter.cs
-
-
- Wave\SampleChunkConverters\MonoFloatSampleChunkConverter.cs
-
-
- Wave\SampleChunkConverters\Stereo16SampleChunkConverter.cs
-
-
- Wave\SampleChunkConverters\Stereo24SampleChunkConverter.cs
-
-
- Wave\SampleChunkConverters\Stereo8SampleChunkConverter.cs
-
-
- Wave\SampleChunkConverters\StereoFloatSampleChunkConverter.cs
-
-
- Wave\SampleProviders\ConcatenatingSampleProvider.cs
-
-
- Wave\SampleProviders\FadeInOutSampleProvider.cs
-
-
- Wave\SampleProviders\MeteringSampleProvider.cs
-
-
- Wave\SampleProviders\MixingSampleProvider.cs
-
-
- Wave\SampleProviders\MonoToStereoSampleProvider.cs
-
-
- Wave\SampleProviders\MultiplexingSampleProvider.cs
-
-
- Wave\SampleProviders\NotifyingSampleProvider.cs
-
-
- Wave\SampleProviders\OffsetSampleProvider.cs
-
-
- Wave\SampleProviders\PanningSampleProvider.cs
-
-
- Wave\SampleProviders\Pcm16BitToSampleProvider.cs
-
-
- Wave\SampleProviders\Pcm24BitToSampleProvider.cs
-
-
- Wave\SampleProviders\Pcm32BitToSampleProvider.cs
-
-
- Wave\SampleProviders\Pcm8BitToSampleProvider.cs
-
-
- Wave\SampleProviders\SampleChannel.cs
-
-
- Wave\SampleProviders\SampleProviderConverterBase.cs
-
-
- Wave\SampleProviders\SampleProviderConverters.cs
-
-
- Wave\SampleProviders\SampleToWaveProvider.cs
-
-
- Wave\SampleProviders\SampleToWaveProvider16.cs
-
-
- Wave\SampleProviders\SampleToWaveProvider24.cs
-
-
- Wave\SampleProviders\SignalGenerator.cs
-
-
- Wave\SampleProviders\StereoToMonoSampleProvider.cs
-
-
- Wave\SampleProviders\VolumeSampleProvider.cs
-
-
- Wave\SampleProviders\WaveToSampleProvider.cs
-
-
- Wave\SampleProviders\WaveToSampleProvider64.cs
-
-
- Wave\SampleProviders\WdlResamplingSampleProvider.cs
-
-
- Wave\WaveExtensionMethods.cs
-
-
- Wave\WaveFormats\AdpcmWaveFormat.cs
-
-
- Wave\WaveFormats\Gsm610WaveFormat.cs
-
-
- Wave\WaveFormats\ImaAdpcmWaveFormat.cs
-
-
- Wave\WaveFormats\Mp3WaveFormat.cs
-
-
- Wave\WaveFormats\OggWaveFormat.cs
-
-
- Wave\WaveFormats\TrueSpeechWaveFormat.cs
-
-
- Wave\WaveFormats\WaveFormat.cs
-
-
- Wave\WaveFormats\WaveFormatEncoding.cs
-
-
- Wave\WaveFormats\WaveFormatExtensible.cs
-
-
- Wave\WaveFormats\WaveFormatExtraData.cs
-
-
- Wave\WaveFormats\WmaWaveFormat.cs
-
-
- Wave\WaveInputs\IWaveIn.cs
-
-
- Wave\WaveOutputs\IWaveBuffer.cs
-
-
- Wave\WaveOutputs\IWaveProvider.cs
-
-
- Wave\WaveOutputs\IWaveProviderFloat.cs
-
-
- Wave\WaveOutputs\PlaybackState.cs
-
-
- Wave\WaveOutputs\StoppedEventArgs.cs
-
-
- Wave\WaveOutputs\WaveBuffer.cs
-
-
- Wave\WaveProviders\BufferedWaveProvider.cs
-
-
- Wave\WaveProviders\MonoToStereoProvider16.cs
-
-
- Wave\WaveProviders\MultiplexingWaveProvider.cs
-
-
- Wave\WaveProviders\StereoToMonoProvider16.cs
-
-
- Wave\WaveProviders\VolumeWaveProvider16.cs
-
-
- Wave\WaveProviders\Wave16toFloatProvider.cs
-
-
- Wave\WaveProviders\WaveFloatTo16Provider.cs
-
-
- Wave\WaveProviders\WaveInProvider.cs
-
-
- Wave\WaveProviders\WaveProvider16.cs
-
-
- Wave\WaveProviders\WaveProvider32.cs
-
-
- Wave\WaveStreams\BlockAlignReductionStream.cs
-
-
- Wave\WaveStreams\ISampleNotifier.cs
-
-
- Wave\WaveStreams\MediaFoundationReader.cs
-
-
- Wave\WaveStreams\RawSourceWaveStream.cs
-
-
- Wave\WaveStreams\SimpleCompressorStream.cs
-
-
- Wave\WaveStreams\WaveChannel32.cs
-
-
- Wave\WaveStreams\WaveOffsetStream.cs
-
-
- Wave\WaveStreams\WaveStream.cs
-
-
-
-
-
-
-
-
- 12.0
-
-
-
-
\ No newline at end of file
diff --git a/NAudio.Win8/Properties/AssemblyInfo.cs b/NAudio.Win8/Properties/AssemblyInfo.cs
deleted file mode 100644
index 22b7f499..00000000
--- a/NAudio.Win8/Properties/AssemblyInfo.cs
+++ /dev/null
@@ -1,29 +0,0 @@
-using System.Reflection;
-using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
-
-// General Information about an assembly is controlled through the following
-// set of attributes. Change these attribute values to modify the information
-// associated with an assembly.
-[assembly: AssemblyTitle("NAudio.Win8")]
-[assembly: AssemblyDescription("")]
-[assembly: AssemblyConfiguration("")]
-[assembly: AssemblyCompany("")]
-[assembly: AssemblyProduct("NAudio.Win8")]
-[assembly: AssemblyCopyright("Copyright © Mark Heath 2013")]
-[assembly: AssemblyTrademark("")]
-[assembly: AssemblyCulture("")]
-
-// Version information for an assembly consists of the following four values:
-//
-// Major Version
-// Minor Version
-// Build Number
-// Revision
-//
-// You can specify all the values or you can default the Build and Revision Numbers
-// by using the '*' as shown below:
-// [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("1.7.0.15")]
-[assembly: AssemblyFileVersion("1.7.0.15")]
-[assembly: ComVisible(false)]
\ No newline at end of file
diff --git a/NAudio.Win8/Wave/WaveOutputs/IWavePlayer.cs b/NAudio.Win8/Wave/WaveOutputs/IWavePlayer.cs
deleted file mode 100644
index 6605a3d9..00000000
--- a/NAudio.Win8/Wave/WaveOutputs/IWavePlayer.cs
+++ /dev/null
@@ -1,53 +0,0 @@
-using System;
-using System.Linq;
-using System.Threading.Tasks;
-
-namespace NAudio.Wave
-{
- ///
- /// Represents the interface to a device that can play audio
- ///
- public interface IWavePlayer : IDisposable
- {
- ///
- /// Begin playback
- ///
- void Play();
-
- ///
- /// Stop playback
- ///
- void Stop();
-
- ///
- /// Pause Playback
- ///
- void Pause();
-
- ///
- /// Obsolete init method
- ///
- ///
- ///
- [Obsolete]
- Task Init(IWaveProvider waveProvider);
-
- ///
- /// Initialise playback
- ///
- /// Function to create the waveprovider to be played
- /// Called on the playback thread
- void Init(Func<IWaveProvider> waveProviderFunc);
-
- ///
- /// Current playback state
- ///
- PlaybackState PlaybackState { get; }
-
- ///
- /// Indicates that playback has gone into a stopped state due to
- /// reaching the end of the input stream or an error has been encountered during playback
- ///
- event EventHandler PlaybackStopped;
- }
-}
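
The interface deleted here is the Win8 copy; the Universal project presumably keeps an equivalent IWavePlayer, since the demo changes below still program against it. The Func-based Init exists so the wave provider is constructed on the playback thread. A hedged sketch of the intended call pattern, with CreatePlayer and CreateWaveProvider standing in for whatever the app actually uses:

    IWavePlayer player = CreatePlayer();          // hypothetical factory returning some IWavePlayer
    player.PlaybackStopped += (s, e) => { /* end of stream reached, or an error occurred */ };
    player.Init(() => CreateWaveProvider());      // the Func<IWaveProvider> runs on the playback thread
    player.Play();
    // ... later
    player.Pause();
    player.Stop();
    player.Dispose();
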
diff --git a/NAudio.nuspec b/NAudio.nuspec
index 86251ef2..6f14be8f 100644
--- a/NAudio.nuspec
+++ b/NAudio.nuspec
@@ -13,12 +13,10 @@
C# .NET audio sound
-
-
-
-
+
+
diff --git a/NAudio/Properties/AssemblyInfo.cs b/NAudio/Properties/AssemblyInfo.cs
index 97418b5b..5f9cc874 100644
--- a/NAudio/Properties/AssemblyInfo.cs
+++ b/NAudio/Properties/AssemblyInfo.cs
@@ -32,5 +32,5 @@
//
// You can specify all the values or you can default the Revision and Build Numbers
// by using the '*' as shown below:
-[assembly: AssemblyVersion("1.8.3.0")]
-[assembly: AssemblyFileVersion("1.8.3.0")]
+[assembly: AssemblyVersion("1.8.4.0")]
+[assembly: AssemblyFileVersion("1.8.4.0")]
diff --git a/NAudioUniversalDemo/MainPageViewModel.cs b/NAudioUniversalDemo/MainPageViewModel.cs
index 9fa641e8..04eafec8 100644
--- a/NAudioUniversalDemo/MainPageViewModel.cs
+++ b/NAudioUniversalDemo/MainPageViewModel.cs
@@ -1,6 +1,5 @@
using System;
using System.Linq;
-using System.Threading.Tasks;
using Windows.Storage.Streams;
using NAudio.CoreAudioApi;
using NAudio.Wave;
@@ -34,18 +33,12 @@ public MainPageViewModel()
private void Stop()
{
- if (player != null)
- {
- player.Stop();
- }
+ player?.Stop();
}
private void Pause()
{
- if (player != null)
- {
- player.Pause();
- }
+ player?.Pause();
}
private void Play()
@@ -116,10 +109,7 @@ private async void RecorderOnDataAvailable(object sender, WaveInEventArgs waveIn
private void StopRecording()
{
- if (recorder != null)
- {
- recorder.StopRecording();
- }
+ recorder?.StopRecording();
}
private void RecorderOnRecordingStopped(object sender, StoppedEventArgs stoppedEventArgs)
@@ -157,16 +147,16 @@ private async void Load()
if (file == null) return;
var stream = await file.OpenAsync(FileAccessMode.Read);
if (stream == null) return;
- this.selectedStream = stream;
+ selectedStream = stream;
PlayCommand.IsEnabled = true;
}
- public DelegateCommand LoadCommand { get; private set; }
- public DelegateCommand PlayCommand { get; private set; }
- public DelegateCommand PauseCommand { get; private set; }
- public DelegateCommand StopCommand { get; private set; }
- public DelegateCommand RecordCommand { get; private set; }
- public DelegateCommand StopRecordingCommand { get; private set; }
+ public DelegateCommand LoadCommand { get; }
+ public DelegateCommand PlayCommand { get; }
+ public DelegateCommand PauseCommand { get; }
+ public DelegateCommand StopCommand { get; }
+ public DelegateCommand RecordCommand { get; }
+ public DelegateCommand StopRecordingCommand { get; }
public MediaElement MediaElement { get; set; }
}
diff --git a/NAudioUniversalDemo/MediaFoundationReaderUniversal.cs b/NAudioUniversalDemo/MediaFoundationReaderUniversal.cs
index 22b371a9..5bbd546d 100644
--- a/NAudioUniversalDemo/MediaFoundationReaderUniversal.cs
+++ b/NAudioUniversalDemo/MediaFoundationReaderUniversal.cs
@@ -15,7 +15,7 @@ public class MediaFoundationReaderUniversalSettings : MediaFoundationReaderSetti
public MediaFoundationReaderUniversalSettings()
{
// can't recreate since we're using a file stream
- this.SingleReaderObject = true;
+ SingleReaderObject = true;
}
public IRandomAccessStream Stream { get; set; }
@@ -56,9 +56,9 @@ protected override IMFSourceReader CreateReader(MediaFoundationReaderSettings se
protected override void Dispose(bool disposing)
{
- if (disposing && settings.Stream != null)
+ if (disposing)
{
- settings.Stream.Dispose();
+ settings.Stream?.Dispose();
}
base.Dispose(disposing);
}
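
For context, the Stream setting shown above is typically populated from a picked file before the reader is constructed; a sketch under assumptions (the picker configuration and the reader construction itself are app code, and the Windows.Storage / Windows.Storage.Pickers usings are implied):

    var file = await picker.PickSingleFileAsync();             // e.g. a FileOpenPicker configured elsewhere
    if (file == null) return;
    var settings = new MediaFoundationReaderUniversalSettings
    {
        Stream = await file.OpenAsync(FileAccessMode.Read)     // kept open for the reader's lifetime
    };
    // The settings object is then handed to MediaFoundationReaderUniversal, whose
    // Dispose override above now disposes the stream via the null-conditional operator.
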
diff --git a/NAudioUniversalDemo/Properties/AssemblyInfo.cs b/NAudioUniversalDemo/Properties/AssemblyInfo.cs
index 1db0c95e..ac8cfdb4 100644
--- a/NAudioUniversalDemo/Properties/AssemblyInfo.cs
+++ b/NAudioUniversalDemo/Properties/AssemblyInfo.cs
@@ -10,7 +10,7 @@
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("NAudioUniversalDemo")]
-[assembly: AssemblyCopyright("Copyright © 2015")]
+[assembly: AssemblyCopyright("Copyright © 2017")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
diff --git a/NAudioWin8Demo/App.xaml b/NAudioWin8Demo/App.xaml
deleted file mode 100644
index eab92a74..00000000
--- a/NAudioWin8Demo/App.xaml
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/NAudioWin8Demo/App.xaml.cs b/NAudioWin8Demo/App.xaml.cs
deleted file mode 100644
index 2e811b77..00000000
--- a/NAudioWin8Demo/App.xaml.cs
+++ /dev/null
@@ -1,90 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.IO;
-using System.Linq;
-using Windows.ApplicationModel;
-using Windows.ApplicationModel.Activation;
-using Windows.Foundation;
-using Windows.Foundation.Collections;
-using Windows.UI.Xaml;
-using Windows.UI.Xaml.Controls;
-using Windows.UI.Xaml.Controls.Primitives;
-using Windows.UI.Xaml.Data;
-using Windows.UI.Xaml.Input;
-using Windows.UI.Xaml.Media;
-using Windows.UI.Xaml.Navigation;
-
-// The Blank Application template is documented at http://go.microsoft.com/fwlink/?LinkId=234227
-
-namespace NAudioWin8Demo
-{
- ///
- /// Provides application-specific behavior to supplement the default Application class.
- ///
- sealed partial class App : Application
- {
- ///
- /// Initializes the singleton application object. This is the first line of authored code
- /// executed, and as such is the logical equivalent of main() or WinMain().
- ///
- public App()
- {
- this.InitializeComponent();
- this.Suspending += OnSuspending;
- }
-
- ///
- /// Invoked when the application is launched normally by the end user. Other entry points
- /// will be used when the application is launched to open a specific file, to display
- /// search results, and so forth.
- ///
- /// Details about the launch request and process.
- protected override void OnLaunched(LaunchActivatedEventArgs args)
- {
- Frame rootFrame = Window.Current.Content as Frame;
-
- // Do not repeat app initialization when the Window already has content,
- // just ensure that the window is active
- if (rootFrame == null)
- {
- // Create a Frame to act as the navigation context and navigate to the first page
- rootFrame = new Frame();
-
- if (args.PreviousExecutionState == ApplicationExecutionState.Terminated)
- {
- //TODO: Load state from previously suspended application
- }
-
- // Place the frame in the current Window
- Window.Current.Content = rootFrame;
- }
-
- if (rootFrame.Content == null)
- {
- // When the navigation stack isn't restored navigate to the first page,
- // configuring the new page by passing required information as a navigation
- // parameter
- if (!rootFrame.Navigate(typeof(MainPage), args.Arguments))
- {
- throw new Exception("Failed to create initial page");
- }
- }
- // Ensure the current window is active
- Window.Current.Activate();
- }
-
- ///
- /// Invoked when application execution is being suspended. Application state is saved
- /// without knowing whether the application will be terminated or resumed with the contents
- /// of memory still intact.
- ///
- /// The source of the suspend request.
- /// Details about the suspend request.
- private void OnSuspending(object sender, SuspendingEventArgs e)
- {
- var deferral = e.SuspendingOperation.GetDeferral();
- //TODO: Save application state and stop any background activity
- deferral.Complete();
- }
- }
-}
diff --git a/NAudioWin8Demo/Assets/Logo.png b/NAudioWin8Demo/Assets/Logo.png
deleted file mode 100644
index 57934f14..00000000
Binary files a/NAudioWin8Demo/Assets/Logo.png and /dev/null differ
diff --git a/NAudioWin8Demo/Assets/SmallLogo.png b/NAudioWin8Demo/Assets/SmallLogo.png
deleted file mode 100644
index 4907b990..00000000
Binary files a/NAudioWin8Demo/Assets/SmallLogo.png and /dev/null differ
diff --git a/NAudioWin8Demo/Assets/SplashScreen.png b/NAudioWin8Demo/Assets/SplashScreen.png
deleted file mode 100644
index b9ae7d57..00000000
Binary files a/NAudioWin8Demo/Assets/SplashScreen.png and /dev/null differ
diff --git a/NAudioWin8Demo/Assets/StoreLogo.png b/NAudioWin8Demo/Assets/StoreLogo.png
deleted file mode 100644
index 75889927..00000000
Binary files a/NAudioWin8Demo/Assets/StoreLogo.png and /dev/null differ
diff --git a/NAudioWin8Demo/Common/StandardStyles.xaml b/NAudioWin8Demo/Common/StandardStyles.xaml
deleted file mode 100644
index 01fd46a8..00000000
--- a/NAudioWin8Demo/Common/StandardStyles.xaml
+++ /dev/null
@@ -1,1829 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Mouse
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/NAudioWin8Demo/DelegateCommand.cs b/NAudioWin8Demo/DelegateCommand.cs
deleted file mode 100644
index ad109acc..00000000
--- a/NAudioWin8Demo/DelegateCommand.cs
+++ /dev/null
@@ -1,48 +0,0 @@
-using System;
-using System.Windows.Input;
-
-namespace NAudioWin8Demo
-{
- internal class DelegateCommand : ICommand
- {
- private readonly Action action;
- private bool enabled;
-
- public DelegateCommand(Action action)
- {
- this.action = action;
- this.enabled = true;
- }
-
- public bool IsEnabled
- {
- get { return enabled; }
- set
- {
- if (enabled != value)
- {
- enabled = value;
- OnCanExecuteChanged();
- }
- }
- }
-
- public bool CanExecute(object parameter)
- {
- return enabled;
- }
-
- public void Execute(object parameter)
- {
- action();
- }
-
- public event EventHandler CanExecuteChanged;
-
- protected virtual void OnCanExecuteChanged()
- {
- EventHandler handler = CanExecuteChanged;
- if (handler != null) handler(this, EventArgs.Empty);
- }
- }
-}
\ No newline at end of file
diff --git a/NAudioWin8Demo/MainPage.xaml b/NAudioWin8Demo/MainPage.xaml
deleted file mode 100644
index c3ab63fa..00000000
--- a/NAudioWin8Demo/MainPage.xaml
+++ /dev/null
@@ -1,23 +0,0 @@
diff --git a/NAudioWin8Demo/MainPage.xaml.cs b/NAudioWin8Demo/MainPage.xaml.cs
deleted file mode 100644
index cf8e988b..00000000
--- a/NAudioWin8Demo/MainPage.xaml.cs
+++ /dev/null
@@ -1,39 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.IO;
-using System.Linq;
-using Windows.Foundation;
-using Windows.Foundation.Collections;
-using Windows.UI.Xaml;
-using Windows.UI.Xaml.Controls;
-using Windows.UI.Xaml.Controls.Primitives;
-using Windows.UI.Xaml.Data;
-using Windows.UI.Xaml.Input;
-using Windows.UI.Xaml.Media;
-using Windows.UI.Xaml.Navigation;
-
-// The Blank Page item template is documented at http://go.microsoft.com/fwlink/?LinkId=234238
-
-namespace NAudioWin8Demo
-{
- ///
- /// An empty page that can be used on its own or navigated to within a Frame.
- ///
- public sealed partial class MainPage : Page
- {
- public MainPage()
- {
- this.InitializeComponent();
- this.DataContext = new MainPageViewModel() {MediaElement = me};
- }
-
- ///
- /// Invoked when this page is about to be displayed in a Frame.
- ///
- /// Event data that describes how this page was reached. The Parameter
- /// property is typically used to configure the page.
- protected override void OnNavigatedTo(NavigationEventArgs e)
- {
- }
- }
-}
diff --git a/NAudioWin8Demo/MainPageViewModel.cs b/NAudioWin8Demo/MainPageViewModel.cs
deleted file mode 100644
index 6bf24322..00000000
--- a/NAudioWin8Demo/MainPageViewModel.cs
+++ /dev/null
@@ -1,175 +0,0 @@
-using System;
-using System.Linq;
-using System.Threading.Tasks;
-using Windows.Storage.Streams;
-using NAudio.CoreAudioApi;
-using NAudio.Wave;
-using NAudio.Win8.Wave.WaveOutputs;
-using Windows.Storage;
-using Windows.Storage.Pickers;
-using Windows.UI.Xaml.Controls;
-using System.IO;
-using NAudio.MediaFoundation;
-
-namespace NAudioWin8Demo
-{
- class MainPageViewModel : ViewModelBase
- {
- private IWavePlayer player;
- private WaveStream reader;
- private IWaveIn recorder;
- private MemoryStream recordStream;
- private IRandomAccessStream selectedStream;
-
- public MainPageViewModel()
- {
- LoadCommand = new DelegateCommand(Load);
- PlayCommand = new DelegateCommand(Play) { IsEnabled = false };
- PauseCommand = new DelegateCommand(Pause) { IsEnabled = false };
- StopCommand = new DelegateCommand(Stop) { IsEnabled = false };
- RecordCommand = new DelegateCommand(Record);
- StopRecordingCommand = new DelegateCommand(StopRecording) { IsEnabled = false };
- MediaFoundationApi.Startup();
- }
-
- private void Stop()
- {
- if (player != null)
- {
- player.Stop();
- }
- }
-
- private void Pause()
- {
- if (player != null)
- {
- player.Pause();
- }
- }
-
- private async void Play()
- {
- if (player == null)
- {
- // Exclusive mode - fails with a weird buffer alignment error
- player = new WasapiOutRT(AudioClientShareMode.Shared, 200);
- player.Init(CreateReader);
-
- player.PlaybackStopped += PlayerOnPlaybackStopped;
- }
-
- if (player.PlaybackState != PlaybackState.Playing)
- {
- //reader.Seek(0, SeekOrigin.Begin);
- player.Play();
- StopCommand.IsEnabled = true;
- PauseCommand.IsEnabled = true;
- LoadCommand.IsEnabled = false;
- }
- }
-
- private IWaveProvider CreateReader()
- {
- if (reader is RawSourceWaveStream)
- {
- reader.Position = 0;
- return reader;
- }
- reader = new MediaFoundationReaderRT(selectedStream);
- return reader;
- }
-
- private void Record()
- {
- if (recorder == null)
- {
- recorder = new WasapiCaptureRT();
- recorder.RecordingStopped += RecorderOnRecordingStopped;
- recorder.DataAvailable += RecorderOnDataAvailable;
- }
-
- if (reader != null)
- {
- reader.Dispose();
- reader = null;
- }
-
- recorder.StartRecording();
-
- RecordCommand.IsEnabled = false;
- StopRecordingCommand.IsEnabled = true;
- }
-
-
-
- private async void RecorderOnDataAvailable(object sender, WaveInEventArgs waveInEventArgs)
- {
- if (reader == null)
- {
- recordStream = new MemoryStream();
- reader = new RawSourceWaveStream(recordStream, recorder.WaveFormat);
- }
-
- await recordStream.WriteAsync(waveInEventArgs.Buffer, 0, waveInEventArgs.BytesRecorded);
- }
-
- private void StopRecording()
- {
- if (recorder != null)
- {
- recorder.StopRecording();
- }
- }
-
- private void RecorderOnRecordingStopped(object sender, StoppedEventArgs stoppedEventArgs)
- {
- RecordCommand.IsEnabled = true;
- StopRecordingCommand.IsEnabled = false;
- PlayCommand.IsEnabled = true;
- }
-
-
- private void PlayerOnPlaybackStopped(object sender, StoppedEventArgs stoppedEventArgs)
- {
- LoadCommand.IsEnabled = true;
- StopCommand.IsEnabled = false;
- PauseCommand.IsEnabled = false;
- if (reader != null)
- {
- reader.Position = 0;
- }
- }
-
- private async void Load()
- {
- if (player != null)
- {
- player.Dispose();
- player = null;
- }
- reader = null; // will be disposed by player
-
- var picker = new FileOpenPicker();
- picker.SuggestedStartLocation = PickerLocationId.MusicLibrary;
- picker.FileTypeFilter.Add("*");
- var file = await picker.PickSingleFileAsync();
- if (file == null) return;
- var stream = await file.OpenAsync(FileAccessMode.Read);
- if (stream == null) return;
- this.selectedStream = stream;
- PlayCommand.IsEnabled = true;
- }
-
- public DelegateCommand LoadCommand { get; private set; }
- public DelegateCommand PlayCommand { get; private set; }
- public DelegateCommand PauseCommand { get; private set; }
- public DelegateCommand StopCommand { get; private set; }
- public DelegateCommand RecordCommand { get; private set; }
- public DelegateCommand StopRecordingCommand { get; private set; }
-
- public MediaElement MediaElement { get; set; }
- }
-
-
-}
diff --git a/NAudioWin8Demo/MediaFoundationReaderRT.cs b/NAudioWin8Demo/MediaFoundationReaderRT.cs
deleted file mode 100644
index 7e8270c5..00000000
--- a/NAudioWin8Demo/MediaFoundationReaderRT.cs
+++ /dev/null
@@ -1,66 +0,0 @@
-using System;
-using NAudio.MediaFoundation;
-using NAudio.Wave;
-using Windows.Storage.Streams;
-
-namespace NAudioWin8Demo
-{
- // Slightly hacky approach to supporting a different WinRT constructor
- class MediaFoundationReaderRT : MediaFoundationReader
- {
- private readonly MediaFoundationReaderRTSettings settings;
-
- public class MediaFoundationReaderRTSettings : MediaFoundationReaderSettings
- {
- public MediaFoundationReaderRTSettings()
- {
- // can't recreate since we're using a file stream
- this.SingleReaderObject = true;
- }
-
- public IRandomAccessStream Stream { get; set; }
- }
-
- public MediaFoundationReaderRT(IRandomAccessStream stream)
- : this(new MediaFoundationReaderRTSettings() {Stream = stream})
- {
-
- }
-
-
- public MediaFoundationReaderRT(MediaFoundationReaderRTSettings settings)
- : base(null, settings)
- {
- this.settings = settings;
- }
-
- protected override IMFSourceReader CreateReader(MediaFoundationReaderSettings settings)
- {
- var fileStream = ((MediaFoundationReaderRTSettings) settings).Stream;
- var byteStream = MediaFoundationApi.CreateByteStream(fileStream);
- var reader = MediaFoundationApi.CreateSourceReaderFromByteStream(byteStream);
- reader.SetStreamSelection(MediaFoundationInterop.MF_SOURCE_READER_ALL_STREAMS, false);
- reader.SetStreamSelection(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, true);
-
- // Create a partial media type indicating that we want uncompressed PCM audio
-
- var partialMediaType = new MediaType();
- partialMediaType.MajorType = MediaTypes.MFMediaType_Audio;
- partialMediaType.SubType = settings.RequestFloatOutput ? AudioSubtypes.MFAudioFormat_Float : AudioSubtypes.MFAudioFormat_PCM;
-
- // set the media type
- // can return MF_E_INVALIDMEDIATYPE if not supported
- reader.SetCurrentMediaType(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, IntPtr.Zero, partialMediaType.MediaFoundationObject);
- return reader;
- }
-
- protected override void Dispose(bool disposing)
- {
- if (disposing && settings.Stream != null)
- {
- settings.Stream.Dispose();
- }
- base.Dispose(disposing);
- }
- }
-}
\ No newline at end of file
diff --git a/NAudioWin8Demo/NAudioWin8Demo.csproj b/NAudioWin8Demo/NAudioWin8Demo.csproj
deleted file mode 100644
index fb9ef000..00000000
--- a/NAudioWin8Demo/NAudioWin8Demo.csproj
+++ /dev/null
@@ -1,163 +0,0 @@
-
-
-
-
- Debug
- AnyCPU
- {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}
- AppContainerExe
- Properties
- NAudioWin8Demo
- NAudioWin8Demo
- en-US
- 512
- {BC8A1FFA-BEE3-4634-8014-F334798102B3};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}
- NAudioWin8Demo_TemporaryKey.pfx
- 5FE2AC161650E78EE8E6B89F85E99C20532A2EC5
- 8.1
- 12
-
-
-
- AnyCPU
- true
- full
- false
- bin\Debug\
- DEBUG;TRACE;NETFX_CORE
- prompt
- 4
-
-
- AnyCPU
- pdbonly
- true
- bin\Release\
- TRACE;NETFX_CORE
- prompt
- 4
-
-
- true
- bin\ARM\Debug\
- DEBUG;TRACE;NETFX_CORE
- ;2008
- full
- ARM
- false
- prompt
- true
-
-
- bin\ARM\Release\
- TRACE;NETFX_CORE
- true
- ;2008
- pdbonly
- ARM
- false
- prompt
- true
-
-
- true
- bin\x64\Debug\
- DEBUG;TRACE;NETFX_CORE
- ;2008
- full
- x64
- false
- prompt
- true
-
-
- bin\x64\Release\
- TRACE;NETFX_CORE
- true
- ;2008
- pdbonly
- x64
- false
- prompt
- true
-
-
- true
- bin\x86\Debug\
- DEBUG;TRACE;NETFX_CORE
- ;2008
- full
- x86
- false
- prompt
- true
-
-
- bin\x86\Release\
- TRACE;NETFX_CORE
- true
- ;2008
- pdbonly
- x86
- false
- prompt
- true
-
-
-
-
- {90543F38-E793-40C3-972D-3271EBF1DEF4}
- NAudio.Win8
-
-
-
-
- App.xaml
-
-
-
- MainPage.xaml
-
-
-
-
-
-
-
-
- Designer
-
-
-
-
-
-
-
-
-
-
-
- MSBuild:Compile
- Designer
-
-
- MSBuild:Compile
- Designer
-
-
- MSBuild:Compile
- Designer
-
-
-
- 12.0
-
-
-
-
\ No newline at end of file
diff --git a/NAudioWin8Demo/NAudioWin8Demo_TemporaryKey.pfx b/NAudioWin8Demo/NAudioWin8Demo_TemporaryKey.pfx
deleted file mode 100644
index 83dd2dca..00000000
Binary files a/NAudioWin8Demo/NAudioWin8Demo_TemporaryKey.pfx and /dev/null differ
diff --git a/NAudioWin8Demo/Package.appxmanifest b/NAudioWin8Demo/Package.appxmanifest
deleted file mode 100644
index f6f5568a..00000000
--- a/NAudioWin8Demo/Package.appxmanifest
+++ /dev/null
@@ -1,32 +0,0 @@
-
-
-
-
- NAudioWin8Demo
- Mark Heath
- Assets\StoreLogo.png
-
-
- 6.3.0
- 6.3.0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/NAudioWin8Demo/Properties/AssemblyInfo.cs b/NAudioWin8Demo/Properties/AssemblyInfo.cs
deleted file mode 100644
index e4e08df3..00000000
--- a/NAudioWin8Demo/Properties/AssemblyInfo.cs
+++ /dev/null
@@ -1,29 +0,0 @@
-using System.Reflection;
-using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
-
-// General Information about an assembly is controlled through the following
-// set of attributes. Change these attribute values to modify the information
-// associated with an assembly.
-[assembly: AssemblyTitle("NAudioWin8Demo")]
-[assembly: AssemblyDescription("")]
-[assembly: AssemblyConfiguration("")]
-[assembly: AssemblyCompany("")]
-[assembly: AssemblyProduct("NAudioWin8Demo")]
-[assembly: AssemblyCopyright("Copyright © 2012")]
-[assembly: AssemblyTrademark("")]
-[assembly: AssemblyCulture("")]
-
-// Version information for an assembly consists of the following four values:
-//
-// Major Version
-// Minor Version
-// Build Number
-// Revision
-//
-// You can specify all the values or you can default the Build and Revision Numbers
-// by using the '*' as shown below:
-// [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("1.0.0.0")]
-[assembly: AssemblyFileVersion("1.0.0.0")]
-[assembly: ComVisible(false)]
\ No newline at end of file
diff --git a/NAudioWin8Demo/ViewModelBase.cs b/NAudioWin8Demo/ViewModelBase.cs
deleted file mode 100644
index 0a95da27..00000000
--- a/NAudioWin8Demo/ViewModelBase.cs
+++ /dev/null
@@ -1,16 +0,0 @@
-using System.ComponentModel;
-using System.Runtime.CompilerServices;
-
-namespace NAudioWin8Demo
-{
- internal class ViewModelBase : INotifyPropertyChanged
- {
- public event PropertyChangedEventHandler PropertyChanged;
-
- protected virtual void OnPropertyChanged([CallerMemberName] string propertyName = null)
- {
- PropertyChangedEventHandler handler = PropertyChanged;
- if (handler != null) handler(this, new PropertyChangedEventArgs(propertyName));
- }
- }
-}
\ No newline at end of file
diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md
index 24ffecfa..05fc6a8f 100644
--- a/RELEASE_NOTES.md
+++ b/RELEASE_NOTES.md
@@ -1,5 +1,56 @@
-#### 1.8.0 (27 Dec 2016)
+### 1.8.4
+
+* Windows 10 Universal build now included in NuGet package
+* Added a TotalTime property to WaveFileWriter (see the sketch after this list)
+* Added a Broadcast Wave File Writer
+* Various bugfixes and enhancements:
+ * Prevent audio files from staying locked
+ * Additional constructor for MultiplexingWaveProvider
+ * Faster SilenceWaveProvider implementation #257
+ * Fixed calling StopRecording on WaveIn without ever starting recording
+ * Improved reliability in WaveInEvent
+ * Made non-strict MIDI file checking tolerant of invalid CC values #250
+ * Added defaults for StereoToMonoProvider16 volumes #267
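+
+A minimal usage sketch of the new TotalTime property; the file name, format and one-second buffer below are illustrative rather than part of this release:
+
+```csharp
+using System;
+using NAudio.Wave;
+
+var format = new WaveFormat(44100, 16, 2);                 // 44.1 kHz, 16-bit, stereo
+using (var writer = new WaveFileWriter("example.wav", format))
+{
+    var oneSecond = new byte[format.AverageBytesPerSecond]; // one second of silence
+    writer.Write(oneSecond, 0, oneSecond.Length);
+    // TotalTime reports the duration written so far as a TimeSpan
+    Console.WriteLine(writer.TotalTime);                    // roughly 00:00:01
+}
+```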
+
+### 1.8.3 (5 Sep 2017)
+
+* Allow access to the property store of MMDevice (see the sketch after this list)
+* Various bugfixes and enhancements:
+ * Support Unicode in MIDI TextEvent
+ * Fixed noise issue when restarting DirectSoundOut
+ * Improved support for mono AAC #223
+ * Fix for NullReferenceException when opening AsioOut by index #234
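+
+A small sketch of reading from the MMDevice property store; it assumes the NAudio.CoreAudioApi property-store surface (Contains, the key indexer, PropertyKeys.PKEY_Device_FriendlyName), and the chosen key is only an example:
+
+```csharp
+using System;
+using NAudio.CoreAudioApi;
+
+var enumerator = new MMDeviceEnumerator();
+var device = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
+var store = device.Properties;                             // the device property store
+if (store.Contains(PropertyKeys.PKEY_Device_FriendlyName))
+{
+    var friendlyName = (string)store[PropertyKeys.PKEY_Device_FriendlyName].Value;
+    Console.WriteLine("Default render device: " + friendlyName);
+}
+```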
+
+
+### 1.8.2 (6 Aug 2017)
+
+* AudioFileReader supports filenames ending with .aif (see the sketch after this list)
+* Various bugfixes and enhancements:
+ * Fixed problem with Mp3FileReader position advancing too rapidly #202
+ * Implemented IDisposable in MMDevice
+ * Fixed Dispose of AudioSessionManager
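+
+A short sketch of the .aif support; the file name is illustrative and WaveOutEvent playback is just one way to consume the reader:
+
+```csharp
+using System.Threading;
+using NAudio.Wave;
+
+// .aif now resolves to the AIFF reader, just like .aiff already did
+using (var reader = new AudioFileReader("drum-loop.aif"))
+using (var output = new WaveOutEvent())
+{
+    output.Init(reader);
+    output.Play();
+    while (output.PlaybackState == PlaybackState.Playing)
+    {
+        Thread.Sleep(100);
+    }
+}
+```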
+
+
+### 1.8.1 (22 Jul 2017)
+
+* AsioOut exposes FramesPerBuffer
+* Changed WaveOut and WaveOutEvent default DeviceNumber to -1 (Mapper)
+* Added MidiFile constructor overload that takes an input Stream object (see the sketch after this list)
+* Various bugfixes and enhancements:
+ * Desktop apps use MFCreateMFByteStreamOnStream instead of MFCreateMFByteStreamOnStreamEx
+ * Fix for PropVariant marshalling #154
+ * SoundFont should not require isng chunk #150
+ * Fixed potential MFT memory leak
+ * Mp3FileReader.ReadFrame advances Position #161
+ * sfzfilereader class obsoleted
+ * Ensure DriverName property is always set on AsioOut #169
+ * Fixed WaveFormatConversionProvider throwing an error in its finalizer #188
+ * Restored compatibility with .NET Portable #189
+ * Improved error message for channel index out of range #208
+ * Release the COM object in AudioEndpointVolume Dispose
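+
+A sketch of constructing a MidiFile from a Stream; the exact overload shown (a stream plus a strictChecking flag) is an assumption, and the file name is illustrative:
+
+```csharp
+using System;
+using System.IO;
+using NAudio.Midi;
+
+using (var stream = File.OpenRead("song.mid"))
+{
+    // previously MidiFile could only be constructed from a file path
+    var midiFile = new MidiFile(stream, strictChecking: false);
+    Console.WriteLine("Tracks: " + midiFile.Tracks +
+                      ", ticks per quarter note: " + midiFile.DeltaTicksPerQuarterNote);
+}
+```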
+
+### 1.8.0 (27 Dec 2016)
* Windows 10 Universal project. Very similar feature set to the Win 8 one.
* Added a Windows 10 Universal demo app with limited functionality
@@ -38,7 +89,7 @@
* Fixed MIDI to type 1 converter bug
-#### 1.7.3 5 Mar 2015
+### 1.7.3 5 Mar 2015
* WaveFileWriter.Flush now updates the WAV headers, resulting in a playable file without having to call Dispose
* SampleToWaveProvider24 class added for conversion to 24 bit
@@ -57,7 +108,7 @@
* WaveOutEvent can set device volume
* Better handling of WAVEFORMATEXTENSIBLE for WasapiIn
-#### 1.7.2 24 Nov 2014
+### 1.7.2 24 Nov 2014
* WaveFileReader and WaveFileWriter supporting data chunk > 2GB
* Working towards making WinRT build pass WACK
@@ -74,7 +125,7 @@
* Fixes for sync context issues in ASP.NET
* Fixed WasapiOut could stop when playing resampled audio
-#### 1.7.1 10 Apr 2014
+### 1.7.1 10 Apr 2014
* WdlResampler - a fully managed resampler based on the one from CockosWDL
* AdsrSampleProvider for creating ADSR envelopes
@@ -90,8 +141,8 @@
* OffsetSampleProvider bugfixes and TimeSpan helper methods
* Cue markers RIFF chunk writing fixes
* WaveIn and WaveOutEvent robustness fixes
-
-#### 1.7.0 29 Oct 2013
+
+### 1.7.0 29 Oct 2013
[Release announcement](http://markheath.net/post/naudio-17-release-notes)
* MediaFoundationReader allows you to play any audio files that Media Foundation can play, which on Windows 7 and above means playback of AAC, MP3, WMA, as well as playing the audio from video files.
@@ -121,7 +172,7 @@
* Use WasapiCaptureRT for recording (thanks to Kassoul for some performance enhancement suggestions)
* There is a demo application in the NAudio source code showing record and playback
-#### 1.6.0 26 Oct 2012
+### 1.6.0 26 Oct 2012
[Release Announcement](http://markheath.net/post/naudio-16-release-notes-10th)
@@ -145,7 +196,7 @@
* WMA Nuget Package (NAudio.Wma) for playing WMA files. Download here.
* RF64 read support
-#### 1.5.0 18 Dec 2011
+### 1.5.0 18 Dec 2011
[Release Announcement](http://markheath.net/post/naudio-15-released)
@@ -201,7 +252,7 @@
* WPF Demo has visualization plugins, allowing me to trial different drawing mechanisms
* WPF Demo has a (very basic) drum machine example
-#### 1.4.0 20 Apr 2011
+### 1.4.0 20 Apr 2011
[Release announcement](http://markheath.net/post/naudio-14-release-notes)
@@ -232,7 +283,7 @@
* WaveOut uses 2 buffers not 3 by default now (a leftover from the original days of NAudio when my PC had a 400MHz Pentium II processor!).
* Lots more minor bug fixes & patches applied – see the check-in history for full details
-#### 1.3.0 10 Oct 2009
+### 1.3.0 10 Oct 2009
[Release Announcement](http://markheath.net/post/naudio-13-release-notes)
@@ -249,7 +300,7 @@
* WASAPI audio capture is now supported.
* NAudio should now work correctly on x64 operating systems (accomplished this by setting Visual Studio to compile for x86).
-#### 1.2.0 26 Jun 2008
+### 1.2.0 26 Jun 2008
[Release Announcement](http://markheath.net/post/naudio-12-release-notes)
@@ -272,7 +323,7 @@
* IWaveProvider Tech Preview - As discussed recently on my blog, we will be using a new interface called IWaveProvider in future versions of NAudio, which uses the WaveBuffer class. This code is available in the version 1.2 release, but you are not currently required to use it.
* Alexandre Mutel - Finally, this version welcomes a new contributor to the team. In fact, Alexandre is the first contributor I have added to this project. He has provided the new implementations of ASIO and DirectSoundOut, as well as helping out with WASAPI and the new IWaveProvider interface design. His enthusiasm for the project has also meant that I have been working on it a little more than I might have otherwise!
-#### 1.1.0 26 May 2008
+### 1.1.0 26 May 2008
* Added some new NoteEvent and NoteOnEvent constructors
* WaveOffsetStream
* WaveStream32 preparation for 24 bit inputs
@@ -297,7 +348,7 @@
* More MidiEventCollection automated tests
* Test application can now send test MIDI out messages
-#### 1.0.0 19 Apr 2007
+### 1.0.0 19 Apr 2007
* Minor updates to support EZdrummer MIDI converter
* Beginnings of a new WaveOut class with its own thread
* Fixed a bug in WaveFileReader
@@ -328,7 +379,7 @@
* WaveMixerStream32 updated ready to support dynamic adding of streams
* Some bugfixes to WaveOut to support auto stop mode again
-#### 0.9.0 6 Oct 2006
+### 0.9.0 6 Oct 2006
* ACM stream bug fixes
* Support for waveOut window message callbacks
* Wave In Recording bug fixes
@@ -361,7 +412,7 @@
* Meta events use VarInts for lengths now
* Allow non-strict reading of MIDI file
-#### 0.8.0 21 Feb 2006
+### 0.8.0 21 Feb 2006
* Minor bug fix to WaveMixer classes
* NICE specific code removed
* MP3 Reader can read ID3 tags and frames now
@@ -379,7 +430,7 @@
* Added simple compressor
* Added simple gate
-#### 0.7.0 12 Dec 2005
+### 0.7.0 12 Dec 2005
* Made a 16 and 32 bit mixer stream
* Made a 32 bit WaveChannel stream
* A 32 to 16 bit conversion stream
@@ -398,7 +449,7 @@
* Support for Speed codec
* WaveStream inherits Stream
-#### 0.6.0 16 Nov 2005
+### 0.6.0 16 Nov 2005
* Dual channel strip in WavePlayer
* Fixed bad calculation of offset seconds in WavePlayer
* Improved checking that we don't go beyond the end of streams
@@ -425,7 +476,7 @@
* Converted to .NET 2.0
* n.b. DirectSound has issues - needed to turn off the LoaderLock Managed Debug Assistant
-#### 0.5.0 31 Oct 2005
+### 0.5.0 31 Oct 2005
* WaveChannel can supply from stereo input
* Initial VST interfaces and enums
* VstLoader implements IVstEffect
@@ -441,7 +492,7 @@
* Some more LCD control characters
* Initial WaveViewer control
-#### 0.4.0 12 May 2005
+### 0.4.0 12 May 2005
* changes recommended by FxCop
* namespace changed to NAudio
* XML documentation, FxCop fixes, Namespace improvements
@@ -461,7 +512,7 @@
* A very basic time domain convolution
* Improvements to wave-reader and writer for floating point audio
-#### 0.3.0 8 Mar 2005
+### 0.3.0 8 Mar 2005
* Skip backwards and forwards in wav file
* WavPlayer trackBar indicates progress
* Allows trackBar repositioning
@@ -489,7 +540,7 @@
* Greatly improved the ability to calculate appropriate buffer sizes
* Realtime GSM decoding is now working
-#### 0.2.0 25 Feb 2005
+### 0.2.0 25 Feb 2005
* Improvements to WaveStream class
* SoundFont library merged
* Converted to Visual Studio .NET
@@ -498,7 +549,7 @@
* Generic WaveStream class and WaveFileReader
* Improved class design trying to fix WaveOut bug (waveout callback was being GCed)
-#### 0.1.0 23 Dec 2002
+### 0.1.0 23 Dec 2002
* Added pause and stop for WaveOut
* Got wave playing working better
* Wave functions improved
@@ -507,5 +558,5 @@
* Improvements to Mixer interop & classes
* Added MIDI interop, MMException, more mixer classes
-#### 0.0.0 9 Dec 2002
+### 0.0.0 9 Dec 2002
* Initial version, basic mixer interop
diff --git a/build.bat b/build.bat
index 641b987b..82b013bf 100644
--- a/build.bat
+++ b/build.bat
@@ -1,3 +1,6 @@
+REM Can be called with a build target, e.g.
+REM   build zipall
+REM   build nuget
@echo off
cls
if not exist "packages\FAKE" "Tools\NuGet.exe" "Install" "FAKE" "-OutputDirectory" "packages" "-ExcludeVersion"
diff --git a/build.fsx b/build.fsx
index dc409775..c5583860 100644
--- a/build.fsx
+++ b/build.fsx
@@ -55,11 +55,11 @@ Target "NuGet" (fun _ ->
{p with
(*Authors = authors
Project = projectName
- Description = projectDescription
+ Description = projectDescription
Summary = projectSummary
WorkingDir = packagingDir
AccessKey = myAccesskey*)
- Version = "1.8.3" // todo get the version number from elsewhere
+ Version = "1.8.4" // todo get the version number from elsewhere
WorkingDir = "."
OutputPath = deployDir
@@ -85,7 +85,7 @@ let demoFiles =
|> Seq.map (fun a -> a, Path.GetFullPath (sprintf "./%s/bin/Debug" a))
|> Seq.map (fun (a,b) -> a, { demoIncludes with BaseDirectory = b })
|> List.ofSeq
-
+
Target "ZipDemo" (fun _ ->
CreateZipOfIncludes (deployDir + "NAudio-Demos.zip") "" DefaultZipLevel demoFiles
)
@@ -114,8 +114,6 @@ Target "ZipLib" (fun _ ->
?=> "ReleaseBuild"
==> "Release"
-
-
"ZipDemo" ==> "ZipAll"
"ZipLib" ==> "ZipAll"
"ZipSource" ==> "ZipAll"