From f8fe61c3a90dd267746ef950f00ad659f498b786 Mon Sep 17 00:00:00 2001 From: Mark Heath Date: Wed, 6 Dec 2017 20:31:37 +0000 Subject: [PATCH] retiring Win 8 project --- .travis.yml | 7 - NAudio.Universal/NAudio.Universal.csproj | 14 +- NAudio.Universal/Properties/AssemblyInfo.cs | 2 +- .../Wave/WaveInputs/WasapiCaptureRT.cs | 753 ++++--- .../Wave/WaveOutputs/WasapiOutRT.cs | 1308 ++++++------ .../Wave/WaveOutputs/WaveFileWriterRT.cs | 978 ++++----- NAudio.Win8.sln | 68 - NAudio.Win8.sln.DotSettings | 6 - NAudio.Win8/NAudio.Win8.csproj | 837 -------- NAudio.Win8/Properties/AssemblyInfo.cs | 29 - NAudio.Win8/Wave/WaveOutputs/IWavePlayer.cs | 53 - NAudio.nuspec | 6 +- NAudio/Properties/AssemblyInfo.cs | 4 +- NAudioUniversalDemo/MainPageViewModel.cs | 30 +- .../MediaFoundationReaderUniversal.cs | 6 +- .../Properties/AssemblyInfo.cs | 2 +- NAudioWin8Demo/App.xaml | 20 - NAudioWin8Demo/App.xaml.cs | 90 - NAudioWin8Demo/Assets/Logo.png | Bin 2724 -> 0 bytes NAudioWin8Demo/Assets/SmallLogo.png | Bin 808 -> 0 bytes NAudioWin8Demo/Assets/SplashScreen.png | Bin 14763 -> 0 bytes NAudioWin8Demo/Assets/StoreLogo.png | Bin 1069 -> 0 bytes NAudioWin8Demo/Common/StandardStyles.xaml | 1829 ----------------- NAudioWin8Demo/DelegateCommand.cs | 48 - NAudioWin8Demo/MainPage.xaml | 23 - NAudioWin8Demo/MainPage.xaml.cs | 39 - NAudioWin8Demo/MainPageViewModel.cs | 175 -- NAudioWin8Demo/MediaFoundationReaderRT.cs | 66 - NAudioWin8Demo/NAudioWin8Demo.csproj | 163 -- .../NAudioWin8Demo_TemporaryKey.pfx | Bin 2460 -> 0 bytes NAudioWin8Demo/Package.appxmanifest | 32 - NAudioWin8Demo/Properties/AssemblyInfo.cs | 29 - NAudioWin8Demo/ViewModelBase.cs | 16 - RELEASE_NOTES.md | 97 +- build.bat | 3 + build.fsx | 8 +- 36 files changed, 1619 insertions(+), 5122 deletions(-) delete mode 100644 .travis.yml rename {NAudio.Win8 => NAudio.Universal}/Wave/WaveInputs/WasapiCaptureRT.cs (84%) rename {NAudio.Win8 => NAudio.Universal}/Wave/WaveOutputs/WasapiOutRT.cs (97%) rename {NAudio.Win8 => NAudio.Universal}/Wave/WaveOutputs/WaveFileWriterRT.cs (97%) delete mode 100644 NAudio.Win8.sln delete mode 100644 NAudio.Win8.sln.DotSettings delete mode 100644 NAudio.Win8/NAudio.Win8.csproj delete mode 100644 NAudio.Win8/Properties/AssemblyInfo.cs delete mode 100644 NAudio.Win8/Wave/WaveOutputs/IWavePlayer.cs delete mode 100644 NAudioWin8Demo/App.xaml delete mode 100644 NAudioWin8Demo/App.xaml.cs delete mode 100644 NAudioWin8Demo/Assets/Logo.png delete mode 100644 NAudioWin8Demo/Assets/SmallLogo.png delete mode 100644 NAudioWin8Demo/Assets/SplashScreen.png delete mode 100644 NAudioWin8Demo/Assets/StoreLogo.png delete mode 100644 NAudioWin8Demo/Common/StandardStyles.xaml delete mode 100644 NAudioWin8Demo/DelegateCommand.cs delete mode 100644 NAudioWin8Demo/MainPage.xaml delete mode 100644 NAudioWin8Demo/MainPage.xaml.cs delete mode 100644 NAudioWin8Demo/MainPageViewModel.cs delete mode 100644 NAudioWin8Demo/MediaFoundationReaderRT.cs delete mode 100644 NAudioWin8Demo/NAudioWin8Demo.csproj delete mode 100644 NAudioWin8Demo/NAudioWin8Demo_TemporaryKey.pfx delete mode 100644 NAudioWin8Demo/Package.appxmanifest delete mode 100644 NAudioWin8Demo/Properties/AssemblyInfo.cs delete mode 100644 NAudioWin8Demo/ViewModelBase.cs diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index d922db6c..00000000 --- a/.travis.yml +++ /dev/null @@ -1,7 +0,0 @@ -# runs on mono, so we can't build the Win 8 or Universal projects -# docs here: https://docs.travis-ci.com/user/languages/csharp/ -language: csharp -solution: NAudio.sln -script: - - xbuild 
/p:Configuration=Release NAudio.sln - - mono ./packages/NUnit.*/tools/nunit-console.exe /exclude=IntegrationTest ./NAudioTests/bin/Release/NAudioTests.dll diff --git a/NAudio.Universal/NAudio.Universal.csproj b/NAudio.Universal/NAudio.Universal.csproj index 95a543fd..e32400fb 100644 --- a/NAudio.Universal/NAudio.Universal.csproj +++ b/NAudio.Universal/NAudio.Universal.csproj @@ -26,6 +26,7 @@ DEBUG;TRACE;NETFX_CORE;WINDOWS_UAP prompt 4 + bin\Debug\NAudio.Universal.XML AnyCPU @@ -35,6 +36,7 @@ TRACE;NETFX_CORE;WINDOWS_UAP prompt 4 + bin\Release\NAudio.Universal.XML ARM @@ -114,15 +116,6 @@ - - Wave\WaveInputs\WasapiCaptureRT.cs - - - Wave\WaveOutputs\WasapiOutRT.cs - - - Wave\WaveOutputs\WaveFileWriterRT.cs - Codecs\ALawDecoder.cs @@ -828,7 +821,10 @@ + + + diff --git a/NAudio.Universal/Properties/AssemblyInfo.cs b/NAudio.Universal/Properties/AssemblyInfo.cs index 7627889a..b23c0f55 100644 --- a/NAudio.Universal/Properties/AssemblyInfo.cs +++ b/NAudio.Universal/Properties/AssemblyInfo.cs @@ -10,7 +10,7 @@ [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("NAudio.Universal")] -[assembly: AssemblyCopyright("Copyright © Mark Heath 2015")] +[assembly: AssemblyCopyright("Copyright © Mark Heath 2017")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] diff --git a/NAudio.Win8/Wave/WaveInputs/WasapiCaptureRT.cs b/NAudio.Universal/Wave/WaveInputs/WasapiCaptureRT.cs similarity index 84% rename from NAudio.Win8/Wave/WaveInputs/WasapiCaptureRT.cs rename to NAudio.Universal/Wave/WaveInputs/WasapiCaptureRT.cs index bee798b4..6aa1aa37 100644 --- a/NAudio.Win8/Wave/WaveInputs/WasapiCaptureRT.cs +++ b/NAudio.Universal/Wave/WaveInputs/WasapiCaptureRT.cs @@ -1,381 +1,372 @@ -using System; -using System.Collections.Generic; -using System.Text; -using System.Threading.Tasks; -using NAudio.CoreAudioApi; -using NAudio.CoreAudioApi.Interfaces; -using NAudio.Wave; -using System.Threading; -using System.Diagnostics; -using System.Runtime.InteropServices; -using NAudio.Win8.Wave.WaveOutputs; -using Windows.Devices.Enumeration; -using Windows.Media.Devices; - -namespace NAudio.Wave -{ - enum WasapiCaptureState - { - Uninitialized, - Stopped, - Recording, - Disposed - } - - /// - /// Audio Capture using Wasapi - /// See http://msdn.microsoft.com/en-us/library/dd370800%28VS.85%29.aspx - /// - public class WasapiCaptureRT : IWaveIn - { - static readonly Guid IID_IAudioClient2 = new Guid("726778CD-F60A-4eda-82DE-E47610CD78AA"); - private const long REFTIMES_PER_SEC = 10000000; - private const long REFTIMES_PER_MILLISEC = 10000; - private volatile WasapiCaptureState captureState; - private byte[] recordBuffer; - private readonly string device; - private int bytesPerFrame; - private WaveFormat waveFormat; - private AudioClient audioClient; - private IntPtr hEvent; - private Task captureTask; - private SynchronizationContext syncContext; - - /// - /// Indicates recorded data is available - /// - public event EventHandler DataAvailable; - - /// - /// Indicates that all recorded data has now been received. 
- /// - public event EventHandler RecordingStopped; - private int latencyMilliseconds; - - /// - /// Initialises a new instance of the WASAPI capture class - /// - public WasapiCaptureRT() : - this(GetDefaultCaptureDevice()) - { - } - - /// - /// Initialises a new instance of the WASAPI capture class - /// - /// Capture device to use - public WasapiCaptureRT(string device) - { - this.device = device; - this.syncContext = SynchronizationContext.Current; - //this.waveFormat = audioClient.MixFormat; - } - - /// - /// Recording wave format - /// - public virtual WaveFormat WaveFormat - { - get - { - // for convenience, return a WAVEFORMATEX, instead of the real - // WAVEFORMATEXTENSIBLE being used - var wfe = waveFormat as WaveFormatExtensible; - if (wfe != null) - { - try - { - return wfe.ToStandardWaveFormat(); - } - catch (InvalidOperationException) - { - // couldn't convert to a standard format - } - } - return waveFormat; - } - set { waveFormat = value; } - } - - /// - /// Way of enumerating all the audio capture devices available on the system - /// - /// - public async static Task> GetCaptureDevices() - { - var audioCaptureSelector = MediaDevice.GetAudioCaptureSelector(); - - // (a PropertyKey) - var supportsEventDrivenMode = "{1da5d803-d492-4edd-8c23-e0c0ffee7f0e} 7"; - - var captureDevices = await DeviceInformation.FindAllAsync(audioCaptureSelector, new[] { supportsEventDrivenMode } ); - return captureDevices; - } - - /// - /// Gets the default audio capture device - /// - /// The default audio capture device - public static string GetDefaultCaptureDevice() - { - var defaultCaptureDeviceId = MediaDevice.GetDefaultAudioCaptureId(AudioDeviceRole.Default); - return defaultCaptureDeviceId; - } - - /// - /// Initializes the capture device. Must be called on the UI (STA) thread. - /// If not called manually then StartRecording() will call it internally. 
- /// - public async Task InitAsync() - { - if (captureState == WasapiCaptureState.Disposed) throw new ObjectDisposedException(nameof(WasapiCaptureRT)); - if (captureState != WasapiCaptureState.Uninitialized) throw new InvalidOperationException("Already initialized"); - - var icbh = new ActivateAudioInterfaceCompletionHandler(ac2 => InitializeCaptureDevice((IAudioClient)ac2)); - IActivateAudioInterfaceAsyncOperation activationOperation; - // must be called on UI thread - NativeMethods.ActivateAudioInterfaceAsync(device, IID_IAudioClient2, IntPtr.Zero, icbh, out activationOperation); - audioClient = new AudioClient((IAudioClient)(await icbh)); - - hEvent = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS); - audioClient.SetEventHandle(hEvent); - - captureState = WasapiCaptureState.Stopped; - } - - private void InitializeCaptureDevice(IAudioClient audioClientInterface) - { - var audioClient = new AudioClient((IAudioClient)audioClientInterface); - if (waveFormat == null) - { - waveFormat = audioClient.MixFormat; - } - - long requestedDuration = REFTIMES_PER_MILLISEC * 100; - - - if (!audioClient.IsFormatSupported(AudioClientShareMode.Shared, waveFormat)) - { - throw new ArgumentException("Unsupported Wave Format"); - } - - var streamFlags = GetAudioClientStreamFlags(); - - audioClient.Initialize(AudioClientShareMode.Shared, - streamFlags, - requestedDuration, - 0, - waveFormat, - Guid.Empty); - - - int bufferFrameCount = audioClient.BufferSize; - this.bytesPerFrame = this.waveFormat.Channels * this.waveFormat.BitsPerSample / 8; - this.recordBuffer = new byte[bufferFrameCount * bytesPerFrame]; - Debug.WriteLine(string.Format("record buffer size = {0}", this.recordBuffer.Length)); - - // Get back the effective latency from AudioClient - latencyMilliseconds = (int)(audioClient.StreamLatency / 10000); - } - - /// - /// To allow overrides to specify different flags (e.g. 
loopback) - /// - protected virtual AudioClientStreamFlags GetAudioClientStreamFlags() - { - return AudioClientStreamFlags.EventCallback; - } - - /// - /// Start Recording - /// - public async void StartRecording() - { - if (captureState == WasapiCaptureState.Disposed) throw new ObjectDisposedException(nameof(WasapiCaptureRT)); - if (captureState == WasapiCaptureState.Uninitialized) await InitAsync(); - - captureState = WasapiCaptureState.Recording; - - captureTask = Task.Run(() => DoRecording()); - - Debug.WriteLine("Recording..."); - } - - /// - /// Stop Recording - /// - public void StopRecording() - { - if (captureState == WasapiCaptureState.Disposed) throw new ObjectDisposedException(nameof(WasapiCaptureRT)); - if (captureState != WasapiCaptureState.Recording) return; - - captureState = WasapiCaptureState.Stopped; - captureTask?.Wait(5000); - Debug.WriteLine("WasapiCaptureRT stopped"); - } - - private void DoRecording() - { - Debug.WriteLine("Recording buffer size: " + audioClient.BufferSize); - - var buf = new Byte[audioClient.BufferSize * bytesPerFrame]; - - int bufLength = 0; - int minPacketSize = waveFormat.AverageBytesPerSecond / 100; //100ms - - try - { - AudioCaptureClient capture = audioClient.AudioCaptureClient; - audioClient.Start(); - - int packetSize = capture.GetNextPacketSize(); - - while (captureState == WasapiCaptureState.Recording) - { - IntPtr pData = IntPtr.Zero; - int numFramesToRead = 0; - AudioClientBufferFlags dwFlags = 0; - - if (packetSize == 0) - { - if (NativeMethods.WaitForSingleObjectEx(hEvent, 100, true) != 0) - { - throw new Exception("Capture event timeout"); - } - } - - pData = capture.GetBuffer(out numFramesToRead, out dwFlags); - - if ((int)(dwFlags & AudioClientBufferFlags.Silent) > 0) - { - pData = IntPtr.Zero; - } - - if (numFramesToRead == 0) { continue; } - - int capturedBytes = numFramesToRead * bytesPerFrame; - - if (pData == IntPtr.Zero) - { - Array.Clear(buf, bufLength, capturedBytes); - } - else - { - Marshal.Copy(pData, buf, bufLength, capturedBytes); - } - - bufLength += capturedBytes; - - capture.ReleaseBuffer(numFramesToRead); - - if (bufLength >= minPacketSize) - { - if (DataAvailable != null) - { - DataAvailable(this, new WaveInEventArgs(buf, bufLength)); - } - bufLength = 0; - } - - packetSize = capture.GetNextPacketSize(); - } - } - catch (Exception ex) - { - RaiseRecordingStopped(ex); - Debug.WriteLine("stop wasapi"); - } - finally - { - RaiseRecordingStopped(null); - - audioClient.Stop(); - } - Debug.WriteLine("stop wasapi"); - } - - private void RaiseRecordingStopped(Exception exception) - { - var handler = RecordingStopped; - if (handler != null) - { - if (this.syncContext == null) - { - handler(this, new StoppedEventArgs(exception)); - } - else - { - syncContext.Post(state => handler(this, new StoppedEventArgs(exception)), null); - } - } - } - - private void ReadNextPacket(AudioCaptureClient capture) - { - IntPtr buffer; - int framesAvailable; - AudioClientBufferFlags flags; - int packetSize = capture.GetNextPacketSize(); - int recordBufferOffset = 0; - //Debug.WriteLine(string.Format("packet size: {0} samples", packetSize / 4)); - - while (packetSize != 0) - { - buffer = capture.GetBuffer(out framesAvailable, out flags); - - int bytesAvailable = framesAvailable * bytesPerFrame; - - // apparently it is sometimes possible to read more frames than we were expecting? 
- // fix suggested by Michael Feld: - int spaceRemaining = Math.Max(0, recordBuffer.Length - recordBufferOffset); - if (spaceRemaining < bytesAvailable && recordBufferOffset > 0) - { - if (DataAvailable != null) DataAvailable(this, new WaveInEventArgs(recordBuffer, recordBufferOffset)); - recordBufferOffset = 0; - } - - // if not silence... - if ((flags & AudioClientBufferFlags.Silent) != AudioClientBufferFlags.Silent) - { - Marshal.Copy(buffer, recordBuffer, recordBufferOffset, bytesAvailable); - } - else - { - Array.Clear(recordBuffer, recordBufferOffset, bytesAvailable); - } - recordBufferOffset += bytesAvailable; - capture.ReleaseBuffer(framesAvailable); - packetSize = capture.GetNextPacketSize(); - } - if (DataAvailable != null) - { - DataAvailable(this, new WaveInEventArgs(recordBuffer, recordBufferOffset)); - } - } - - /// - /// Dispose - /// - public void Dispose() - { - if (captureState == WasapiCaptureState.Disposed) return; - - try - { - StopRecording(); - - NativeMethods.CloseHandle(hEvent); - audioClient?.Dispose(); - } - catch (Exception ex) - { - Debug.WriteLine("Exception disposing WasapiCaptureRT: " + ex.ToString()); - } - - hEvent = IntPtr.Zero; - audioClient = null; - - captureState = WasapiCaptureState.Disposed; - } - } -} +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using NAudio.CoreAudioApi; +using NAudio.CoreAudioApi.Interfaces; +using System.Threading; +using System.Diagnostics; +using System.Runtime.InteropServices; +using NAudio.Win8.Wave.WaveOutputs; +using Windows.Devices.Enumeration; +using Windows.Media.Devices; + +namespace NAudio.Wave +{ + enum WasapiCaptureState + { + Uninitialized, + Stopped, + Recording, + Disposed + } + + /// + /// Audio Capture using Wasapi + /// See http://msdn.microsoft.com/en-us/library/dd370800%28VS.85%29.aspx + /// + public class WasapiCaptureRT : IWaveIn + { + static readonly Guid IID_IAudioClient2 = new Guid("726778CD-F60A-4eda-82DE-E47610CD78AA"); + private const long REFTIMES_PER_SEC = 10000000; + private const long REFTIMES_PER_MILLISEC = 10000; + private volatile WasapiCaptureState captureState; + private byte[] recordBuffer; + private readonly string device; + private int bytesPerFrame; + private WaveFormat waveFormat; + private AudioClient audioClient; + private IntPtr hEvent; + private Task captureTask; + private readonly SynchronizationContext syncContext; + + /// + /// Indicates recorded data is available + /// + public event EventHandler DataAvailable; + + /// + /// Indicates that all recorded data has now been received. 
+ /// + public event EventHandler RecordingStopped; + + /// + /// The effective latency in milliseconds + /// + public int LatencyMilliseconds { get; private set; } + + /// + /// Initialises a new instance of the WASAPI capture class + /// + public WasapiCaptureRT() : + this(GetDefaultCaptureDevice()) + { + } + + /// + /// Initialises a new instance of the WASAPI capture class + /// + /// Capture device to use + public WasapiCaptureRT(string device) + { + this.device = device; + syncContext = SynchronizationContext.Current; + //this.waveFormat = audioClient.MixFormat; + } + + /// + /// Recording wave format + /// + public virtual WaveFormat WaveFormat + { + get + { + // for convenience, return a WAVEFORMATEX, instead of the real + // WAVEFORMATEXTENSIBLE being used + if (waveFormat is WaveFormatExtensible wfe) + { + try + { + return wfe.ToStandardWaveFormat(); + } + catch (InvalidOperationException) + { + // couldn't convert to a standard format + } + } + return waveFormat; + } + set => waveFormat = value; + } + + /// + /// Way of enumerating all the audio capture devices available on the system + /// + /// + public static async Task> GetCaptureDevices() + { + var audioCaptureSelector = MediaDevice.GetAudioCaptureSelector(); + + // (a PropertyKey) + var supportsEventDrivenMode = "{1da5d803-d492-4edd-8c23-e0c0ffee7f0e} 7"; + + var captureDevices = await DeviceInformation.FindAllAsync(audioCaptureSelector, new[] { supportsEventDrivenMode } ); + return captureDevices; + } + + /// + /// Gets the default audio capture device + /// + /// The default audio capture device + public static string GetDefaultCaptureDevice() + { + var defaultCaptureDeviceId = MediaDevice.GetDefaultAudioCaptureId(AudioDeviceRole.Default); + return defaultCaptureDeviceId; + } + + /// + /// Initializes the capture device. Must be called on the UI (STA) thread. + /// If not called manually then StartRecording() will call it internally. 
+ /// + public async Task InitAsync() + { + if (captureState == WasapiCaptureState.Disposed) throw new ObjectDisposedException(nameof(WasapiCaptureRT)); + if (captureState != WasapiCaptureState.Uninitialized) throw new InvalidOperationException("Already initialized"); + + var icbh = new ActivateAudioInterfaceCompletionHandler(ac2 => InitializeCaptureDevice((IAudioClient)ac2)); + // must be called on UI thread + NativeMethods.ActivateAudioInterfaceAsync(device, IID_IAudioClient2, IntPtr.Zero, icbh, out var activationOperation); + audioClient = new AudioClient((IAudioClient)(await icbh)); + + hEvent = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS); + audioClient.SetEventHandle(hEvent); + + captureState = WasapiCaptureState.Stopped; + } + + private void InitializeCaptureDevice(IAudioClient audioClientInterface) + { + var audioClient = new AudioClient((IAudioClient)audioClientInterface); + if (waveFormat == null) + { + waveFormat = audioClient.MixFormat; + } + + long requestedDuration = REFTIMES_PER_MILLISEC * 100; + + if (!audioClient.IsFormatSupported(AudioClientShareMode.Shared, waveFormat)) + { + throw new ArgumentException("Unsupported Wave Format"); + } + + var streamFlags = GetAudioClientStreamFlags(); + + audioClient.Initialize(AudioClientShareMode.Shared, + streamFlags, + requestedDuration, + 0, + waveFormat, + Guid.Empty); + + int bufferFrameCount = audioClient.BufferSize; + bytesPerFrame = waveFormat.Channels * waveFormat.BitsPerSample / 8; + recordBuffer = new byte[bufferFrameCount * bytesPerFrame]; + //Debug.WriteLine("record buffer size = {0}", this.recordBuffer.Length); + + // Get back the effective latency from AudioClient + LatencyMilliseconds = (int)(audioClient.StreamLatency / 10000); + } + + /// + /// To allow overrides to specify different flags (e.g. 
loopback) + /// + protected virtual AudioClientStreamFlags GetAudioClientStreamFlags() + { + return AudioClientStreamFlags.EventCallback; + } + + /// + /// Start Recording + /// + public async void StartRecording() + { + if (captureState == WasapiCaptureState.Disposed) throw new ObjectDisposedException(nameof(WasapiCaptureRT)); + if (captureState == WasapiCaptureState.Uninitialized) await InitAsync(); + + captureState = WasapiCaptureState.Recording; + + captureTask = Task.Run(() => DoRecording()); + + Debug.WriteLine("Recording..."); + } + + /// + /// Stop Recording + /// + public void StopRecording() + { + if (captureState == WasapiCaptureState.Disposed) throw new ObjectDisposedException(nameof(WasapiCaptureRT)); + if (captureState != WasapiCaptureState.Recording) return; + + captureState = WasapiCaptureState.Stopped; + captureTask?.Wait(5000); + //Debug.WriteLine("WasapiCaptureRT stopped"); + } + + private void DoRecording() + { + //Debug.WriteLine("Recording buffer size: " + audioClient.BufferSize); + + var buf = new byte[audioClient.BufferSize * bytesPerFrame]; + + int bufLength = 0; + int minPacketSize = waveFormat.AverageBytesPerSecond / 100; //100ms + + try + { + AudioCaptureClient capture = audioClient.AudioCaptureClient; + audioClient.Start(); + + int packetSize = capture.GetNextPacketSize(); + + while (captureState == WasapiCaptureState.Recording) + { + if (packetSize == 0) + { + if (NativeMethods.WaitForSingleObjectEx(hEvent, 100, true) != 0) + { + throw new Exception("Capture event timeout"); + } + } + + var pData = capture.GetBuffer(out var numFramesToRead, out var dwFlags); + + if ((int)(dwFlags & AudioClientBufferFlags.Silent) > 0) + { + pData = IntPtr.Zero; + } + + if (numFramesToRead == 0) { continue; } + + int capturedBytes = numFramesToRead * bytesPerFrame; + + if (pData == IntPtr.Zero) + { + Array.Clear(buf, bufLength, capturedBytes); + } + else + { + Marshal.Copy(pData, buf, bufLength, capturedBytes); + } + + bufLength += capturedBytes; + + capture.ReleaseBuffer(numFramesToRead); + + if (bufLength >= minPacketSize) + { + DataAvailable?.Invoke(this, new WaveInEventArgs(buf, bufLength)); + bufLength = 0; + } + + packetSize = capture.GetNextPacketSize(); + } + } + catch (Exception ex) + { + RaiseRecordingStopped(ex); + Debug.WriteLine("stop wasapi"); + } + finally + { + RaiseRecordingStopped(null); + + audioClient.Stop(); + } + Debug.WriteLine("stop wasapi"); + } + + private void RaiseRecordingStopped(Exception exception) + { + var handler = RecordingStopped; + if (handler != null) + { + if (syncContext == null) + { + handler(this, new StoppedEventArgs(exception)); + } + else + { + syncContext.Post(state => handler(this, new StoppedEventArgs(exception)), null); + } + } + } + + private void ReadNextPacket(AudioCaptureClient capture) + { + IntPtr buffer; + int framesAvailable; + AudioClientBufferFlags flags; + int packetSize = capture.GetNextPacketSize(); + int recordBufferOffset = 0; + //Debug.WriteLine(string.Format("packet size: {0} samples", packetSize / 4)); + + while (packetSize != 0) + { + buffer = capture.GetBuffer(out framesAvailable, out flags); + + int bytesAvailable = framesAvailable * bytesPerFrame; + + // apparently it is sometimes possible to read more frames than we were expecting? 
+ // fix suggested by Michael Feld: + int spaceRemaining = Math.Max(0, recordBuffer.Length - recordBufferOffset); + if (spaceRemaining < bytesAvailable && recordBufferOffset > 0) + { + if (DataAvailable != null) DataAvailable(this, new WaveInEventArgs(recordBuffer, recordBufferOffset)); + recordBufferOffset = 0; + } + + // if not silence... + if ((flags & AudioClientBufferFlags.Silent) != AudioClientBufferFlags.Silent) + { + Marshal.Copy(buffer, recordBuffer, recordBufferOffset, bytesAvailable); + } + else + { + Array.Clear(recordBuffer, recordBufferOffset, bytesAvailable); + } + recordBufferOffset += bytesAvailable; + capture.ReleaseBuffer(framesAvailable); + packetSize = capture.GetNextPacketSize(); + } + if (DataAvailable != null) + { + DataAvailable(this, new WaveInEventArgs(recordBuffer, recordBufferOffset)); + } + } + + /// + /// Dispose + /// + public void Dispose() + { + if (captureState == WasapiCaptureState.Disposed) return; + + try + { + StopRecording(); + + NativeMethods.CloseHandle(hEvent); + audioClient?.Dispose(); + } + catch (Exception ex) + { + Debug.WriteLine("Exception disposing WasapiCaptureRT: " + ex.ToString()); + } + + hEvent = IntPtr.Zero; + audioClient = null; + + captureState = WasapiCaptureState.Disposed; + } + } +} diff --git a/NAudio.Win8/Wave/WaveOutputs/WasapiOutRT.cs b/NAudio.Universal/Wave/WaveOutputs/WasapiOutRT.cs similarity index 97% rename from NAudio.Win8/Wave/WaveOutputs/WasapiOutRT.cs rename to NAudio.Universal/Wave/WaveOutputs/WasapiOutRT.cs index afe89868..9f993450 100644 --- a/NAudio.Win8/Wave/WaveOutputs/WasapiOutRT.cs +++ b/NAudio.Universal/Wave/WaveOutputs/WasapiOutRT.cs @@ -1,654 +1,654 @@ -using System; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; -using System.Threading; -using System.Threading.Tasks; -using Windows.System.Threading; -using NAudio.CoreAudioApi; -using NAudio.CoreAudioApi.Interfaces; -using NAudio.Dsp; -using NAudio.Wave; -using Windows.Media.Devices; -using NAudio.Utils; -using NAudio.Wave.SampleProviders; - -namespace NAudio.Win8.Wave.WaveOutputs -{ - enum WasapiOutState - { - Uninitialized, - Stopped, - Paused, - Playing, - Stopping, - Disposing, - Disposed - } - - /// - /// WASAPI Out for Windows RT - /// - public class WasapiOutRT : IWavePlayer - { - private AudioClient audioClient; - private readonly string device; - private readonly AudioClientShareMode shareMode; - private AudioRenderClient renderClient; - private int latencyMilliseconds; - private int bufferFrameCount; - private int bytesPerFrame; - private byte[] readBuffer; - private volatile WasapiOutState playbackState; - private WaveFormat outputFormat; - private bool resamplerNeeded; - private IntPtr frameEventWaitHandle; - private readonly SynchronizationContext syncContext; - private bool isInitialized; - private readonly AutoResetEvent playThreadEvent; - - /// - /// Playback Stopped - /// - public event EventHandler PlaybackStopped; - - /// - /// WASAPI Out using default audio endpoint - /// - /// ShareMode - shared or exclusive - /// Desired latency in milliseconds - public WasapiOutRT(AudioClientShareMode shareMode, int latency) : - this(GetDefaultAudioEndpoint(), shareMode, latency) - { - - } - - /// - /// Creates a new WASAPI Output - /// - /// Device to use - /// - /// - public WasapiOutRT(string device, AudioClientShareMode shareMode, int latency) - { - this.device = device; - this.shareMode = shareMode; - this.latencyMilliseconds = latency; - this.syncContext = SynchronizationContext.Current; - 
playThreadEvent = new AutoResetEvent(false); - } - - /// - /// Properties of the client's audio stream. - /// Set before calling init - /// - private AudioClientProperties? audioClientProperties = null; - - private Func waveProviderFunc; - - /// - /// Sets the parameters that describe the properties of the client's audio stream. - /// - /// Boolean value to indicate whether or not the audio stream is hardware-offloaded. - /// An enumeration that is used to specify the category of the audio stream. - /// A bit-field describing the characteristics of the stream. Supported in Windows 8.1 and later. - public void SetClientProperties(bool useHardwareOffload, AudioStreamCategory category, AudioClientStreamOptions options) - { - audioClientProperties = new AudioClientProperties() - { - cbSize = (uint) MarshalHelpers.SizeOf(), - bIsOffload = Convert.ToInt32(useHardwareOffload), - eCategory = category, - Options = options - }; - } - - private async Task Activate() - { - var icbh = new ActivateAudioInterfaceCompletionHandler( - ac2 => - { - - if (this.audioClientProperties != null) - { - IntPtr p = Marshal.AllocHGlobal(Marshal.SizeOf(this.audioClientProperties.Value)); - Marshal.StructureToPtr(this.audioClientProperties.Value, p, false); - ac2.SetClientProperties(p); - Marshal.FreeHGlobal(p); - // TODO: consider whether we can marshal this without the need for AllocHGlobal - } - - /*var wfx = new WaveFormat(44100, 16, 2); - int hr = ac2.Initialize(AudioClientShareMode.Shared, - AudioClientStreamFlags.EventCallback | AudioClientStreamFlags.NoPersist, - 10000000, 0, wfx, IntPtr.Zero);*/ - }); - var IID_IAudioClient2 = new Guid("726778CD-F60A-4eda-82DE-E47610CD78AA"); - IActivateAudioInterfaceAsyncOperation activationOperation; - NativeMethods.ActivateAudioInterfaceAsync(device, IID_IAudioClient2, IntPtr.Zero, icbh, out activationOperation); - var audioClient2 = await icbh; - this.audioClient = new AudioClient((IAudioClient)audioClient2); - } - - private static string GetDefaultAudioEndpoint() - { - // can't use the MMDeviceEnumerator in WinRT - - return MediaDevice.GetDefaultAudioRenderId(AudioDeviceRole.Default); - } - - private async void PlayThread() - { - await Activate(); - var playbackProvider = Init(); - bool isClientRunning = false; - try - { - if (this.resamplerNeeded) - { - var resampler = new WdlResamplingSampleProvider(playbackProvider.ToSampleProvider(), outputFormat.SampleRate); - playbackProvider = new SampleToWaveProvider(resampler); - } - - // fill a whole buffer - bufferFrameCount = audioClient.BufferSize; - bytesPerFrame = outputFormat.Channels*outputFormat.BitsPerSample/8; - readBuffer = new byte[bufferFrameCount*bytesPerFrame]; - FillBuffer(playbackProvider, bufferFrameCount); - int timeout = 3 * latencyMilliseconds; - - while (playbackState != WasapiOutState.Disposed) - { - if (playbackState != WasapiOutState.Playing) - { - playThreadEvent.WaitOne(500); - } - - // If still playing and notification is ok - if (playbackState == WasapiOutState.Playing) - { - if (!isClientRunning) - { - audioClient.Start(); - isClientRunning = true; - } - // If using Event Sync, Wait for notification from AudioClient or Sleep half latency - var r = NativeMethods.WaitForSingleObjectEx(frameEventWaitHandle, timeout, true); - if (r != 0) throw new InvalidOperationException("Timed out waiting for event"); - // See how much buffer space is available. 
- int numFramesPadding = 0; - // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize - numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0; - - int numFramesAvailable = bufferFrameCount - numFramesPadding; - if (numFramesAvailable > 0) - { - FillBuffer(playbackProvider, numFramesAvailable); - } - } - - if (playbackState == WasapiOutState.Stopping) - { - // play the buffer out - while (audioClient.CurrentPadding > 0) - { - await Task.Delay(latencyMilliseconds / 2); - } - audioClient.Stop(); - isClientRunning = false; - audioClient.Reset(); - playbackState = WasapiOutState.Stopped; - RaisePlaybackStopped(null); - } - if (playbackState == WasapiOutState.Disposing) - { - audioClient.Stop(); - isClientRunning = false; - audioClient.Reset(); - playbackState = WasapiOutState.Disposed; - var disposablePlaybackProvider = playbackProvider as IDisposable; - if (disposablePlaybackProvider!=null) - disposablePlaybackProvider.Dispose(); // do everything on this thread, even dispose in case it is Media Foundation - RaisePlaybackStopped(null); - - } - - } - } - catch (Exception e) - { - RaisePlaybackStopped(e); - } - finally - { - audioClient.Dispose(); - audioClient = null; - renderClient = null; - NativeMethods.CloseHandle(frameEventWaitHandle); - - } - } - - private void RaisePlaybackStopped(Exception e) - { - var handler = PlaybackStopped; - if (handler != null) - { - if (this.syncContext == null) - { - handler(this, new StoppedEventArgs(e)); - } - else - { - syncContext.Post(state => handler(this, new StoppedEventArgs(e)), null); - } - } - } - - private void FillBuffer(IWaveProvider playbackProvider, int frameCount) - { - IntPtr buffer = renderClient.GetBuffer(frameCount); - int readLength = frameCount*bytesPerFrame; - int read = playbackProvider.Read(readBuffer, 0, readLength); - if (read == 0) - { - playbackState = WasapiOutState.Stopping; - } - Marshal.Copy(readBuffer, 0, buffer, read); - int actualFrameCount = read/bytesPerFrame; - /*if (actualFrameCount != frameCount) - { - Debug.WriteLine(String.Format("WASAPI wanted {0} frames, supplied {1}", frameCount, actualFrameCount )); - }*/ - renderClient.ReleaseBuffer(actualFrameCount, AudioClientBufferFlags.None); - } - - #region IWavePlayer Members - - /// - /// Begin Playback - /// - public void Play() - { - if (playbackState != WasapiOutState.Playing) - { - playbackState = WasapiOutState.Playing; - playThreadEvent.Set(); - } - } - - /// - /// Stop playback and flush buffers - /// - public void Stop() - { - if (playbackState == WasapiOutState.Playing || playbackState == WasapiOutState.Paused) - { - playbackState = WasapiOutState.Stopping; - playThreadEvent.Set(); - } - } - - /// - /// Stop playback without flushing buffers - /// - public void Pause() - { - if (playbackState == WasapiOutState.Playing) - { - playbackState = WasapiOutState.Paused; - playThreadEvent.Set(); - } - } - - /// - /// Old init implementation. 
Use the func one - /// - /// - /// - [Obsolete] - public async Task Init(IWaveProvider provider) - { - Init(() => provider); - } - - /// - /// Initializes with a function to create the provider that is made on the playback thread - /// - /// Creates the wave provider - public void Init(Func waveProviderFunc) - { - if (isInitialized) throw new InvalidOperationException("Already Initialized"); - isInitialized = true; - this.waveProviderFunc = waveProviderFunc; - Task.Factory.StartNew(() => - { - PlayThread(); - }); - } - - /// - /// Initialize for playing the specified wave stream - /// - private IWaveProvider Init() - { - var waveProvider = waveProviderFunc(); - long latencyRefTimes = latencyMilliseconds*10000; - outputFormat = waveProvider.WaveFormat; - // first attempt uses the WaveFormat from the WaveStream - WaveFormatExtensible closestSampleRateFormat; - if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat)) - { - // Use closesSampleRateFormat (in sharedMode, it equals usualy to the audioClient.MixFormat) - // See documentation : http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx - // They say : "In shared mode, the audio engine always supports the mix format" - // The MixFormat is more likely to be a WaveFormatExtensible. - if (closestSampleRateFormat == null) - { - WaveFormat correctSampleRateFormat = audioClient.MixFormat; - /*WaveFormat.CreateIeeeFloatWaveFormat( - audioClient.MixFormat.SampleRate, - audioClient.MixFormat.Channels);*/ - - if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat)) - { - // Iterate from Worst to Best Format - WaveFormatExtensible[] bestToWorstFormats = - { - new WaveFormatExtensible( - outputFormat.SampleRate, 32, - outputFormat.Channels), - new WaveFormatExtensible( - outputFormat.SampleRate, 24, - outputFormat.Channels), - new WaveFormatExtensible( - outputFormat.SampleRate, 16, - outputFormat.Channels), - }; - - // Check from best Format to worst format ( Float32, Int24, Int16 ) - for (int i = 0; i < bestToWorstFormats.Length; i++) - { - correctSampleRateFormat = bestToWorstFormats[i]; - if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat)) - { - break; - } - correctSampleRateFormat = null; - } - - // If still null, then test on the PCM16, 2 channels - if (correctSampleRateFormat == null) - { - // Last Last Last Chance (Thanks WASAPI) - correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2); - if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat)) - { - throw new NotSupportedException("Can't find a supported format to use"); - } - } - } - outputFormat = correctSampleRateFormat; - } - else - { - outputFormat = closestSampleRateFormat; - } - - // just check that we can make it. - //using (new MediaFoundationResampler(waveProvider, outputFormat)) - { - } - this.resamplerNeeded = true; - } - else - { - resamplerNeeded = false; - } - - // Init Shared or Exclusive - if (shareMode == AudioClientShareMode.Shared) - { - // With EventCallBack and Shared, - audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0, - outputFormat, Guid.Empty); - - // Get back the effective latency from AudioClient. 
On Windows 10 it can be 0 - if (audioClient.StreamLatency > 0) - latencyMilliseconds = (int) (audioClient.StreamLatency/10000); - } - else - { - // With EventCallBack and Exclusive, both latencies must equals - audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes, - outputFormat, Guid.Empty); - } - - // Create the Wait Event Handle - frameEventWaitHandle = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS); - audioClient.SetEventHandle(frameEventWaitHandle); - - // Get the RenderClient - renderClient = audioClient.AudioRenderClient; - return waveProvider; - } - - /// - /// Playback State - /// - public PlaybackState PlaybackState - { - get - { - switch (playbackState) - { - case WasapiOutState.Playing: - return PlaybackState.Playing; - case WasapiOutState.Paused: - return PlaybackState.Paused; - default: - return PlaybackState.Stopped; - } - } - } - - #endregion - - /// - /// Dispose - /// - public void Dispose() - { - if (audioClient != null) - { - playbackState = WasapiOutState.Disposing; - playThreadEvent.Set(); - } - } - } - - /// - /// Some useful native methods for Windows 8/10 support ( https://msdn.microsoft.com/en-us/library/windows/desktop/hh802935(v=vs.85).aspx ) - /// - class NativeMethods - { - [DllImport("api-ms-win-core-synch-l1-2-0.dll", CharSet = CharSet.Unicode, ExactSpelling = false, - PreserveSig = true, SetLastError = true)] - internal static extern IntPtr CreateEventExW(IntPtr lpEventAttributes, IntPtr lpName, int dwFlags, - EventAccess dwDesiredAccess); - - - [DllImport("api-ms-win-core-handle-l1-1-0.dll", ExactSpelling = true, PreserveSig = true, SetLastError = true)] - public static extern bool CloseHandle(IntPtr hObject); - - [DllImport("api-ms-win-core-synch-l1-2-0.dll", ExactSpelling = true, PreserveSig = true, SetLastError = true)] - public static extern int WaitForSingleObjectEx(IntPtr hEvent, int milliseconds, bool bAlertable); - - /// - /// Enables Windows Store apps to access preexisting Component Object Model (COM) interfaces in the WASAPI family. - /// - /// A device interface ID for an audio device. This is normally retrieved from a DeviceInformation object or one of the methods of the MediaDevice class. - /// The IID of a COM interface in the WASAPI family, such as IAudioClient. - /// Interface-specific activation parameters. For more information, see the pActivationParams parameter in IMMDevice::Activate. - /// - /// - [DllImport("Mmdevapi.dll", ExactSpelling = true, PreserveSig = false)] - public static extern void ActivateAudioInterfaceAsync( - [In, MarshalAs(UnmanagedType.LPWStr)] string deviceInterfacePath, - [In, MarshalAs(UnmanagedType.LPStruct)] Guid riid, - [In] IntPtr activationParams, // n.b. 
is actually a pointer to a PropVariant, but we never need to pass anything but null - [In] IActivateAudioInterfaceCompletionHandler completionHandler, - out IActivateAudioInterfaceAsyncOperation activationOperation); - } - - // trying some ideas from Lucian Wischik (ljw1004): - // http://www.codeproject.com/Articles/460145/Recording-and-playing-PCM-audio-on-Windows-8-VB - - [Flags] - internal enum EventAccess - { - STANDARD_RIGHTS_REQUIRED = 0xF0000, - SYNCHRONIZE = 0x100000, - EVENT_ALL_ACCESS = STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0x3 - } - - internal class ActivateAudioInterfaceCompletionHandler : - IActivateAudioInterfaceCompletionHandler, IAgileObject - { - private Action initializeAction; - private TaskCompletionSource tcs = new TaskCompletionSource(); - - public ActivateAudioInterfaceCompletionHandler( - Action initializeAction) - { - this.initializeAction = initializeAction; - } - - public void ActivateCompleted(IActivateAudioInterfaceAsyncOperation activateOperation) - { - // First get the activation results, and see if anything bad happened then - int hr = 0; - object unk = null; - activateOperation.GetActivateResult(out hr, out unk); - if (hr != 0) - { - tcs.TrySetException(Marshal.GetExceptionForHR(hr, new IntPtr(-1))); - return; - } - - var pAudioClient = (IAudioClient2) unk; - - // Next try to call the client's (synchronous, blocking) initialization method. - try - { - initializeAction(pAudioClient); - tcs.SetResult(pAudioClient); - } - catch (Exception ex) - { - tcs.TrySetException(ex); - } - - - } - - - public TaskAwaiter GetAwaiter() - { - return tcs.Task.GetAwaiter(); - } - } - - [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("41D949AB-9862-444A-80F6-C261334DA5EB")] - interface IActivateAudioInterfaceCompletionHandler - { - //virtual HRESULT STDMETHODCALLTYPE ActivateCompleted(/*[in]*/ _In_ - // IActivateAudioInterfaceAsyncOperation *activateOperation) = 0; - void ActivateCompleted(IActivateAudioInterfaceAsyncOperation activateOperation); - } - - - [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("72A22D78-CDE4-431D-B8CC-843A71199B6D")] - interface IActivateAudioInterfaceAsyncOperation - { - //virtual HRESULT STDMETHODCALLTYPE GetActivateResult(/*[out]*/ _Out_ - // HRESULT *activateResult, /*[out]*/ _Outptr_result_maybenull_ IUnknown **activatedInterface) = 0; - void GetActivateResult([Out] out int activateResult, - [Out, MarshalAs(UnmanagedType.IUnknown)] out object activateInterface); - } - - - [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("726778CD-F60A-4eda-82DE-E47610CD78AA")] - interface IAudioClient2 - { - [PreserveSig] - int Initialize(AudioClientShareMode shareMode, - AudioClientStreamFlags streamFlags, - long hnsBufferDuration, // REFERENCE_TIME - long hnsPeriodicity, // REFERENCE_TIME - [In] WaveFormat pFormat, - [In] IntPtr audioSessionGuid); - - // ref Guid AudioSessionGuid - - /// - /// The GetBufferSize method retrieves the size (maximum capacity) of the endpoint buffer. 
- /// - int GetBufferSize(out uint bufferSize); - - [return: MarshalAs(UnmanagedType.I8)] - long GetStreamLatency(); - - int GetCurrentPadding(out int currentPadding); - - [PreserveSig] - int IsFormatSupported( - AudioClientShareMode shareMode, - [In] WaveFormat pFormat, - out IntPtr closestMatchFormat); - - int GetMixFormat(out IntPtr deviceFormatPointer); - - // REFERENCE_TIME is 64 bit int - int GetDevicePeriod(out long defaultDevicePeriod, out long minimumDevicePeriod); - - int Start(); - - int Stop(); - - int Reset(); - - int SetEventHandle(IntPtr eventHandle); - - /// - /// The GetService method accesses additional services from the audio client object. - /// - /// The interface ID for the requested service. - /// Pointer to a pointer variable into which the method writes the address of an instance of the requested interface. - [PreserveSig] - int GetService([In, MarshalAs(UnmanagedType.LPStruct)] Guid interfaceId, - [Out, MarshalAs(UnmanagedType.IUnknown)] out object interfacePointer); - - //virtual HRESULT STDMETHODCALLTYPE IsOffloadCapable(/*[in]*/ _In_ - // AUDIO_STREAM_CATEGORY Category, /*[in]*/ _Out_ BOOL *pbOffloadCapable) = 0; - void IsOffloadCapable(int category, out bool pbOffloadCapable); - //virtual HRESULT STDMETHODCALLTYPE SetClientProperties(/*[in]*/ _In_ - // const AudioClientProperties *pProperties) = 0; - void SetClientProperties([In] IntPtr pProperties); - // TODO: try this: void SetClientProperties([In, MarshalAs(UnmanagedType.LPStruct)] AudioClientProperties pProperties); - //virtual HRESULT STDMETHODCALLTYPE GetBufferSizeLimits(/*[in]*/ _In_ - // const WAVEFORMATEX *pFormat, /*[in]*/ _In_ BOOL bEventDriven, /*[in]*/ - // _Out_ REFERENCE_TIME *phnsMinBufferDuration, /*[in]*/ _Out_ - // REFERENCE_TIME *phnsMaxBufferDuration) = 0; - void GetBufferSizeLimits(IntPtr pFormat, bool bEventDriven, - out long phnsMinBufferDuration, out long phnsMaxBufferDuration); - } - - [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("94ea2b94-e9cc-49e0-c0ff-ee64ca8f5b90")] - interface IAgileObject - { - - } - - -} +using System; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; +using System.Threading; +using System.Threading.Tasks; +using Windows.System.Threading; +using NAudio.CoreAudioApi; +using NAudio.CoreAudioApi.Interfaces; +using NAudio.Dsp; +using NAudio.Wave; +using Windows.Media.Devices; +using NAudio.Utils; +using NAudio.Wave.SampleProviders; + +namespace NAudio.Win8.Wave.WaveOutputs +{ + enum WasapiOutState + { + Uninitialized, + Stopped, + Paused, + Playing, + Stopping, + Disposing, + Disposed + } + + /// + /// WASAPI Out for Windows RT + /// + public class WasapiOutRT : IWavePlayer + { + private AudioClient audioClient; + private readonly string device; + private readonly AudioClientShareMode shareMode; + private AudioRenderClient renderClient; + private int latencyMilliseconds; + private int bufferFrameCount; + private int bytesPerFrame; + private byte[] readBuffer; + private volatile WasapiOutState playbackState; + private WaveFormat outputFormat; + private bool resamplerNeeded; + private IntPtr frameEventWaitHandle; + private readonly SynchronizationContext syncContext; + private bool isInitialized; + private readonly AutoResetEvent playThreadEvent; + + /// + /// Playback Stopped + /// + public event EventHandler PlaybackStopped; + + /// + /// WASAPI Out using default audio endpoint + /// + /// ShareMode - shared or exclusive + /// Desired latency in milliseconds + public 
WasapiOutRT(AudioClientShareMode shareMode, int latency) : + this(GetDefaultAudioEndpoint(), shareMode, latency) + { + + } + + /// + /// Creates a new WASAPI Output + /// + /// Device to use + /// + /// + public WasapiOutRT(string device, AudioClientShareMode shareMode, int latency) + { + this.device = device; + this.shareMode = shareMode; + this.latencyMilliseconds = latency; + this.syncContext = SynchronizationContext.Current; + playThreadEvent = new AutoResetEvent(false); + } + + /// + /// Properties of the client's audio stream. + /// Set before calling init + /// + private AudioClientProperties? audioClientProperties = null; + + private Func waveProviderFunc; + + /// + /// Sets the parameters that describe the properties of the client's audio stream. + /// + /// Boolean value to indicate whether or not the audio stream is hardware-offloaded. + /// An enumeration that is used to specify the category of the audio stream. + /// A bit-field describing the characteristics of the stream. Supported in Windows 8.1 and later. + public void SetClientProperties(bool useHardwareOffload, AudioStreamCategory category, AudioClientStreamOptions options) + { + audioClientProperties = new AudioClientProperties() + { + cbSize = (uint) MarshalHelpers.SizeOf(), + bIsOffload = Convert.ToInt32(useHardwareOffload), + eCategory = category, + Options = options + }; + } + + private async Task Activate() + { + var icbh = new ActivateAudioInterfaceCompletionHandler( + ac2 => + { + + if (this.audioClientProperties != null) + { + IntPtr p = Marshal.AllocHGlobal(Marshal.SizeOf(this.audioClientProperties.Value)); + Marshal.StructureToPtr(this.audioClientProperties.Value, p, false); + ac2.SetClientProperties(p); + Marshal.FreeHGlobal(p); + // TODO: consider whether we can marshal this without the need for AllocHGlobal + } + + /*var wfx = new WaveFormat(44100, 16, 2); + int hr = ac2.Initialize(AudioClientShareMode.Shared, + AudioClientStreamFlags.EventCallback | AudioClientStreamFlags.NoPersist, + 10000000, 0, wfx, IntPtr.Zero);*/ + }); + var IID_IAudioClient2 = new Guid("726778CD-F60A-4eda-82DE-E47610CD78AA"); + IActivateAudioInterfaceAsyncOperation activationOperation; + NativeMethods.ActivateAudioInterfaceAsync(device, IID_IAudioClient2, IntPtr.Zero, icbh, out activationOperation); + var audioClient2 = await icbh; + this.audioClient = new AudioClient((IAudioClient)audioClient2); + } + + private static string GetDefaultAudioEndpoint() + { + // can't use the MMDeviceEnumerator in WinRT + + return MediaDevice.GetDefaultAudioRenderId(AudioDeviceRole.Default); + } + + private async void PlayThread() + { + await Activate(); + var playbackProvider = Init(); + bool isClientRunning = false; + try + { + if (this.resamplerNeeded) + { + var resampler = new WdlResamplingSampleProvider(playbackProvider.ToSampleProvider(), outputFormat.SampleRate); + playbackProvider = new SampleToWaveProvider(resampler); + } + + // fill a whole buffer + bufferFrameCount = audioClient.BufferSize; + bytesPerFrame = outputFormat.Channels*outputFormat.BitsPerSample/8; + readBuffer = new byte[bufferFrameCount*bytesPerFrame]; + FillBuffer(playbackProvider, bufferFrameCount); + int timeout = 3 * latencyMilliseconds; + + while (playbackState != WasapiOutState.Disposed) + { + if (playbackState != WasapiOutState.Playing) + { + playThreadEvent.WaitOne(500); + } + + // If still playing and notification is ok + if (playbackState == WasapiOutState.Playing) + { + if (!isClientRunning) + { + audioClient.Start(); + isClientRunning = true; + } + // If using Event 
Sync, Wait for notification from AudioClient or Sleep half latency + var r = NativeMethods.WaitForSingleObjectEx(frameEventWaitHandle, timeout, true); + if (r != 0) throw new InvalidOperationException("Timed out waiting for event"); + // See how much buffer space is available. + int numFramesPadding = 0; + // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize + numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0; + + int numFramesAvailable = bufferFrameCount - numFramesPadding; + if (numFramesAvailable > 0) + { + FillBuffer(playbackProvider, numFramesAvailable); + } + } + + if (playbackState == WasapiOutState.Stopping) + { + // play the buffer out + while (audioClient.CurrentPadding > 0) + { + await Task.Delay(latencyMilliseconds / 2); + } + audioClient.Stop(); + isClientRunning = false; + audioClient.Reset(); + playbackState = WasapiOutState.Stopped; + RaisePlaybackStopped(null); + } + if (playbackState == WasapiOutState.Disposing) + { + audioClient.Stop(); + isClientRunning = false; + audioClient.Reset(); + playbackState = WasapiOutState.Disposed; + var disposablePlaybackProvider = playbackProvider as IDisposable; + if (disposablePlaybackProvider!=null) + disposablePlaybackProvider.Dispose(); // do everything on this thread, even dispose in case it is Media Foundation + RaisePlaybackStopped(null); + + } + + } + } + catch (Exception e) + { + RaisePlaybackStopped(e); + } + finally + { + audioClient.Dispose(); + audioClient = null; + renderClient = null; + NativeMethods.CloseHandle(frameEventWaitHandle); + + } + } + + private void RaisePlaybackStopped(Exception e) + { + var handler = PlaybackStopped; + if (handler != null) + { + if (this.syncContext == null) + { + handler(this, new StoppedEventArgs(e)); + } + else + { + syncContext.Post(state => handler(this, new StoppedEventArgs(e)), null); + } + } + } + + private void FillBuffer(IWaveProvider playbackProvider, int frameCount) + { + IntPtr buffer = renderClient.GetBuffer(frameCount); + int readLength = frameCount*bytesPerFrame; + int read = playbackProvider.Read(readBuffer, 0, readLength); + if (read == 0) + { + playbackState = WasapiOutState.Stopping; + } + Marshal.Copy(readBuffer, 0, buffer, read); + int actualFrameCount = read/bytesPerFrame; + /*if (actualFrameCount != frameCount) + { + Debug.WriteLine(String.Format("WASAPI wanted {0} frames, supplied {1}", frameCount, actualFrameCount )); + }*/ + renderClient.ReleaseBuffer(actualFrameCount, AudioClientBufferFlags.None); + } + + #region IWavePlayer Members + + /// + /// Begin Playback + /// + public void Play() + { + if (playbackState != WasapiOutState.Playing) + { + playbackState = WasapiOutState.Playing; + playThreadEvent.Set(); + } + } + + /// + /// Stop playback and flush buffers + /// + public void Stop() + { + if (playbackState == WasapiOutState.Playing || playbackState == WasapiOutState.Paused) + { + playbackState = WasapiOutState.Stopping; + playThreadEvent.Set(); + } + } + + /// + /// Stop playback without flushing buffers + /// + public void Pause() + { + if (playbackState == WasapiOutState.Playing) + { + playbackState = WasapiOutState.Paused; + playThreadEvent.Set(); + } + } + + /// + /// Old init implementation. 
Use the func one + /// + /// + /// + [Obsolete] + public async Task Init(IWaveProvider provider) + { + Init(() => provider); + } + + /// + /// Initializes with a function to create the provider that is made on the playback thread + /// + /// Creates the wave provider + public void Init(Func waveProviderFunc) + { + if (isInitialized) throw new InvalidOperationException("Already Initialized"); + isInitialized = true; + this.waveProviderFunc = waveProviderFunc; + Task.Factory.StartNew(() => + { + PlayThread(); + }); + } + + /// + /// Initialize for playing the specified wave stream + /// + private IWaveProvider Init() + { + var waveProvider = waveProviderFunc(); + long latencyRefTimes = latencyMilliseconds*10000; + outputFormat = waveProvider.WaveFormat; + // first attempt uses the WaveFormat from the WaveStream + WaveFormatExtensible closestSampleRateFormat; + if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat)) + { + // Use closesSampleRateFormat (in sharedMode, it equals usualy to the audioClient.MixFormat) + // See documentation : http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx + // They say : "In shared mode, the audio engine always supports the mix format" + // The MixFormat is more likely to be a WaveFormatExtensible. + if (closestSampleRateFormat == null) + { + WaveFormat correctSampleRateFormat = audioClient.MixFormat; + /*WaveFormat.CreateIeeeFloatWaveFormat( + audioClient.MixFormat.SampleRate, + audioClient.MixFormat.Channels);*/ + + if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat)) + { + // Iterate from Worst to Best Format + WaveFormatExtensible[] bestToWorstFormats = + { + new WaveFormatExtensible( + outputFormat.SampleRate, 32, + outputFormat.Channels), + new WaveFormatExtensible( + outputFormat.SampleRate, 24, + outputFormat.Channels), + new WaveFormatExtensible( + outputFormat.SampleRate, 16, + outputFormat.Channels), + }; + + // Check from best Format to worst format ( Float32, Int24, Int16 ) + for (int i = 0; i < bestToWorstFormats.Length; i++) + { + correctSampleRateFormat = bestToWorstFormats[i]; + if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat)) + { + break; + } + correctSampleRateFormat = null; + } + + // If still null, then test on the PCM16, 2 channels + if (correctSampleRateFormat == null) + { + // Last Last Last Chance (Thanks WASAPI) + correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2); + if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat)) + { + throw new NotSupportedException("Can't find a supported format to use"); + } + } + } + outputFormat = correctSampleRateFormat; + } + else + { + outputFormat = closestSampleRateFormat; + } + + // just check that we can make it. + //using (new MediaFoundationResampler(waveProvider, outputFormat)) + { + } + this.resamplerNeeded = true; + } + else + { + resamplerNeeded = false; + } + + // Init Shared or Exclusive + if (shareMode == AudioClientShareMode.Shared) + { + // With EventCallBack and Shared, + audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0, + outputFormat, Guid.Empty); + + // Get back the effective latency from AudioClient. 
On Windows 10 it can be 0 + if (audioClient.StreamLatency > 0) + latencyMilliseconds = (int) (audioClient.StreamLatency/10000); + } + else + { + // With EventCallBack and Exclusive, both latencies must equals + audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes, + outputFormat, Guid.Empty); + } + + // Create the Wait Event Handle + frameEventWaitHandle = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS); + audioClient.SetEventHandle(frameEventWaitHandle); + + // Get the RenderClient + renderClient = audioClient.AudioRenderClient; + return waveProvider; + } + + /// + /// Playback State + /// + public PlaybackState PlaybackState + { + get + { + switch (playbackState) + { + case WasapiOutState.Playing: + return PlaybackState.Playing; + case WasapiOutState.Paused: + return PlaybackState.Paused; + default: + return PlaybackState.Stopped; + } + } + } + + #endregion + + /// + /// Dispose + /// + public void Dispose() + { + if (audioClient != null) + { + playbackState = WasapiOutState.Disposing; + playThreadEvent.Set(); + } + } + } + + /// + /// Some useful native methods for Windows 8/10 support ( https://msdn.microsoft.com/en-us/library/windows/desktop/hh802935(v=vs.85).aspx ) + /// + class NativeMethods + { + [DllImport("api-ms-win-core-synch-l1-2-0.dll", CharSet = CharSet.Unicode, ExactSpelling = false, + PreserveSig = true, SetLastError = true)] + internal static extern IntPtr CreateEventExW(IntPtr lpEventAttributes, IntPtr lpName, int dwFlags, + EventAccess dwDesiredAccess); + + + [DllImport("api-ms-win-core-handle-l1-1-0.dll", ExactSpelling = true, PreserveSig = true, SetLastError = true)] + public static extern bool CloseHandle(IntPtr hObject); + + [DllImport("api-ms-win-core-synch-l1-2-0.dll", ExactSpelling = true, PreserveSig = true, SetLastError = true)] + public static extern int WaitForSingleObjectEx(IntPtr hEvent, int milliseconds, bool bAlertable); + + /// + /// Enables Windows Store apps to access preexisting Component Object Model (COM) interfaces in the WASAPI family. + /// + /// A device interface ID for an audio device. This is normally retrieved from a DeviceInformation object or one of the methods of the MediaDevice class. + /// The IID of a COM interface in the WASAPI family, such as IAudioClient. + /// Interface-specific activation parameters. For more information, see the pActivationParams parameter in IMMDevice::Activate. + /// + /// + [DllImport("Mmdevapi.dll", ExactSpelling = true, PreserveSig = false)] + public static extern void ActivateAudioInterfaceAsync( + [In, MarshalAs(UnmanagedType.LPWStr)] string deviceInterfacePath, + [In, MarshalAs(UnmanagedType.LPStruct)] Guid riid, + [In] IntPtr activationParams, // n.b. 
is actually a pointer to a PropVariant, but we never need to pass anything but null + [In] IActivateAudioInterfaceCompletionHandler completionHandler, + out IActivateAudioInterfaceAsyncOperation activationOperation); + } + + // trying some ideas from Lucian Wischik (ljw1004): + // http://www.codeproject.com/Articles/460145/Recording-and-playing-PCM-audio-on-Windows-8-VB + + [Flags] + internal enum EventAccess + { + STANDARD_RIGHTS_REQUIRED = 0xF0000, + SYNCHRONIZE = 0x100000, + EVENT_ALL_ACCESS = STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0x3 + } + + internal class ActivateAudioInterfaceCompletionHandler : + IActivateAudioInterfaceCompletionHandler, IAgileObject + { + private Action initializeAction; + private TaskCompletionSource tcs = new TaskCompletionSource(); + + public ActivateAudioInterfaceCompletionHandler( + Action initializeAction) + { + this.initializeAction = initializeAction; + } + + public void ActivateCompleted(IActivateAudioInterfaceAsyncOperation activateOperation) + { + // First get the activation results, and see if anything bad happened then + int hr = 0; + object unk = null; + activateOperation.GetActivateResult(out hr, out unk); + if (hr != 0) + { + tcs.TrySetException(Marshal.GetExceptionForHR(hr, new IntPtr(-1))); + return; + } + + var pAudioClient = (IAudioClient2) unk; + + // Next try to call the client's (synchronous, blocking) initialization method. + try + { + initializeAction(pAudioClient); + tcs.SetResult(pAudioClient); + } + catch (Exception ex) + { + tcs.TrySetException(ex); + } + + + } + + + public TaskAwaiter GetAwaiter() + { + return tcs.Task.GetAwaiter(); + } + } + + [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("41D949AB-9862-444A-80F6-C261334DA5EB")] + interface IActivateAudioInterfaceCompletionHandler + { + //virtual HRESULT STDMETHODCALLTYPE ActivateCompleted(/*[in]*/ _In_ + // IActivateAudioInterfaceAsyncOperation *activateOperation) = 0; + void ActivateCompleted(IActivateAudioInterfaceAsyncOperation activateOperation); + } + + + [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("72A22D78-CDE4-431D-B8CC-843A71199B6D")] + interface IActivateAudioInterfaceAsyncOperation + { + //virtual HRESULT STDMETHODCALLTYPE GetActivateResult(/*[out]*/ _Out_ + // HRESULT *activateResult, /*[out]*/ _Outptr_result_maybenull_ IUnknown **activatedInterface) = 0; + void GetActivateResult([Out] out int activateResult, + [Out, MarshalAs(UnmanagedType.IUnknown)] out object activateInterface); + } + + + [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("726778CD-F60A-4eda-82DE-E47610CD78AA")] + interface IAudioClient2 + { + [PreserveSig] + int Initialize(AudioClientShareMode shareMode, + AudioClientStreamFlags streamFlags, + long hnsBufferDuration, // REFERENCE_TIME + long hnsPeriodicity, // REFERENCE_TIME + [In] WaveFormat pFormat, + [In] IntPtr audioSessionGuid); + + // ref Guid AudioSessionGuid + + /// + /// The GetBufferSize method retrieves the size (maximum capacity) of the endpoint buffer. 
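// For illustration only: a minimal sketch of how the declarations in this file are typically
// combined, activating IAudioClient2 for a device id and awaiting the completion handler
// declared above. The device id source (MediaDevice.GetDefaultAudioRenderId) and the empty
// initialize action are assumptions for the sketch, not something this patch adds; real code
// would configure the client inside that callback. Runs inside an async method.
var deviceId = Windows.Media.Devices.MediaDevice.GetDefaultAudioRenderId(
    Windows.Media.Devices.AudioDeviceRole.Default);     // assumed way to pick the render device
var handler = new ActivateAudioInterfaceCompletionHandler(
    ac2 => { /* optionally configure ac2 (e.g. SetClientProperties) before first use */ });
IActivateAudioInterfaceAsyncOperation activationOperation;
NativeMethods.ActivateAudioInterfaceAsync(
    deviceId,
    new Guid("726778CD-F60A-4eda-82DE-E47610CD78AA"),   // IID of IAudioClient2, as declared here
    IntPtr.Zero,                                        // no activation parameters needed
    handler,
    out activationOperation);
var audioClient2 = await handler;                       // completes once ActivateCompleted has run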
+ /// + int GetBufferSize(out uint bufferSize); + + [return: MarshalAs(UnmanagedType.I8)] + long GetStreamLatency(); + + int GetCurrentPadding(out int currentPadding); + + [PreserveSig] + int IsFormatSupported( + AudioClientShareMode shareMode, + [In] WaveFormat pFormat, + out IntPtr closestMatchFormat); + + int GetMixFormat(out IntPtr deviceFormatPointer); + + // REFERENCE_TIME is 64 bit int + int GetDevicePeriod(out long defaultDevicePeriod, out long minimumDevicePeriod); + + int Start(); + + int Stop(); + + int Reset(); + + int SetEventHandle(IntPtr eventHandle); + + /// + /// The GetService method accesses additional services from the audio client object. + /// + /// The interface ID for the requested service. + /// Pointer to a pointer variable into which the method writes the address of an instance of the requested interface. + [PreserveSig] + int GetService([In, MarshalAs(UnmanagedType.LPStruct)] Guid interfaceId, + [Out, MarshalAs(UnmanagedType.IUnknown)] out object interfacePointer); + + //virtual HRESULT STDMETHODCALLTYPE IsOffloadCapable(/*[in]*/ _In_ + // AUDIO_STREAM_CATEGORY Category, /*[in]*/ _Out_ BOOL *pbOffloadCapable) = 0; + void IsOffloadCapable(int category, out bool pbOffloadCapable); + //virtual HRESULT STDMETHODCALLTYPE SetClientProperties(/*[in]*/ _In_ + // const AudioClientProperties *pProperties) = 0; + void SetClientProperties([In] IntPtr pProperties); + // TODO: try this: void SetClientProperties([In, MarshalAs(UnmanagedType.LPStruct)] AudioClientProperties pProperties); + //virtual HRESULT STDMETHODCALLTYPE GetBufferSizeLimits(/*[in]*/ _In_ + // const WAVEFORMATEX *pFormat, /*[in]*/ _In_ BOOL bEventDriven, /*[in]*/ + // _Out_ REFERENCE_TIME *phnsMinBufferDuration, /*[in]*/ _Out_ + // REFERENCE_TIME *phnsMaxBufferDuration) = 0; + void GetBufferSizeLimits(IntPtr pFormat, bool bEventDriven, + out long phnsMinBufferDuration, out long phnsMaxBufferDuration); + } + + [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("94ea2b94-e9cc-49e0-c0ff-ee64ca8f5b90")] + interface IAgileObject + { + + } + + +} diff --git a/NAudio.Win8/Wave/WaveOutputs/WaveFileWriterRT.cs b/NAudio.Universal/Wave/WaveOutputs/WaveFileWriterRT.cs similarity index 97% rename from NAudio.Win8/Wave/WaveOutputs/WaveFileWriterRT.cs rename to NAudio.Universal/Wave/WaveOutputs/WaveFileWriterRT.cs index 7f839638..df62e554 100644 --- a/NAudio.Win8/Wave/WaveOutputs/WaveFileWriterRT.cs +++ b/NAudio.Universal/Wave/WaveOutputs/WaveFileWriterRT.cs @@ -1,489 +1,489 @@ -using NAudio.Wave.SampleProviders; -using System; -using System.IO; -using System.Threading; -using System.Threading.Tasks; -using Windows.Storage; - -namespace NAudio.Wave -{ - /// - /// This class writes WAV data to a .wav file on disk - /// - public class WaveFileWriterRT : Stream - { - private Stream outStream; - private readonly BinaryWriter writer; - private long dataSizePos; - private long factSampleCountPos; - private long dataChunkSize; - private readonly WaveFormat format; - private string filename; - - // Protects WriteAsync and FlushAsync from overlapping - private readonly Semaphore asyncOperationsLock = new Semaphore(1, 100); - - /// - /// Creates a 16 bit Wave File from an ISampleProvider - /// BEWARE: the source provider must not return data indefinitely - /// - /// The filename to write to - /// The source sample provider - public static Task CreateWaveFile16Async(string filename, ISampleProvider sourceProvider) - { - return CreateWaveFileAsync(filename, new SampleToWaveProvider16(sourceProvider)); - } - - /// - 
/// Creates a Wave file by reading all the data from a WaveProvider - /// BEWARE: the WaveProvider MUST return 0 from its Read method when it is finished, - /// or the Wave File will grow indefinitely. - /// - /// The filename to use - /// The source WaveProvider - public static async Task CreateWaveFileAsync(string filename, IWaveProvider sourceProvider) - { - StorageFile fileOperation = await StorageFile.GetFileFromPathAsync(filename); - Stream fileStream = await fileOperation.OpenStreamForWriteAsync(); - - using (var writer = new WaveFileWriterRT(fileStream, sourceProvider.WaveFormat)) - { - writer.filename = filename; - long outputLength = 0; - var buffer = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond * 4]; - while (true) - { - int bytesRead = sourceProvider.Read(buffer, 0, buffer.Length); - if (bytesRead == 0) - { - // end of source provider - break; - } - outputLength += bytesRead; - // Write will throw exception if WAV file becomes too large - writer.Write(buffer, 0, bytesRead); - } - } - } - - /// - /// WaveFileWriterRT that actually writes to a stream - /// - /// Stream to be written to - /// Wave format to use - public WaveFileWriterRT(Stream outStream, WaveFormat format) - { - this.outStream = outStream; - this.format = format; - this.writer = new BinaryWriter(outStream, System.Text.Encoding.UTF8); - this.writer.Write(System.Text.Encoding.UTF8.GetBytes("RIFF")); - this.writer.Write((int)0); // placeholder - this.writer.Write(System.Text.Encoding.UTF8.GetBytes("WAVE")); - - this.writer.Write(System.Text.Encoding.UTF8.GetBytes("fmt ")); - format.Serialize(this.writer); - - CreateFactChunk(); - WriteDataChunkHeader(); - } - - private void WriteDataChunkHeader() - { - this.writer.Write(System.Text.Encoding.UTF8.GetBytes("data")); - dataSizePos = this.outStream.Position; - this.writer.Write((int)0); // placeholder - } - - private void CreateFactChunk() - { - if (HasFactChunk()) - { - this.writer.Write(System.Text.Encoding.UTF8.GetBytes("fact")); - this.writer.Write((int)4); - factSampleCountPos = this.outStream.Position; - this.writer.Write((int)0); // number of samples - } - } - - private bool HasFactChunk() - { - return format.Encoding != WaveFormatEncoding.Pcm && - format.BitsPerSample != 0; - } - - /// - /// The wave file name or null if not applicable - /// - public string Filename - { - get { return filename; } - } - - /// - /// Number of bytes of audio in the data chunk - /// - public override long Length - { - get { return dataChunkSize; } - } - - /// - /// WaveFormat of this wave file - /// - public WaveFormat WaveFormat - { - get { return format; } - } - - /// - /// Returns false: Cannot read from a WaveFileWriterRT - /// - public override bool CanRead - { - get { return false; } - } - - /// - /// Returns true: Can write to a WaveFileWriterRT - /// - public override bool CanWrite - { - get { return true; } - } - - /// - /// Returns false: Cannot seek within a WaveFileWriterRT - /// - public override bool CanSeek - { - get { return false; } - } - - /// - /// Returns false: Cannot timeout within a WaveFileWriterRT - /// - public override bool CanTimeout - { - get { return false; } - } - - /// - /// CopyToAsync is not supported for a WaveFileWriterRT - /// - public override Task CopyToAsync(Stream destination, int bufferSize, CancellationToken cancellationToken) - { - throw new NotImplementedException("Cannot copy from a WaveFileWriterRT"); - } - - /// - /// Read is not supported for a WaveFileWriterRT - /// - public override int Read(byte[] buffer, int offset, int 
count) - { - throw new InvalidOperationException("Cannot read from a WaveFileWriterRT"); - } - - /// - /// ReadAsync is not supported for a WaveFileWriterRT - /// - public override Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) - { - throw new InvalidOperationException("Cannot read from a WaveFileWriterRT"); - } - - /// - /// ReadByte is not supported for a WaveFileWriterRT - /// - public override int ReadByte() - { - throw new InvalidOperationException("Cannot read from a WaveFileWriterRT"); - } - - /// - /// Seek is not supported for a WaveFileWriterRT - /// - public override long Seek(long offset, SeekOrigin origin) - { - throw new InvalidOperationException("Cannot seek within a WaveFileWriterRT"); - } - - /// - /// SetLength is not supported for WaveFileWriterRT - /// - /// - public override void SetLength(long value) - { - throw new InvalidOperationException("Cannot set length of a WaveFileWriterRT"); - } - - /// - /// Gets the Position in the WaveFile (i.e. number of bytes written so far) - /// - public override long Position - { - get { return dataChunkSize; } - set { throw new InvalidOperationException("Repositioning a WaveFileWriterRT is not supported"); } - } - - /// - /// Appends bytes to the WaveFile (assumes they are already in the correct format) - /// - /// the buffer containing the wave data - /// the offset from which to start writing - /// the number of bytes to write - public override void Write(byte[] data, int offset, int count) - { - if (outStream.Length + count > UInt32.MaxValue) - throw new ArgumentException("WAV file too large", "count"); - outStream.Write(data, offset, count); - dataChunkSize += count; - } - - /// - /// Appends bytes to the WaveFile (assumes they are already in the correct format) - /// - /// the buffer containing the wave data - /// the offset from which to start writing - /// the number of bytes to write - /// Cancellation token - public override Task WriteAsync(byte[] buffer, int offset, int count, - CancellationToken cancellationToken) - { - return Task.Run(() => - { - try - { - asyncOperationsLock.WaitOne(); - Write(buffer, offset, count); - } - finally - { - asyncOperationsLock.Release(); - } - }); - } - - /// - /// WriteByte is not supported for a WaveFileWriterRT - /// Use instead - /// - /// value to write - public override void WriteByte(byte value) - { - throw new NotImplementedException(); - } - - private readonly byte[] value24 = new byte[3]; // keep this around to save us creating it every time - - /// - /// Writes a single sample to the Wave file - /// - /// the sample to write (assumed floating point with 1.0f as max value) - public void WriteSample(float sample) - { - if (WaveFormat.BitsPerSample == 16) - { - writer.Write((Int16)(Int16.MaxValue * sample)); - dataChunkSize += 2; - } - else if (WaveFormat.BitsPerSample == 24) - { - var value = BitConverter.GetBytes((Int32)(Int32.MaxValue * sample)); - value24[0] = value[1]; - value24[1] = value[2]; - value24[2] = value[3]; - writer.Write(value24); - dataChunkSize += 3; - } - else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.Extensible) - { - writer.Write(UInt16.MaxValue * (Int32)sample); - dataChunkSize += 4; - } - else if (WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat) - { - writer.Write(sample); - dataChunkSize += 4; - } - else - { - throw new InvalidOperationException("Only 16, 24 or 32 bit PCM or IEEE float audio data supported"); - } - } - - /// - /// Writes 32 bit floating point samples to the 
Wave file - /// They will be converted to the appropriate bit depth depending on the WaveFormat of the WAV file - /// - /// The buffer containing the floating point samples - /// The offset from which to start writing - /// The number of floating point samples to write - public void WriteSamples(float[] samples, int offset, int count) - { - for (int n = 0; n < count; n++) - { - WriteSample(samples[offset + n]); - } - } - - /// - /// Writes 16 bit samples to the Wave file - /// - /// The buffer containing the 16 bit samples - /// The offset from which to start writing - /// The number of 16 bit samples to write - public void WriteSamples(short[] samples, int offset, int count) - { - // 16 bit PCM data - if (WaveFormat.BitsPerSample == 16) - { - for (int sample = 0; sample < count; sample++) - { - writer.Write(samples[sample + offset]); - } - dataChunkSize += (count * 2); - } - // 24 bit PCM data - else if (WaveFormat.BitsPerSample == 24) - { - byte[] value; - for (int sample = 0; sample < count; sample++) - { - value = BitConverter.GetBytes(UInt16.MaxValue * (Int32)samples[sample + offset]); - value24[0] = value[1]; - value24[1] = value[2]; - value24[2] = value[3]; - writer.Write(value24); - } - dataChunkSize += (count * 3); - } - // 32 bit PCM data - else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.Extensible) - { - for (int sample = 0; sample < count; sample++) - { - writer.Write(UInt16.MaxValue * (Int32)samples[sample + offset]); - } - dataChunkSize += (count * 4); - } - // IEEE float data - else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat) - { - for (int sample = 0; sample < count; sample++) - { - writer.Write((float)samples[sample + offset] / (float)(Int16.MaxValue + 1)); - } - dataChunkSize += (count * 4); - } - else - { - throw new InvalidOperationException("Only 16, 24 or 32 bit PCM or IEEE float audio data supported"); - } - } - - /// - /// Ensures data is written to disk - /// - public override void Flush() - { - var pos = writer.BaseStream.Position; - UpdateHeader(writer); - writer.BaseStream.Position = pos; - } - - /// - /// Ensures data is written to disk - /// - public override Task FlushAsync(CancellationToken cancellationToken) - { - return Task.Run(() => - { - try - { - asyncOperationsLock.WaitOne(); - Flush(); - } - finally - { - asyncOperationsLock.Release(); - } - }); - } - - #region IDisposable Members - - /// - /// Actually performs the close,making sure the header contains the correct data - /// - /// True if called from Dispose - protected override void Dispose(bool disposing) - { - if (disposing) - { - if (outStream != null) - { - try - { - UpdateHeader(writer); - } - finally - { - // in a finally block as we don't want the FileStream to run its disposer in - // the GC thread if the code above caused an IOException (e.g. 
due to disk full) - outStream.Dispose(); // will close the underlying base stream - outStream = null; - asyncOperationsLock.Dispose(); - } - } - } - } - - /// - /// Updates the header with file size information - /// - protected virtual void UpdateHeader(BinaryWriter writer) - { - writer.Flush(); - UpdateRiffChunk(writer); - UpdateFactChunk(writer); - UpdateDataChunk(writer); - } - - private void UpdateDataChunk(BinaryWriter writer) - { - writer.Seek((int)dataSizePos, SeekOrigin.Begin); - writer.Write((UInt32)dataChunkSize); - } - - private void UpdateRiffChunk(BinaryWriter writer) - { - writer.Seek(4, SeekOrigin.Begin); - writer.Write((UInt32)(outStream.Length - 8)); - } - - private void UpdateFactChunk(BinaryWriter writer) - { - if (HasFactChunk()) - { - int bitsPerSample = (format.BitsPerSample * format.Channels); - if (bitsPerSample != 0) - { - writer.Seek((int)factSampleCountPos, SeekOrigin.Begin); - - writer.Write((int)((dataChunkSize * 8) / bitsPerSample)); - } - } - } - - /// - /// Finaliser - should only be called if the user forgot to close this WaveFileWriterRT - /// - ~WaveFileWriterRT() - { - System.Diagnostics.Debug.Assert(false, "WaveFileWriterRT was not disposed"); - Dispose(false); - } - - #endregion - } -} +using NAudio.Wave.SampleProviders; +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using Windows.Storage; + +namespace NAudio.Wave +{ + /// + /// This class writes WAV data to a .wav file on disk + /// + public class WaveFileWriterRT : Stream + { + private Stream outStream; + private readonly BinaryWriter writer; + private long dataSizePos; + private long factSampleCountPos; + private long dataChunkSize; + private readonly WaveFormat format; + private string filename; + + // Protects WriteAsync and FlushAsync from overlapping + private readonly Semaphore asyncOperationsLock = new Semaphore(1, 100); + + /// + /// Creates a 16 bit Wave File from an ISampleProvider + /// BEWARE: the source provider must not return data indefinitely + /// + /// The filename to write to + /// The source sample provider + public static Task CreateWaveFile16Async(string filename, ISampleProvider sourceProvider) + { + return CreateWaveFileAsync(filename, new SampleToWaveProvider16(sourceProvider)); + } + + /// + /// Creates a Wave file by reading all the data from a WaveProvider + /// BEWARE: the WaveProvider MUST return 0 from its Read method when it is finished, + /// or the Wave File will grow indefinitely. 
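// For illustration only: a minimal usage sketch, assuming a Universal app's LocalFolder and
// arbitrary tone settings. The key point from the remark above is that the source must be
// finite, so the endless SignalGenerator is capped with an OffsetSampleProvider to guarantee
// that Read eventually returns 0; the file is created up front because CreateWaveFileAsync
// opens its target via StorageFile.GetFileFromPathAsync. Runs inside an async method.
var tone = new OffsetSampleProvider(new SignalGenerator { Frequency = 440, Gain = 0.2 })
{
    Take = TimeSpan.FromSeconds(5)   // limits the otherwise endless generator
};
var file = await Windows.Storage.ApplicationData.Current.LocalFolder
    .CreateFileAsync("tone.wav", Windows.Storage.CreationCollisionOption.ReplaceExisting);
await WaveFileWriterRT.CreateWaveFile16Async(file.Path, tone);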
+ /// + /// The filename to use + /// The source WaveProvider + public static async Task CreateWaveFileAsync(string filename, IWaveProvider sourceProvider) + { + StorageFile fileOperation = await StorageFile.GetFileFromPathAsync(filename); + Stream fileStream = await fileOperation.OpenStreamForWriteAsync(); + + using (var writer = new WaveFileWriterRT(fileStream, sourceProvider.WaveFormat)) + { + writer.filename = filename; + long outputLength = 0; + var buffer = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond * 4]; + while (true) + { + int bytesRead = sourceProvider.Read(buffer, 0, buffer.Length); + if (bytesRead == 0) + { + // end of source provider + break; + } + outputLength += bytesRead; + // Write will throw exception if WAV file becomes too large + writer.Write(buffer, 0, bytesRead); + } + } + } + + /// + /// WaveFileWriterRT that actually writes to a stream + /// + /// Stream to be written to + /// Wave format to use + public WaveFileWriterRT(Stream outStream, WaveFormat format) + { + this.outStream = outStream; + this.format = format; + this.writer = new BinaryWriter(outStream, System.Text.Encoding.UTF8); + this.writer.Write(System.Text.Encoding.UTF8.GetBytes("RIFF")); + this.writer.Write((int)0); // placeholder + this.writer.Write(System.Text.Encoding.UTF8.GetBytes("WAVE")); + + this.writer.Write(System.Text.Encoding.UTF8.GetBytes("fmt ")); + format.Serialize(this.writer); + + CreateFactChunk(); + WriteDataChunkHeader(); + } + + private void WriteDataChunkHeader() + { + this.writer.Write(System.Text.Encoding.UTF8.GetBytes("data")); + dataSizePos = this.outStream.Position; + this.writer.Write((int)0); // placeholder + } + + private void CreateFactChunk() + { + if (HasFactChunk()) + { + this.writer.Write(System.Text.Encoding.UTF8.GetBytes("fact")); + this.writer.Write((int)4); + factSampleCountPos = this.outStream.Position; + this.writer.Write((int)0); // number of samples + } + } + + private bool HasFactChunk() + { + return format.Encoding != WaveFormatEncoding.Pcm && + format.BitsPerSample != 0; + } + + /// + /// The wave file name or null if not applicable + /// + public string Filename + { + get { return filename; } + } + + /// + /// Number of bytes of audio in the data chunk + /// + public override long Length + { + get { return dataChunkSize; } + } + + /// + /// WaveFormat of this wave file + /// + public WaveFormat WaveFormat + { + get { return format; } + } + + /// + /// Returns false: Cannot read from a WaveFileWriterRT + /// + public override bool CanRead + { + get { return false; } + } + + /// + /// Returns true: Can write to a WaveFileWriterRT + /// + public override bool CanWrite + { + get { return true; } + } + + /// + /// Returns false: Cannot seek within a WaveFileWriterRT + /// + public override bool CanSeek + { + get { return false; } + } + + /// + /// Returns false: Cannot timeout within a WaveFileWriterRT + /// + public override bool CanTimeout + { + get { return false; } + } + + /// + /// CopyToAsync is not supported for a WaveFileWriterRT + /// + public override Task CopyToAsync(Stream destination, int bufferSize, CancellationToken cancellationToken) + { + throw new NotImplementedException("Cannot copy from a WaveFileWriterRT"); + } + + /// + /// Read is not supported for a WaveFileWriterRT + /// + public override int Read(byte[] buffer, int offset, int count) + { + throw new InvalidOperationException("Cannot read from a WaveFileWriterRT"); + } + + /// + /// ReadAsync is not supported for a WaveFileWriterRT + /// + public override Task 
ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) + { + throw new InvalidOperationException("Cannot read from a WaveFileWriterRT"); + } + + /// + /// ReadByte is not supported for a WaveFileWriterRT + /// + public override int ReadByte() + { + throw new InvalidOperationException("Cannot read from a WaveFileWriterRT"); + } + + /// + /// Seek is not supported for a WaveFileWriterRT + /// + public override long Seek(long offset, SeekOrigin origin) + { + throw new InvalidOperationException("Cannot seek within a WaveFileWriterRT"); + } + + /// + /// SetLength is not supported for WaveFileWriterRT + /// + /// + public override void SetLength(long value) + { + throw new InvalidOperationException("Cannot set length of a WaveFileWriterRT"); + } + + /// + /// Gets the Position in the WaveFile (i.e. number of bytes written so far) + /// + public override long Position + { + get { return dataChunkSize; } + set { throw new InvalidOperationException("Repositioning a WaveFileWriterRT is not supported"); } + } + + /// + /// Appends bytes to the WaveFile (assumes they are already in the correct format) + /// + /// the buffer containing the wave data + /// the offset from which to start writing + /// the number of bytes to write + public override void Write(byte[] data, int offset, int count) + { + if (outStream.Length + count > UInt32.MaxValue) + throw new ArgumentException("WAV file too large", "count"); + outStream.Write(data, offset, count); + dataChunkSize += count; + } + + /// + /// Appends bytes to the WaveFile (assumes they are already in the correct format) + /// + /// the buffer containing the wave data + /// the offset from which to start writing + /// the number of bytes to write + /// Cancellation token + public override Task WriteAsync(byte[] buffer, int offset, int count, + CancellationToken cancellationToken) + { + return Task.Run(() => + { + try + { + asyncOperationsLock.WaitOne(); + Write(buffer, offset, count); + } + finally + { + asyncOperationsLock.Release(); + } + }); + } + + /// + /// WriteByte is not supported for a WaveFileWriterRT + /// Use instead + /// + /// value to write + public override void WriteByte(byte value) + { + throw new NotImplementedException(); + } + + private readonly byte[] value24 = new byte[3]; // keep this around to save us creating it every time + + /// + /// Writes a single sample to the Wave file + /// + /// the sample to write (assumed floating point with 1.0f as max value) + public void WriteSample(float sample) + { + if (WaveFormat.BitsPerSample == 16) + { + writer.Write((Int16)(Int16.MaxValue * sample)); + dataChunkSize += 2; + } + else if (WaveFormat.BitsPerSample == 24) + { + var value = BitConverter.GetBytes((Int32)(Int32.MaxValue * sample)); + value24[0] = value[1]; + value24[1] = value[2]; + value24[2] = value[3]; + writer.Write(value24); + dataChunkSize += 3; + } + else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.Extensible) + { + writer.Write(UInt16.MaxValue * (Int32)sample); + dataChunkSize += 4; + } + else if (WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat) + { + writer.Write(sample); + dataChunkSize += 4; + } + else + { + throw new InvalidOperationException("Only 16, 24 or 32 bit PCM or IEEE float audio data supported"); + } + } + + /// + /// Writes 32 bit floating point samples to the Wave file + /// They will be converted to the appropriate bit depth depending on the WaveFormat of the WAV file + /// + /// The buffer containing the floating point samples + /// The 
offset from which to start writing + /// The number of floating point samples to write + public void WriteSamples(float[] samples, int offset, int count) + { + for (int n = 0; n < count; n++) + { + WriteSample(samples[offset + n]); + } + } + + /// + /// Writes 16 bit samples to the Wave file + /// + /// The buffer containing the 16 bit samples + /// The offset from which to start writing + /// The number of 16 bit samples to write + public void WriteSamples(short[] samples, int offset, int count) + { + // 16 bit PCM data + if (WaveFormat.BitsPerSample == 16) + { + for (int sample = 0; sample < count; sample++) + { + writer.Write(samples[sample + offset]); + } + dataChunkSize += (count * 2); + } + // 24 bit PCM data + else if (WaveFormat.BitsPerSample == 24) + { + byte[] value; + for (int sample = 0; sample < count; sample++) + { + value = BitConverter.GetBytes(UInt16.MaxValue * (Int32)samples[sample + offset]); + value24[0] = value[1]; + value24[1] = value[2]; + value24[2] = value[3]; + writer.Write(value24); + } + dataChunkSize += (count * 3); + } + // 32 bit PCM data + else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.Extensible) + { + for (int sample = 0; sample < count; sample++) + { + writer.Write(UInt16.MaxValue * (Int32)samples[sample + offset]); + } + dataChunkSize += (count * 4); + } + // IEEE float data + else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat) + { + for (int sample = 0; sample < count; sample++) + { + writer.Write((float)samples[sample + offset] / (float)(Int16.MaxValue + 1)); + } + dataChunkSize += (count * 4); + } + else + { + throw new InvalidOperationException("Only 16, 24 or 32 bit PCM or IEEE float audio data supported"); + } + } + + /// + /// Ensures data is written to disk + /// + public override void Flush() + { + var pos = writer.BaseStream.Position; + UpdateHeader(writer); + writer.BaseStream.Position = pos; + } + + /// + /// Ensures data is written to disk + /// + public override Task FlushAsync(CancellationToken cancellationToken) + { + return Task.Run(() => + { + try + { + asyncOperationsLock.WaitOne(); + Flush(); + } + finally + { + asyncOperationsLock.Release(); + } + }); + } + + #region IDisposable Members + + /// + /// Actually performs the close,making sure the header contains the correct data + /// + /// True if called from Dispose + protected override void Dispose(bool disposing) + { + if (disposing) + { + if (outStream != null) + { + try + { + UpdateHeader(writer); + } + finally + { + // in a finally block as we don't want the FileStream to run its disposer in + // the GC thread if the code above caused an IOException (e.g. 
due to disk full) + outStream.Dispose(); // will close the underlying base stream + outStream = null; + asyncOperationsLock.Dispose(); + } + } + } + } + + /// + /// Updates the header with file size information + /// + protected virtual void UpdateHeader(BinaryWriter writer) + { + writer.Flush(); + UpdateRiffChunk(writer); + UpdateFactChunk(writer); + UpdateDataChunk(writer); + } + + private void UpdateDataChunk(BinaryWriter writer) + { + writer.Seek((int)dataSizePos, SeekOrigin.Begin); + writer.Write((UInt32)dataChunkSize); + } + + private void UpdateRiffChunk(BinaryWriter writer) + { + writer.Seek(4, SeekOrigin.Begin); + writer.Write((UInt32)(outStream.Length - 8)); + } + + private void UpdateFactChunk(BinaryWriter writer) + { + if (HasFactChunk()) + { + int bitsPerSample = (format.BitsPerSample * format.Channels); + if (bitsPerSample != 0) + { + writer.Seek((int)factSampleCountPos, SeekOrigin.Begin); + + writer.Write((int)((dataChunkSize * 8) / bitsPerSample)); + } + } + } + + /// + /// Finaliser - should only be called if the user forgot to close this WaveFileWriterRT + /// + ~WaveFileWriterRT() + { + System.Diagnostics.Debug.Assert(false, "WaveFileWriterRT was not disposed"); + Dispose(false); + } + + #endregion + } +} diff --git a/NAudio.Win8.sln b/NAudio.Win8.sln deleted file mode 100644 index cf2de0a1..00000000 --- a/NAudio.Win8.sln +++ /dev/null @@ -1,68 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio 14 -VisualStudioVersion = 14.0.25420.1 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{F91DCE20-90A9-49A8-8773-CACECE651F37}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "NAudio.Win8", "NAudio.Win8\NAudio.Win8.csproj", "{90543F38-E793-40C3-972D-3271EBF1DEF4}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "NAudioWin8Demo", "NAudioWin8Demo\NAudioWin8Demo.csproj", "{03A0E22E-5B00-4B87-9CDF-20CC121DCF03}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Debug|ARM = Debug|ARM - Debug|x64 = Debug|x64 - Debug|x86 = Debug|x86 - Release|Any CPU = Release|Any CPU - Release|ARM = Release|ARM - Release|x64 = Release|x64 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|Any CPU.Build.0 = Debug|Any CPU - {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|ARM.ActiveCfg = Debug|ARM - {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|ARM.Build.0 = Debug|ARM - {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|x64.ActiveCfg = Debug|x64 - {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|x64.Build.0 = Debug|x64 - {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|x86.ActiveCfg = Debug|x86 - {90543F38-E793-40C3-972D-3271EBF1DEF4}.Debug|x86.Build.0 = Debug|x86 - {90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|Any CPU.ActiveCfg = Release|Any CPU - {90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|Any CPU.Build.0 = Release|Any CPU - {90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|ARM.ActiveCfg = Release|ARM - {90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|ARM.Build.0 = Release|ARM - {90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|x64.ActiveCfg = Release|x64 - {90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|x64.Build.0 = Release|x64 - {90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|x86.ActiveCfg = Release|x86 - 
{90543F38-E793-40C3-972D-3271EBF1DEF4}.Release|x86.Build.0 = Release|x86 - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|Any CPU.Build.0 = Debug|Any CPU - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|Any CPU.Deploy.0 = Debug|Any CPU - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|ARM.ActiveCfg = Debug|ARM - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|ARM.Build.0 = Debug|ARM - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|ARM.Deploy.0 = Debug|ARM - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|x64.ActiveCfg = Debug|x64 - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|x64.Build.0 = Debug|x64 - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|x64.Deploy.0 = Debug|x64 - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|x86.ActiveCfg = Debug|x86 - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|x86.Build.0 = Debug|x86 - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Debug|x86.Deploy.0 = Debug|x86 - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|Any CPU.ActiveCfg = Release|Any CPU - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|Any CPU.Build.0 = Release|Any CPU - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|Any CPU.Deploy.0 = Release|Any CPU - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|ARM.ActiveCfg = Release|ARM - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|ARM.Build.0 = Release|ARM - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|ARM.Deploy.0 = Release|ARM - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|x64.ActiveCfg = Release|x64 - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|x64.Build.0 = Release|x64 - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|x64.Deploy.0 = Release|x64 - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|x86.ActiveCfg = Release|x86 - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|x86.Build.0 = Release|x86 - {03A0E22E-5B00-4B87-9CDF-20CC121DCF03}.Release|x86.Deploy.0 = Release|x86 - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal diff --git a/NAudio.Win8.sln.DotSettings b/NAudio.Win8.sln.DotSettings deleted file mode 100644 index e3bb2d5c..00000000 --- a/NAudio.Win8.sln.DotSettings +++ /dev/null @@ -1,6 +0,0 @@ - - System - System.Linq - <Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /> - <Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /> - <Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /> \ No newline at end of file diff --git a/NAudio.Win8/NAudio.Win8.csproj b/NAudio.Win8/NAudio.Win8.csproj deleted file mode 100644 index 035339eb..00000000 --- a/NAudio.Win8/NAudio.Win8.csproj +++ /dev/null @@ -1,837 +0,0 @@ - - - - - Debug - AnyCPU - 8.0.30703 - 2.0 - {90543F38-E793-40C3-972D-3271EBF1DEF4} - Library - Properties - NAudio.Win8 - NAudio.Win8 - en-US - 512 - {BC8A1FFA-BEE3-4634-8014-F334798102B3};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC} - 8.1 - 12 - - - - true - full - false - bin\Debug\ - DEBUG;TRACE;NETFX_CORE - prompt - 4 - false - bin\Debug\NAudio.Win8.XML - - - pdbonly - true - bin\Release\ - TRACE;NETFX_CORE - prompt - 4 - false - bin\Release\NAudio.Win8.XML - - - true - bin\ARM\Debug\ - DEBUG;TRACE;NETFX_CORE - ;2008 - full - ARM - false - prompt - true - - - bin\ARM\Release\ - TRACE;NETFX_CORE - true - ;2008 - pdbonly - ARM - false - prompt - true - - - true - bin\x64\Debug\ - DEBUG;TRACE;NETFX_CORE - ;2008 - full - x64 - false - prompt - true - - - bin\x64\Release\ - TRACE;NETFX_CORE - true - ;2008 - pdbonly - x64 - false - prompt - true - - - true - bin\x86\Debug\ - DEBUG;TRACE;NETFX_CORE - ;2008 - 
full - x86 - false - prompt - true - - - bin\x86\Release\ - TRACE;NETFX_CORE - true - ;2008 - pdbonly - x86 - false - prompt - true - - - - - - - Codecs\ALawDecoder.cs - - - Codecs\ALawEncoder.cs - - - Codecs\G722Codec.cs - - - Codecs\MuLawDecoder.cs - - - Codecs\MuLawEncoder.cs - - - CoreAudioApi\AudioCaptureClient.cs - - - CoreAudioApi\AudioClient.cs - - - CoreAudioApi\AudioClientBufferFlags.cs - - - CoreAudioApi\AudioClientProperties.cs - - - CoreAudioApi\AudioClientShareMode.cs - - - CoreAudioApi\AudioClientStreamFlags.cs - - - CoreAudioApi\AudioClientStreamOptions.cs - - - CoreAudioApi\AudioClockClient.cs - - - CoreAudioApi\AudioEndpointVolume.cs - - - CoreAudioApi\AudioEndpointVolumeCallback.cs - - - CoreAudioApi\AudioEndpointVolumeChannel.cs - - - CoreAudioApi\AudioEndpointVolumeChannels.cs - - - CoreAudioApi\AudioEndpointVolumeNotificationDelegate.cs - - - CoreAudioApi\AudioEndpointVolumeStepInformation.cs - - - CoreAudioApi\AudioEndpointVolumeVolumeRange.cs - - - CoreAudioApi\AudioMeterInformation.cs - - - CoreAudioApi\AudioMeterInformationChannels.cs - - - CoreAudioApi\AudioRenderClient.cs - - - CoreAudioApi\AudioSessionControl.cs - - - CoreAudioApi\AudioSessionEventsCallback.cs - - - CoreAudioApi\AudioSessionManager.cs - - - CoreAudioApi\AudioSessionNotification.cs - - - CoreAudioApi\AudioStreamCategory.cs - - - CoreAudioApi\AudioStreamVolume.cs - - - CoreAudioApi\AudioVolumeNotificationData.cs - - - CoreAudioApi\DataFlow.cs - - - CoreAudioApi\DeviceState.cs - - - CoreAudioApi\EEndpointHardwareSupport.cs - - - CoreAudioApi\Interfaces\AudioVolumeNotificationDataStruct.cs - - - CoreAudioApi\Interfaces\Blob.cs - - - CoreAudioApi\Interfaces\ClsCtx.cs - - - CoreAudioApi\Interfaces\ErrorCodes.cs - - - CoreAudioApi\Interfaces\IAudioCaptureClient.cs - - - CoreAudioApi\Interfaces\IAudioClient.cs - - - CoreAudioApi\Interfaces\IAudioClock2.cs - - - CoreAudioApi\Interfaces\IAudioEndpointVolume.cs - - - CoreAudioApi\Interfaces\IAudioEndpointVolumeCallback.cs - - - CoreAudioApi\Interfaces\IAudioMeterInformation.cs - - - CoreAudioApi\Interfaces\IAudioRenderClient.cs - - - CoreAudioApi\Interfaces\IAudioSessionControl.cs - - - CoreAudioApi\Interfaces\IAudioSessionEnumerator.cs - - - CoreAudioApi\Interfaces\IAudioSessionEvents.cs - - - CoreAudioApi\Interfaces\IAudioSessionEventsHandler.cs - - - CoreAudioApi\Interfaces\IAudioSessionManager.cs - - - CoreAudioApi\Interfaces\IAudioSessionNotification.cs - - - CoreAudioApi\Interfaces\IAudioStreamVolume.cs - - - CoreAudioApi\Interfaces\IMMDevice.cs - - - CoreAudioApi\Interfaces\IMMDeviceCollection.cs - - - CoreAudioApi\Interfaces\IMMDeviceEnumerator.cs - - - CoreAudioApi\Interfaces\IMMEndpoint.cs - - - CoreAudioApi\Interfaces\IMMNotificationClient.cs - - - CoreAudioApi\Interfaces\IPropertyStore.cs - - - CoreAudioApi\Interfaces\ISimpleAudioVolume.cs - - - CoreAudioApi\Interfaces\MMDeviceEnumeratorComObject.cs - - - CoreAudioApi\Interfaces\StorageAccessMode.cs - - - CoreAudioApi\MMDevice.cs - - - CoreAudioApi\MMDeviceCollection.cs - - - CoreAudioApi\MMDeviceEnumerator.cs - - - CoreAudioApi\PropertyKey.cs - - - CoreAudioApi\PropertyKeys.cs - - - CoreAudioApi\PropertyStore.cs - - - CoreAudioApi\PropertyStoreProperty.cs - - - CoreAudioApi\PropVariant.cs - - - CoreAudioApi\PropVariantNative.cs - - - CoreAudioApi\Role.cs - - - CoreAudioApi\SessionCollection.cs - - - CoreAudioApi\SimpleAudioVolume.cs - - - Dmo\AudioMediaSubtypes.cs - - - Dmo\IWMResamplerProps.cs - - - Dsp\BiQuadFilter.cs - - - Dsp\Complex.cs - - - Dsp\EnvelopeDetector.cs - - - 
Dsp\EnvelopeGenerator.cs - - - Dsp\FastFourierTransform.cs - - - Dsp\ImpulseResponseConvolution.cs - - - Dsp\SimpleCompressor.cs - - - Dsp\SimpleGate.cs - - - Dsp\WdlResampler.cs - - - FileFormats\Mp3\ChannelMode.cs - - - FileFormats\Mp3\Id3v2Tag.cs - - - FileFormats\Mp3\IMp3FrameDecompressor.cs - - - FileFormats\Mp3\Mp3Frame.cs - - - FileFormats\Mp3\MpegLayer.cs - - - FileFormats\Mp3\MpegVersion.cs - - - FileFormats\Mp3\XingHeader.cs - - - FileFormats\SoundFont\Generator.cs - - - FileFormats\SoundFont\GeneratorBuilder.cs - - - FileFormats\SoundFont\GeneratorEnum.cs - - - FileFormats\SoundFont\InfoChunk.cs - - - FileFormats\SoundFont\Instrument.cs - - - FileFormats\SoundFont\InstrumentBuilder.cs - - - FileFormats\SoundFont\Modulator.cs - - - FileFormats\SoundFont\ModulatorBuilder.cs - - - FileFormats\SoundFont\ModulatorType.cs - - - FileFormats\SoundFont\Preset.cs - - - FileFormats\SoundFont\PresetBuilder.cs - - - FileFormats\SoundFont\PresetsChunk.cs - - - FileFormats\SoundFont\RiffChunk.cs - - - FileFormats\SoundFont\SampleDataChunk.cs - - - FileFormats\SoundFont\SampleHeader.cs - - - FileFormats\SoundFont\SampleHeaderBuilder.cs - - - FileFormats\SoundFont\SampleMode.cs - - - FileFormats\SoundFont\SFSampleLink.cs - - - FileFormats\SoundFont\SFVersion.cs - - - FileFormats\SoundFont\SFVersionBuilder.cs - - - FileFormats\SoundFont\SoundFont.cs - - - FileFormats\SoundFont\StructureBuilder.cs - - - FileFormats\SoundFont\Zone.cs - - - FileFormats\SoundFont\ZoneBuilder.cs - - - MediaFoundation\AudioSubtypes.cs - - - MediaFoundation\IMFActivate.cs - - - MediaFoundation\IMFAttributes.cs - - - MediaFoundation\IMFByteStream.cs - - - MediaFoundation\IMFCollection.cs - - - MediaFoundation\IMFMediaBuffer.cs - - - MediaFoundation\IMFMediaEvent.cs - - - MediaFoundation\IMFMediaType.cs - - - MediaFoundation\IMFReadWriteClassFactory.cs - - - MediaFoundation\IMFSample.cs - - - MediaFoundation\IMFSinkWriter.cs - - - MediaFoundation\IMFSourceReader.cs - - - MediaFoundation\IMFTransform.cs - - - MediaFoundation\MediaEventType.cs - - - MediaFoundation\MediaFoundationAttributes.cs - - - MediaFoundation\MediaFoundationErrors.cs - - - MediaFoundation\MediaFoundationHelpers.cs - - - MediaFoundation\MediaFoundationInterop.cs - - - MediaFoundation\MediaFoundationTransform.cs - - - MediaFoundation\MediaFoundationTransformCategories.cs - - - MediaFoundation\MediaType.cs - - - MediaFoundation\MediaTypes.cs - - - MediaFoundation\MFT_INPUT_STREAM_INFO.cs - - - MediaFoundation\MFT_MESSAGE_TYPE.cs - - - MediaFoundation\MFT_OUTPUT_DATA_BUFFER.cs - - - MediaFoundation\MFT_OUTPUT_STREAM_INFO.cs - - - MediaFoundation\MFT_REGISTER_TYPE_INFO.cs - - - MediaFoundation\MF_SINK_WRITER_STATISTICS.cs - - - MediaFoundation\_MFT_ENUM_FLAG.cs - - - MediaFoundation\_MFT_INPUT_STATUS_FLAGS.cs - - - MediaFoundation\_MFT_INPUT_STREAM_INFO_FLAGS.cs - - - MediaFoundation\_MFT_OUTPUT_DATA_BUFFER_FLAGS.cs - - - MediaFoundation\_MFT_OUTPUT_STATUS_FLAGS.cs - - - MediaFoundation\_MFT_OUTPUT_STREAM_INFO_FLAGS.cs - - - MediaFoundation\_MFT_PROCESS_OUTPUT_FLAGS.cs - - - MediaFoundation\_MFT_PROCESS_OUTPUT_STATUS.cs - - - MediaFoundation\_MFT_SET_TYPE_FLAGS.cs - - - Utils\BufferHelpers.cs - - - Utils\ByteArrayExtensions.cs - - - Utils\ByteEncoding.cs - - - Utils\CircularBuffer.cs - - - Utils\Decibels.cs - - - Utils\FieldDescriptionAttribute.cs - - - Utils\FieldDescriptionHelper.cs - - - Utils\HResult.cs - - - Utils\IEEE.cs - - - Utils\IgnoreDisposeStream.cs - - - Utils\MarshalHelpers.cs - - - Utils\MergeSort.cs - - - Utils\NativeMethods.cs - - - 
Wave\WaveInputs\WaveInEventArgs.cs - - - Wave\SampleChunkConverters\ISampleChunkConverter.cs - - - Wave\SampleChunkConverters\Mono16SampleChunkConverter.cs - - - Wave\SampleChunkConverters\Mono24SampleChunkConverter.cs - - - Wave\SampleChunkConverters\Mono8SampleChunkConverter.cs - - - Wave\SampleChunkConverters\MonoFloatSampleChunkConverter.cs - - - Wave\SampleChunkConverters\Stereo16SampleChunkConverter.cs - - - Wave\SampleChunkConverters\Stereo24SampleChunkConverter.cs - - - Wave\SampleChunkConverters\Stereo8SampleChunkConverter.cs - - - Wave\SampleChunkConverters\StereoFloatSampleChunkConverter.cs - - - Wave\SampleProviders\ConcatenatingSampleProvider.cs - - - Wave\SampleProviders\FadeInOutSampleProvider.cs - - - Wave\SampleProviders\MeteringSampleProvider.cs - - - Wave\SampleProviders\MixingSampleProvider.cs - - - Wave\SampleProviders\MonoToStereoSampleProvider.cs - - - Wave\SampleProviders\MultiplexingSampleProvider.cs - - - Wave\SampleProviders\NotifyingSampleProvider.cs - - - Wave\SampleProviders\OffsetSampleProvider.cs - - - Wave\SampleProviders\PanningSampleProvider.cs - - - Wave\SampleProviders\Pcm16BitToSampleProvider.cs - - - Wave\SampleProviders\Pcm24BitToSampleProvider.cs - - - Wave\SampleProviders\Pcm32BitToSampleProvider.cs - - - Wave\SampleProviders\Pcm8BitToSampleProvider.cs - - - Wave\SampleProviders\SampleChannel.cs - - - Wave\SampleProviders\SampleProviderConverterBase.cs - - - Wave\SampleProviders\SampleProviderConverters.cs - - - Wave\SampleProviders\SampleToWaveProvider.cs - - - Wave\SampleProviders\SampleToWaveProvider16.cs - - - Wave\SampleProviders\SampleToWaveProvider24.cs - - - Wave\SampleProviders\SignalGenerator.cs - - - Wave\SampleProviders\StereoToMonoSampleProvider.cs - - - Wave\SampleProviders\VolumeSampleProvider.cs - - - Wave\SampleProviders\WaveToSampleProvider.cs - - - Wave\SampleProviders\WaveToSampleProvider64.cs - - - Wave\SampleProviders\WdlResamplingSampleProvider.cs - - - Wave\WaveExtensionMethods.cs - - - Wave\WaveFormats\AdpcmWaveFormat.cs - - - Wave\WaveFormats\Gsm610WaveFormat.cs - - - Wave\WaveFormats\ImaAdpcmWaveFormat.cs - - - Wave\WaveFormats\Mp3WaveFormat.cs - - - Wave\WaveFormats\OggWaveFormat.cs - - - Wave\WaveFormats\TrueSpeechWaveFormat.cs - - - Wave\WaveFormats\WaveFormat.cs - - - Wave\WaveFormats\WaveFormatEncoding.cs - - - Wave\WaveFormats\WaveFormatExtensible.cs - - - Wave\WaveFormats\WaveFormatExtraData.cs - - - Wave\WaveFormats\WmaWaveFormat.cs - - - Wave\WaveInputs\IWaveIn.cs - - - Wave\WaveOutputs\IWaveBuffer.cs - - - Wave\WaveOutputs\IWaveProvider.cs - - - Wave\WaveOutputs\IWaveProviderFloat.cs - - - Wave\WaveOutputs\PlaybackState.cs - - - Wave\WaveOutputs\StoppedEventArgs.cs - - - Wave\WaveOutputs\WaveBuffer.cs - - - Wave\WaveProviders\BufferedWaveProvider.cs - - - Wave\WaveProviders\MonoToStereoProvider16.cs - - - Wave\WaveProviders\MultiplexingWaveProvider.cs - - - Wave\WaveProviders\StereoToMonoProvider16.cs - - - Wave\WaveProviders\VolumeWaveProvider16.cs - - - Wave\WaveProviders\Wave16toFloatProvider.cs - - - Wave\WaveProviders\WaveFloatTo16Provider.cs - - - Wave\WaveProviders\WaveInProvider.cs - - - Wave\WaveProviders\WaveProvider16.cs - - - Wave\WaveProviders\WaveProvider32.cs - - - Wave\WaveStreams\BlockAlignReductionStream.cs - - - Wave\WaveStreams\ISampleNotifier.cs - - - Wave\WaveStreams\MediaFoundationReader.cs - - - Wave\WaveStreams\RawSourceWaveStream.cs - - - Wave\WaveStreams\SimpleCompressorStream.cs - - - Wave\WaveStreams\WaveChannel32.cs - - - Wave\WaveStreams\WaveOffsetStream.cs - - - 
Wave\WaveStreams\WaveStream.cs - - - - - - - - - 12.0 - - - - \ No newline at end of file diff --git a/NAudio.Win8/Properties/AssemblyInfo.cs b/NAudio.Win8/Properties/AssemblyInfo.cs deleted file mode 100644 index 22b7f499..00000000 --- a/NAudio.Win8/Properties/AssemblyInfo.cs +++ /dev/null @@ -1,29 +0,0 @@ -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. -[assembly: AssemblyTitle("NAudio.Win8")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("NAudio.Win8")] -[assembly: AssemblyCopyright("Copyright © Mark Heath 2013")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.7.0.15")] -[assembly: AssemblyFileVersion("1.7.0.15")] -[assembly: ComVisible(false)] \ No newline at end of file diff --git a/NAudio.Win8/Wave/WaveOutputs/IWavePlayer.cs b/NAudio.Win8/Wave/WaveOutputs/IWavePlayer.cs deleted file mode 100644 index 6605a3d9..00000000 --- a/NAudio.Win8/Wave/WaveOutputs/IWavePlayer.cs +++ /dev/null @@ -1,53 +0,0 @@ -using System; -using System.Linq; -using System.Threading.Tasks; - -namespace NAudio.Wave -{ - /// - /// Represents the interface to a device that can play audio - /// - public interface IWavePlayer : IDisposable - { - /// - /// Begin playback - /// - void Play(); - - /// - /// Stop playback - /// - void Stop(); - - /// - /// Pause Playback - /// - void Pause(); - - /// - /// Obsolete init method - /// - /// - /// - [Obsolete] - Task Init(IWaveProvider waveProvider); - - /// - /// Initialise playback - /// - /// Function to create the waveprovider to be played - /// Called on the playback thread - void Init(Func waveProviderFunc); - - /// - /// Current playback state - /// - PlaybackState PlaybackState { get; } - - /// - /// Indicates that playback has gone into a stopped state due to - /// reaching the end of the input stream or an error has been encountered during playback - /// - event EventHandler PlaybackStopped; - } -} diff --git a/NAudio.nuspec b/NAudio.nuspec index 86251ef2..6f14be8f 100644 --- a/NAudio.nuspec +++ b/NAudio.nuspec @@ -13,12 +13,10 @@ C# .NET audio sound - - - - + + diff --git a/NAudio/Properties/AssemblyInfo.cs b/NAudio/Properties/AssemblyInfo.cs index 97418b5b..5f9cc874 100644 --- a/NAudio/Properties/AssemblyInfo.cs +++ b/NAudio/Properties/AssemblyInfo.cs @@ -32,5 +32,5 @@ // // You can specify all the values or you can default the Revision and Build Numbers // by using the '*' as shown below: -[assembly: AssemblyVersion("1.8.3.0")] -[assembly: AssemblyFileVersion("1.8.3.0")] +[assembly: AssemblyVersion("1.8.4.0")] +[assembly: AssemblyFileVersion("1.8.4.0")] diff --git a/NAudioUniversalDemo/MainPageViewModel.cs b/NAudioUniversalDemo/MainPageViewModel.cs index 9fa641e8..04eafec8 100644 --- a/NAudioUniversalDemo/MainPageViewModel.cs +++ b/NAudioUniversalDemo/MainPageViewModel.cs @@ -1,6 +1,5 @@ using System; using System.Linq; -using System.Threading.Tasks; using 
Windows.Storage.Streams; using NAudio.CoreAudioApi; using NAudio.Wave; @@ -34,18 +33,12 @@ public MainPageViewModel() private void Stop() { - if (player != null) - { - player.Stop(); - } + player?.Stop(); } private void Pause() { - if (player != null) - { - player.Pause(); - } + player?.Pause(); } private void Play() @@ -116,10 +109,7 @@ private async void RecorderOnDataAvailable(object sender, WaveInEventArgs waveIn private void StopRecording() { - if (recorder != null) - { - recorder.StopRecording(); - } + recorder?.StopRecording(); } private void RecorderOnRecordingStopped(object sender, StoppedEventArgs stoppedEventArgs) @@ -157,16 +147,16 @@ private async void Load() if (file == null) return; var stream = await file.OpenAsync(FileAccessMode.Read); if (stream == null) return; - this.selectedStream = stream; + selectedStream = stream; PlayCommand.IsEnabled = true; } - public DelegateCommand LoadCommand { get; private set; } - public DelegateCommand PlayCommand { get; private set; } - public DelegateCommand PauseCommand { get; private set; } - public DelegateCommand StopCommand { get; private set; } - public DelegateCommand RecordCommand { get; private set; } - public DelegateCommand StopRecordingCommand { get; private set; } + public DelegateCommand LoadCommand { get; } + public DelegateCommand PlayCommand { get; } + public DelegateCommand PauseCommand { get; } + public DelegateCommand StopCommand { get; } + public DelegateCommand RecordCommand { get; } + public DelegateCommand StopRecordingCommand { get; } public MediaElement MediaElement { get; set; } } diff --git a/NAudioUniversalDemo/MediaFoundationReaderUniversal.cs b/NAudioUniversalDemo/MediaFoundationReaderUniversal.cs index 22b371a9..5bbd546d 100644 --- a/NAudioUniversalDemo/MediaFoundationReaderUniversal.cs +++ b/NAudioUniversalDemo/MediaFoundationReaderUniversal.cs @@ -15,7 +15,7 @@ public class MediaFoundationReaderUniversalSettings : MediaFoundationReaderSetti public MediaFoundationReaderUniversalSettings() { // can't recreate since we're using a file stream - this.SingleReaderObject = true; + SingleReaderObject = true; } public IRandomAccessStream Stream { get; set; } @@ -56,9 +56,9 @@ protected override IMFSourceReader CreateReader(MediaFoundationReaderSettings se protected override void Dispose(bool disposing) { - if (disposing && settings.Stream != null) + if (disposing) { - settings.Stream.Dispose(); + settings.Stream?.Dispose(); } base.Dispose(disposing); } diff --git a/NAudioUniversalDemo/Properties/AssemblyInfo.cs b/NAudioUniversalDemo/Properties/AssemblyInfo.cs index 1db0c95e..ac8cfdb4 100644 --- a/NAudioUniversalDemo/Properties/AssemblyInfo.cs +++ b/NAudioUniversalDemo/Properties/AssemblyInfo.cs @@ -10,7 +10,7 @@ [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("NAudioUniversalDemo")] -[assembly: AssemblyCopyright("Copyright © 2015")] +[assembly: AssemblyCopyright("Copyright © 2017")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] diff --git a/NAudioWin8Demo/App.xaml b/NAudioWin8Demo/App.xaml deleted file mode 100644 index eab92a74..00000000 --- a/NAudioWin8Demo/App.xaml +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - - - - - - - diff --git a/NAudioWin8Demo/App.xaml.cs b/NAudioWin8Demo/App.xaml.cs deleted file mode 100644 index 2e811b77..00000000 --- a/NAudioWin8Demo/App.xaml.cs +++ /dev/null @@ -1,90 +0,0 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using Windows.ApplicationModel; -using 
Windows.ApplicationModel.Activation; -using Windows.Foundation; -using Windows.Foundation.Collections; -using Windows.UI.Xaml; -using Windows.UI.Xaml.Controls; -using Windows.UI.Xaml.Controls.Primitives; -using Windows.UI.Xaml.Data; -using Windows.UI.Xaml.Input; -using Windows.UI.Xaml.Media; -using Windows.UI.Xaml.Navigation; - -// The Blank Application template is documented at http://go.microsoft.com/fwlink/?LinkId=234227 - -namespace NAudioWin8Demo -{ - /// - /// Provides application-specific behavior to supplement the default Application class. - /// - sealed partial class App : Application - { - /// - /// Initializes the singleton application object. This is the first line of authored code - /// executed, and as such is the logical equivalent of main() or WinMain(). - /// - public App() - { - this.InitializeComponent(); - this.Suspending += OnSuspending; - } - - /// - /// Invoked when the application is launched normally by the end user. Other entry points - /// will be used when the application is launched to open a specific file, to display - /// search results, and so forth. - /// - /// Details about the launch request and process. - protected override void OnLaunched(LaunchActivatedEventArgs args) - { - Frame rootFrame = Window.Current.Content as Frame; - - // Do not repeat app initialization when the Window already has content, - // just ensure that the window is active - if (rootFrame == null) - { - // Create a Frame to act as the navigation context and navigate to the first page - rootFrame = new Frame(); - - if (args.PreviousExecutionState == ApplicationExecutionState.Terminated) - { - //TODO: Load state from previously suspended application - } - - // Place the frame in the current Window - Window.Current.Content = rootFrame; - } - - if (rootFrame.Content == null) - { - // When the navigation stack isn't restored navigate to the first page, - // configuring the new page by passing required information as a navigation - // parameter - if (!rootFrame.Navigate(typeof(MainPage), args.Arguments)) - { - throw new Exception("Failed to create initial page"); - } - } - // Ensure the current window is active - Window.Current.Activate(); - } - - /// - /// Invoked when application execution is being suspended. Application state is saved - /// without knowing whether the application will be terminated or resumed with the contents - /// of memory still intact. - /// - /// The source of the suspend request. - /// Details about the suspend request. - private void OnSuspending(object sender, SuspendingEventArgs e) - { - var deferral = e.SuspendingOperation.GetDeferral(); - //TODO: Save application state and stop any background activity - deferral.Complete(); - } - } -} diff --git a/NAudioWin8Demo/Assets/Logo.png b/NAudioWin8Demo/Assets/Logo.png deleted file mode 100644 index 57934f14e74ba4516180f38102501ccd08fe738d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2724 zcma)8`#%$k8>X_{LUW0gG(%(&M!AHIv03T3HVnO{es9^LEhw3Fs*Z1`rb5MZvg>OWwa&SG0J1H$UjZRNg*+kBQ~h-K{=)pkQw4@ zaDi<%+)L(^`Tcg9A>YNm{x01rAx~)=gX*kBQi1Ny9JN|gNc!d;6u=4m+Pl%SD# z0Af|LN@ZYi319^m`O)ohOOkiF{M+hzr|?%=E~5?Hf>|WnM6I?3RSo}#WT9gu%N2SY9PW^e3@T*D%Z*p63>I8ByXEDMT zzx`{`7rk+PXpY=)?QYPWl7|Vl#^0UiD5dc$P1Xu6mNS{#=$-Y-v9YnMnDf$YRw{c! 
diff --git a/NAudioWin8Demo/Assets/Logo.png b/NAudioWin8Demo/Assets/Logo.png
deleted file mode 100644
index 57934f14e74ba4516180f38102501ccd08fe738d..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
literal 2724
[base85-encoded binary payload omitted]
diff --git a/NAudioWin8Demo/Assets/SmallLogo.png b/NAudioWin8Demo/Assets/SmallLogo.png
deleted file mode 100644
index 4907b99016aa2966d2d6ed7ac62210b640bba202..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
literal 808
[base85-encoded binary payload omitted]
[base85 payloads for the remaining deleted Assets images omitted; their diff headers were not preserved in this copy of the patch]
zkz+%ub3N}JrT4uz%q5~%PNpMEn;@Fz1@W7dBEfR5_NwFZnK}EnH!?u zRub0Q_B(|!kSX)f%*wY_H!qmx64{l|218AMG$%OZfxA@}*_%MWwU*k8sItIY<-&Tj z`isX~wVYKLz<=S!t8A!aDe>-BRoNS7H``KTH&0|cWCTc*=K?goHZRLZ0stV6SwLmyC`R zPe(!dL>?qPIcoSsB8mR?M%$1|`2pWgbJ8kT{ZZX;u&O}1 zhT!AR(lR(uh3`k3n$>^|yfngIG{B|dY`L-0UdHa1*Fw^!g2d)CG^-H+J!f3=>g46J zHRQu!`KIWp<8H<@`MKk*4R1~UOjZbvtsGg?g)&Qp3@6WP1H+|yO+bm+QA=xjy=61L zCiCh0pAX2)^^Mv}$pwYNNS6sXkJ6j(J)}F1fh}-)!BPICnU@ny3Sv62-DXWssgq7e zWt`G`YmqH%E8Gx}bL-=j`Z|a4Y0oHGG&+rI#BqvW=c>oLD<_XJeO>_L`!08+(cJBu z=rG25keKV}=r~a@xdlNGAX5fq@R1h zk6FfrAk?8-M?Sf+?FLM-H>>v8bQokOUE1GtWAzFxQDhlg4-LU-txT3}on5Rk)VdHJ-`4lz z5jW2F4pb9M{^=G5)lpLCRuWNmjk%yZ^-~1Z(7cjw|^{vSJa_mXQQ7c^*u^7yOn|xt8H@zHTWM9 zHtBM|*ZC6#1M6^NNKKZPILgXxVxn9|E{YD_;s5~;KNMd59Y?+OZ*9b7)QU({-2u{a zHRtAJc0x77rS)uR{4?#(4*;Q15C3HuIrT1o-mL}82TR{es78~cqeg7)X`)4(&8)iR zW6pK7#*NmzZx?>}@`h=)`NIgRWK7@LBKF~*14gWePh^O^`m?zuo(;b5kSAnpT5)WP zi1G2mhcV9nJ3VORTBd2MJFRjARW^|~K{$MJ<3FOB!?%1q-H>piXHv=Tz9}7U^M?s>XxSQR;!^j zAl5abf8`pR{!koUGZU`h3Vb=|LKo>$(HUU-=OuryjW8;at{)>slkmDrvM(T{qxFR| z#FDo)W}?6vwIRRjQ{cmqe6PRS&`*^IIDn7?8hCZQT(d(JSVu4X*GFu5N2bZe?^J zG%heMHD!e|WdHyJt4TybRA@u(SkG${K@^Uns95l4D@5C1lVBB8N?NkByGaTfi5BrD zS`h^W{Riw(5Nt2?(u-11km5m5=s~@B=*5G0@Kn%?DE(y>?5zg{OZ{H@raRlrCcBWB z9_E9O()VNLeLFL6ri~dG7#J8B7}P+aP}pzV_H7}Y``{4KvaEybZWw4j+jd5YLfFHW z<(6RHwCarr$8jv%c7K5@e%d`8(lB72AM588!hHr;G%=Y>W-Ah>y+NGs_&Q_w`A7(n z;IM|lvYY|%2JHaM|3}tg+qnxI5)N3(f+FH?Hk&`nuKy0y@W2z|K4Eh>K=~D!jFoxM z=DK0J{YZR1ksc4?w?umF!(nAEdP1C6Yz_w$e^n9koXvFu6^epaf;gK<_Hihk)$@vw z*K7_4RS1&D*<3e75h6scf5Qq{=l%S1 zHH;{Soq(b*DgH&wZVE8}uvd#S!RBy)5Z6mCQ`=qE6WgFJT}vfyb@>{{*>Q%r#BiX;BGfC- zf^e+YVb{}HLf&C6F+40Qe;mo5^n>czKp}HDD-|!|D8Pd)->r6(Y7E=8N~O{^DY_Ps n(^K^?LIVQ>0|NsCiCF9(24WmLZj7@800000NkvXXu0mjfG%4pv diff --git a/NAudioWin8Demo/Common/StandardStyles.xaml b/NAudioWin8Demo/Common/StandardStyles.xaml deleted file mode 100644 index 01fd46a8..00000000 --- a/NAudioWin8Demo/Common/StandardStyles.xaml +++ /dev/null @@ -1,1829 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Mouse - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/NAudioWin8Demo/DelegateCommand.cs b/NAudioWin8Demo/DelegateCommand.cs deleted file mode 100644 index ad109acc..00000000 --- a/NAudioWin8Demo/DelegateCommand.cs +++ /dev/null @@ -1,48 +0,0 @@ -using System; -using System.Windows.Input; - -namespace NAudioWin8Demo -{ - internal class DelegateCommand : ICommand - { - private readonly Action action; - private bool enabled; - - public DelegateCommand(Action action) - { - this.action = action; - this.enabled = true; - } - - public bool IsEnabled - { - get { return enabled; } - set - { - if (enabled != value) - { - enabled = value; - OnCanExecuteChanged(); - } - } - } - - public bool CanExecute(object parameter) - { - return enabled; - } - - public void Execute(object parameter) - { - action(); - } - - public event EventHandler CanExecuteChanged; - - protected virtual void OnCanExecuteChanged() - { - EventHandler handler = CanExecuteChanged; - if (handler != null) handler(this, EventArgs.Empty); - } - } -} \ No newline at end of file diff --git a/NAudioWin8Demo/MainPage.xaml b/NAudioWin8Demo/MainPage.xaml deleted file mode 
diff --git a/NAudioWin8Demo/MainPage.xaml b/NAudioWin8Demo/MainPage.xaml
deleted file mode 100644
index c3ab63fa..00000000
--- a/NAudioWin8Demo/MainPage.xaml
+++ /dev/null
@@ -1,23 +0,0 @@
[the 23 removed XAML lines are not preserved in this copy of the patch]