This article collects typical usage examples of the C# IWaveProvider class. If you have been wondering what IWaveProvider is for, how to use it, or what real-world code that uses it looks like, the curated class examples below should help.
The IWaveProvider class belongs to the NAudio.Wave namespace. Twenty code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# code examples.
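Before diving into the examples, it helps to recall the shape of the interface itself: IWaveProvider exposes a WaveFormat property and a Read method that fills a caller-supplied byte buffer. The following is a minimal illustrative sketch, not taken from any of the projects below; the class name SilenceWaveProvider is hypothetical, and it simply produces endless silence.

using System;
using NAudio.Wave;

// Minimal sketch of an IWaveProvider implementation: an endless stream of silence.
class SilenceWaveProvider : IWaveProvider
{
    public SilenceWaveProvider(WaveFormat format)
    {
        WaveFormat = format;
    }

    public WaveFormat WaveFormat { get; }

    public int Read(byte[] buffer, int offset, int count)
    {
        // Zero the requested region and report it all as read, so playback never ends.
        Array.Clear(buffer, offset, count);
        return count;
    }
}

Every example below either consumes an IWaveProvider like this one (players, encoders, converters) or wraps one to produce another.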
Example 1: ResamplerDmoStream
/// <summary>
/// WaveStream to resample using the DMO Resampler
/// </summary>
/// <param name="inputProvider">Input Stream</param>
/// <param name="outputFormat">Desired Output Format</param>
public ResamplerDmoStream(IWaveProvider inputProvider, WaveFormat outputFormat)
{
this.inputProvider = inputProvider;
this.inputStream = inputProvider as WaveStream;
this.outputFormat = outputFormat;
this.resampler = new Resampler();
if (!resampler.MediaObject.SupportsInputWaveFormat(0, inputProvider.WaveFormat))
{
throw new ArgumentException("Unsupported Input Stream format", "inputStream");
}
resampler.MediaObject.SetInputWaveFormat(0, inputProvider.WaveFormat);
if (!resampler.MediaObject.SupportsOutputWaveFormat(0, outputFormat))
{
throw new ArgumentException("Unsupported Output Stream format", "outputStream");
}
resampler.MediaObject.SetOutputWaveFormat(0, outputFormat);
if (inputStream != null)
{
position = InputToOutputPosition(inputStream.Position);
}
this.inputMediaBuffer = new MediaBuffer(inputProvider.WaveFormat.AverageBytesPerSecond);
this.outputBuffer = new DmoOutputDataBuffer(outputFormat.AverageBytesPerSecond);
}
Developer: BGCX261, Project: ziggy-pro-editor-svn-to-git, Lines: 30, Source: ResamplerDmoStream.cs
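A hedged usage sketch for this constructor: resample a PCM WAV file to 16 kHz while keeping its bit depth and channel count. The file names are illustrative, and the DMO resampler is Windows-only.

using NAudio.Wave;

// Sketch: change only the sample rate of a PCM file via the DMO resampler.
using (var reader = new WaveFileReader("input.wav"))
{
    var outFormat = new WaveFormat(16000, reader.WaveFormat.BitsPerSample, reader.WaveFormat.Channels);
    using (var resampled = new ResamplerDmoStream(reader, outFormat))
    using (var writer = new WaveFileWriter("resampled.wav", resampled.WaveFormat))
    {
        var buffer = new byte[resampled.WaveFormat.AverageBytesPerSecond];
        int read;
        while ((read = resampled.Read(buffer, 0, buffer.Length)) > 0)
        {
            writer.Write(buffer, 0, read);
        }
    }
}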
Example 2: LoadNextChunk
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
int sourceBytesRequired = samplePairsRequired; // mono 8-bit: one byte per sample
sourceBuffer = BufferHelpers.Ensure(sourceBuffer, sourceBytesRequired);
sourceBytes = source.Read(sourceBuffer, 0, sourceBytesRequired);
offset = 0;
}
Developer: aljordan, Project: NAJAudio, Lines: 7, Source: Mono8SampleChunkConverter.cs
Example 3: AddInputStream
/// <summary>
/// Add a new input to the mixer
/// </summary>
/// <param name="waveProvider">The wave input to add</param>
public void AddInputStream(IWaveProvider waveProvider)
{
if (waveProvider.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
throw new ArgumentException("Must be IEEE floating point", "waveProvider.WaveFormat");
if (waveProvider.WaveFormat.BitsPerSample != 32)
throw new ArgumentException("Only 32 bit audio currently supported", "waveProvider.WaveFormat");
if (inputs.Count == 0)
{
// first one - set the format
int sampleRate = waveProvider.WaveFormat.SampleRate;
int channels = waveProvider.WaveFormat.Channels;
this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
}
else
{
if (!waveProvider.WaveFormat.Equals(waveFormat))
throw new ArgumentException("All incoming channels must have the same format", "waveProvider.WaveFormat");
}
lock (inputs)
{
this.inputs.Add(waveProvider);
}
}
Developer: EnergonV, Project: BestCS, Lines: 29, Source: MixingWaveProvider32.cs
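A hedged usage sketch for AddInputStream: AudioFileReader produces 32-bit IEEE float output, so its streams can be added directly, provided they share the same sample rate and channel count. The parameterless MixingWaveProvider32 constructor and the file names are assumptions for illustration.

using NAudio.Wave;

// Sketch: mix two float streams and play the result.
var mixer = new MixingWaveProvider32();
mixer.AddInputStream(new AudioFileReader("music.wav"));   // 32-bit IEEE float output
mixer.AddInputStream(new AudioFileReader("effects.wav")); // must match the first input's format

using (var waveOut = new WaveOutEvent())
{
    waveOut.Init(mixer);
    waveOut.Play();
    // in a real application, keep waveOut alive until playback has finished
}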
Example 4: MediaFoundationTransform
/// <summary>
/// Constructs a new MediaFoundationTransform wrapper
/// Will read one second at a time
/// </summary>
/// <param name="sourceProvider">The source provider for input data to the transform</param>
/// <param name="outputFormat">The desired output format</param>
public MediaFoundationTransform(IWaveProvider sourceProvider, WaveFormat outputFormat)
{
this.outputWaveFormat = outputFormat;
this.sourceProvider = sourceProvider;
sourceBuffer = new byte[ComputeSourceBufferSize(sourceProvider)];
outputBuffer = new byte[ComputeOutputBufferSize(outputFormat)]; // we will grow this buffer if needed, but try to make something big enough
}
Developer: KarimLUCCIN, Project: NAudioCustom, Lines: 13, Source: MediaFoundationTransform.cs
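MediaFoundationTransform is an abstract base class; in NAudio this constructor is normally reached through a derived type such as MediaFoundationResampler. A hedged sketch follows, with an illustrative file name and target rate.

using NAudio.Wave;

// Sketch: resample via Media Foundation; the transform pulls data from the source provider in blocks.
using (var reader = new AudioFileReader("input.mp3"))
{
    var outFormat = WaveFormat.CreateIeeeFloatWaveFormat(16000, reader.WaveFormat.Channels);
    using (var resampler = new MediaFoundationResampler(reader, outFormat))
    {
        WaveFileWriter.CreateWaveFile("resampled.wav", resampler);
    }
}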
Example 5: ConvertWaveProviderIntoSampleProvider
/// <summary>
/// Helper function to go from IWaveProvider to a SampleProvider
/// Must already be PCM or IEEE float
/// </summary>
/// <param name="waveProvider">The WaveProvider to convert</param>
/// <returns>A sample provider</returns>
public static ISampleProvider ConvertWaveProviderIntoSampleProvider(IWaveProvider waveProvider)
{
ISampleProvider sampleProvider;
if (waveProvider.WaveFormat.Encoding == WaveFormatEncoding.Pcm)
{
// go to float
if (waveProvider.WaveFormat.BitsPerSample == 8)
{
sampleProvider = new Pcm8BitToSampleProvider(waveProvider);
}
else if (waveProvider.WaveFormat.BitsPerSample == 16)
{
sampleProvider = new Pcm16BitToSampleProvider(waveProvider);
}
else if (waveProvider.WaveFormat.BitsPerSample == 24)
{
sampleProvider = new Pcm24BitToSampleProvider(waveProvider);
}
else
{
throw new InvalidOperationException("Unsupported operation");
}
}
else if (waveProvider.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
{
sampleProvider = new WaveToSampleProvider(waveProvider);
}
else
{
throw new ArgumentException("Unsupported source encoding");
}
return sampleProvider;
}
Developer: hexd0t, Project: Garm_Net, Lines: 39, Source: SampleProviderConverters.cs
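A hedged usage note: in recent NAudio versions this conversion is usually reached through the ToSampleProvider() extension method on IWaveProvider rather than by calling the static helper directly (which assumes the containing class is visible from your code). The file name below is illustrative.

using NAudio.Wave;

// Sketch: treat a PCM file as 32-bit float samples for sample-level processing.
using (var reader = new WaveFileReader("input.wav"))
{
    ISampleProvider samples = reader.ToSampleProvider(); // delegates to the kind of conversion shown above

    var buffer = new float[samples.WaveFormat.SampleRate * samples.WaveFormat.Channels];
    int read = samples.Read(buffer, 0, buffer.Length);   // at most one second of samples
}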
Example 6: RemoveInputStream
/// <summary>
/// Remove an input from the mixer
/// </summary>
/// <param name="waveProvider">waveProvider to remove</param>
public void RemoveInputStream(IWaveProvider waveProvider)
{
lock (inputs)
{
this.inputs.Remove(waveProvider);
}
}
Developer: EnergonV, Project: BestCS, Lines: 11, Source: MixingWaveProvider32.cs
Example 7: AudioPlayer
public AudioPlayer(IWaveProvider provider)
{
_playbackDevice.Init(provider); // _playbackDevice: playback device field (IWavePlayer) initialised elsewhere in this class
_playbackDevice.Play();
_playbackDevice.PlaybackStopped += (sender, args) => Console.WriteLine("Playback stopped: " + args.Exception);
}
Developer: kunnis, Project: MumbleSharp, Lines: 7, Source: ConsoleMumbleProtocol.cs
Example 8: Init
/// <summary>
/// Initialises the WaveOut device
/// </summary>
/// <param name="waveProvider">WaveProvider to play</param>
public void Init(IWaveProvider waveProvider)
{
if (playbackState != PlaybackState.Stopped)
{
throw new InvalidOperationException("Can't re-initialize during playback");
}
if (hWaveOut != IntPtr.Zero)
{
// normally we don't allow calling Init twice, but as experiment, see if we can clean up and go again
// try to allow reuse of this waveOut device
// n.b. risky if Playback thread has not exited
DisposeBuffers();
CloseWaveOut();
}
this.callbackEvent = new AutoResetEvent(false);
this.waveStream = waveProvider;
int bufferSize = waveProvider.WaveFormat.ConvertLatencyToByteSize((DesiredLatency + NumberOfBuffers - 1) / NumberOfBuffers);
MmResult result;
lock (waveOutLock)
{
result = WaveInterop.waveOutOpenWindow(out hWaveOut, (IntPtr)DeviceNumber, waveStream.WaveFormat, callbackEvent.SafeWaitHandle.DangerousGetHandle(), IntPtr.Zero, WaveInterop.WaveInOutOpenFlags.CallbackEvent);
}
MmException.Try(result, "waveOutOpen");
buffers = new WaveOutBuffer[NumberOfBuffers];
playbackState = PlaybackState.Stopped;
for (int n = 0; n < NumberOfBuffers; n++)
{
buffers[n] = new WaveOutBuffer(hWaveOut, bufferSize, waveStream, waveOutLock);
}
}
Developer: EnergonV, Project: BestCS, Lines: 38, Source: WaveOutEvent.cs
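A hedged usage sketch for this Init: open a file, initialise the device, and block until playback ends. The file name is illustrative.

using System.Threading;
using NAudio.Wave;

// Sketch: play an IWaveProvider through WaveOutEvent.
using (var reader = new AudioFileReader("input.mp3"))
using (var waveOut = new WaveOutEvent())
{
    waveOut.Init(reader);   // the method shown above: opens the device and allocates the buffers
    waveOut.Play();
    while (waveOut.PlaybackState == PlaybackState.Playing)
    {
        Thread.Sleep(200);  // in a GUI application, subscribe to PlaybackStopped instead of polling
    }
}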
Example 9: Init
/// <summary>
/// Initialises the WaveOut device
/// </summary>
/// <param name="waveProvider">Wave provider to play</param>
public void Init(IWaveProvider waveProvider)
{
if (Thread.CurrentThread.ManagedThreadId != waveOutThread.ManagedThreadId)
{
lock (actionQueue)
{
// queue the incoming waveProvider (not the waveStream field, which has not been assigned yet) for the playback thread
actionQueue.Enqueue(new WaveOutAction(WaveOutFunction.Init, waveProvider));
workAvailable.Set();
}
return;
}
waveStream = waveProvider;
int bufferSize = waveProvider.WaveFormat.ConvertLatencyToByteSize(desiredLatency);
//waveStream.GetReadSize((desiredLatency + 2) / 3);
numBuffers = 3;
MmException.Try(
WaveInterop.waveOutOpen(out hWaveOut, (IntPtr) devNumber, waveStream.WaveFormat, callback, IntPtr.Zero,
WaveInterop.CallbackFunction), "waveOutOpen");
buffers = new WaveOutBuffer[numBuffers];
playbackState = PlaybackState.Stopped;
var waveOutLock = new object();
for (int n = 0; n < numBuffers; n++)
{
buffers[n] = new WaveOutBuffer(hWaveOut, bufferSize, waveStream, waveOutLock);
}
}
Developer: teetow, Project: teevegas, Lines: 33, Source: WaveOutThreadSafe.cs
Example 10: Init
public Task Init(IWaveProvider waveProvider)
{
// do this still on the gui thread
mediaElement.SetSource(new WaveProviderRandomAccessStream(waveProvider), "audio/wav");
// return an already-completed task; an unstarted Task (new Task(...)) would never finish if awaited
return Task.FromResult(true);
}
Developer: EraYaN, Project: Moonstone, Lines: 7, Source: MediaElementOut.cs
Example 11: MediaFoundationTransform
/// <summary>
/// Constructs a new MediaFoundationTransform wrapper
/// Will read one second at a time
/// </summary>
/// <param name="sourceProvider">The source provider for input data to the transform</param>
/// <param name="outputFormat">The desired output format</param>
public MediaFoundationTransform(IWaveProvider sourceProvider, WaveFormat outputFormat)
{
this.outputWaveFormat = outputFormat;
this.sourceProvider = sourceProvider;
sourceBuffer = new byte[sourceProvider.WaveFormat.AverageBytesPerSecond];
outputBuffer = new byte[outputWaveFormat.AverageBytesPerSecond + outputWaveFormat.BlockAlign]; // we will grow this buffer if needed, but try to make something big enough
}
Developer: Shadetheartist, Project: Numboard-2.0, Lines: 13, Source: MediaFoundationTransform.cs
Example 12: LoadNextChunk
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
int sourceBytesRequired = samplePairsRequired * 6; // stereo 24-bit: 2 channels x 3 bytes per sample
sourceBuffer = GetSourceBuffer(sourceBytesRequired);
sourceBytes = source.Read(sourceBuffer, 0, sourceBytesRequired);
offset = 0;
}
Developer: jayfar, Project: NAudio_1.5_Updates, Lines: 7, Source: Stereo24SampleChunkConverter.cs
Example 13: VolumeWaveProvider16
public VolumeWaveProvider16(IWaveProvider sourceProvider)
{
this.sourceProvider = sourceProvider;
if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
throw new ArgumentException("Expecting PCM input");
if (sourceProvider.WaveFormat.BitsPerSample != 16)
throw new ArgumentException("Expecting 16 bit");
}
Developer: gauravkar, Project: NAudio, Lines: 8, Source: VolumeWaveProvider16.cs
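A hedged usage sketch: wrap a 16-bit PCM source to get a software volume control. The file name and the volume value are illustrative.

using System.Threading;
using NAudio.Wave;

// Sketch: attenuate a 16-bit PCM stream before playback.
using (var reader = new WaveFileReader("input.wav"))   // must be 16-bit PCM for this wrapper
using (var waveOut = new WaveOutEvent())
{
    var volume = new VolumeWaveProvider16(reader) { Volume = 0.5f }; // 50% volume
    waveOut.Init(volume);
    waveOut.Play();
    while (waveOut.PlaybackState == PlaybackState.Playing)
    {
        Thread.Sleep(200);
    }
}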
Example 14: SingleChannelMuxProvider
public SingleChannelMuxProvider(IWaveProvider sourceProvider, int channelNum, int totalChannels)
{
_source = sourceProvider;
_channelNum = channelNum;
_totalChannels = totalChannels;
_bytesPerSample = sourceProvider.WaveFormat.BitsPerSample / 8;
_finalFormat = new WaveFormat(sourceProvider.WaveFormat.SampleRate, sourceProvider.WaveFormat.BitsPerSample, totalChannels);
}
Developer: JoeGilkey, Project: RadioLog, Lines: 8, Source: SingleChannelMuxProvider.cs
Example 15: LoadNextChunk
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
int sourceBytesRequired = samplePairsRequired * 2; // mono 16-bit: 2 bytes per sample
sourceSample = 0;
sourceBuffer = BufferHelpers.Ensure(sourceBuffer, sourceBytesRequired);
sourceWaveBuffer = new WaveBuffer(sourceBuffer);
sourceSamples = source.Read(sourceBuffer, 0, sourceBytesRequired) / 2;
}
Developer: aljordan, Project: NAJAudio, Lines: 8, Source: Mono16SampleChunkConverter.cs
Example 16: WaveToSampleProvider
/// <summary>
/// Initializes a new instance of the WaveToSampleProvider class
/// </summary>
/// <param name="source">Source wave provider, must be IEEE float</param>
public WaveToSampleProvider(IWaveProvider source)
: base(source)
{
if (source.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
{
throw new ArgumentException("Must be already floating point");
}
}
Developer: BGCX261, Project: ziggy-pro-editor-svn-to-git, Lines: 12, Source: WaveToSampleProvider.cs
Example 17: EncodeToMp3
/// <summary>
/// Helper function to simplify encoding to MP3
/// By default, will only be available on Windows 8 and above
/// </summary>
/// <param name="inputProvider">Input provider, must be PCM</param>
/// <param name="outputFile">Output file path, should end with .mp3</param>
/// <param name="desiredBitRate">Desired bitrate. Use GetEncodeBitrates to find the possibilities for your input type</param>
public static void EncodeToMp3(IWaveProvider inputProvider, string outputFile, int desiredBitRate = 192000)
{
var mediaType = SelectMediaType(AudioSubtypes.MFAudioFormat_MP3, inputProvider.WaveFormat, desiredBitRate);
using (var encoder = new MediaFoundationEncoder(mediaType))
{
encoder.Encode(outputFile, inputProvider);
}
}
Developer: yancai, Project: AudioPlayerDemo, Lines: 15, Source: MediaFoundationEncoder.cs
Example 18: LoadNextChunk
public void LoadNextChunk(IWaveProvider source, int samplePairsRequired)
{
int sourceBytesRequired = samplePairsRequired * 4; // stereo 16-bit: 2 channels x 2 bytes per sample
sourceBuffer = GetSourceBuffer(sourceBytesRequired);
sourceWaveBuffer = new WaveBuffer(sourceBuffer);
sourceSamples = source.Read(sourceBuffer, 0, sourceBytesRequired) / 2;
sourceSample = 0;
}
Developer: jayfar, Project: NAudio_1.5_Updates, Lines: 8, Source: Stereo16SampleChunkConverter.cs
Example 19: EncodeToMp3
/// <summary>
/// Helper function to simplify encoding to MP3
/// By default, will only be available on Windows 8 and above
/// </summary>
/// <param name="inputProvider">Input provider, must be PCM</param>
/// <param name="outputFile">Output file path, should end with .mp3</param>
/// <param name="desiredBitRate">Desired bitrate. Use GetEncodeBitrates to find the possibilities for your input type</param>
public static void EncodeToMp3(IWaveProvider inputProvider, string outputFile, int desiredBitRate = 192000)
{
var mediaType = SelectMediaType(AudioSubtypes.MFAudioFormat_MP3, inputProvider.WaveFormat, desiredBitRate);
if (mediaType == null) throw new InvalidOperationException("No suitable MP3 encoders available");
using (var encoder = new MediaFoundationEncoder(mediaType))
{
encoder.Encode(outputFile, inputProvider);
}
}
Developer: ryanbnl, Project: NAudio, Lines: 16, Source: MediaFoundationEncoder.cs
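A hedged usage sketch for the helper above. The file names and bitrate are illustrative; the explicit MediaFoundationApi.Startup() call is defensive (some NAudio versions initialise Media Foundation on demand), the namespaces may differ slightly between NAudio versions, and the MP3 encoder MFT is only available on Windows 8 and later.

using NAudio.MediaFoundation;
using NAudio.Wave;

// Sketch: transcode a PCM WAV file to MP3 via Media Foundation.
MediaFoundationApi.Startup();
using (var reader = new WaveFileReader("input.wav"))
{
    MediaFoundationEncoder.EncodeToMp3(reader, "output.mp3", 192000);
}
MediaFoundationApi.Shutdown();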
Example 20: WaveFormatConversionProvider
/// <summary>
/// Create a new WaveFormat conversion stream
/// </summary>
/// <param name="targetFormat">Desired output format</param>
/// <param name="sourceProvider">Source Provider</param>
public WaveFormatConversionProvider(WaveFormat targetFormat, IWaveProvider sourceProvider)
{
this.sourceProvider = sourceProvider;
this.targetFormat = targetFormat;
conversionStream = new AcmStream(sourceProvider.WaveFormat, targetFormat);
preferredSourceReadSize = Math.Min(sourceProvider.WaveFormat.AverageBytesPerSecond, conversionStream.SourceBuffer.Length);
preferredSourceReadSize -= (preferredSourceReadSize % sourceProvider.WaveFormat.BlockAlign);
}
Developer: ActivePHOENiX, Project: NAudio, Lines: 15, Source: WaveFormatConversionProvider.cs
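A hedged usage sketch: convert only the sample rate through the ACM codec. The ACM PCM converter generally cannot change sample rate, bit depth, and channel count in a single step, so larger conversions may need to be chained; the file names are illustrative.

using NAudio.Wave;

// Sketch: ACM-based sample rate conversion of a PCM file.
using (var reader = new WaveFileReader("input.wav"))
{
    var target = new WaveFormat(8000, reader.WaveFormat.BitsPerSample, reader.WaveFormat.Channels);
    using (var converter = new WaveFormatConversionProvider(target, reader))
    using (var writer = new WaveFileWriter("output.wav", converter.WaveFormat))
    {
        var buffer = new byte[converter.WaveFormat.AverageBytesPerSecond];
        int read;
        while ((read = converter.Read(buffer, 0, buffer.Length)) > 0)
        {
            writer.Write(buffer, 0, read);
        }
    }
}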
Note: The IWaveProvider class examples in this article are compiled from GitHub, MSDocs, and other source-code and documentation platforms. The code snippets are selected from open-source projects contributed by their respective authors, and copyright remains with the original authors; consult each project's license before redistributing or reusing the code. Please do not reproduce this article without permission.