I'm having some issues with NAudio and saving sound recordings. The code I currently have works to the point where it saves the WAV file, but when I open it, Windows Media Player returns an error: "Windows Media Player encountered a problem while playing the file".
I have two buttons: a "Record" button, which turns into a stop button once pressed, and a "Save" button which, when clicked, saves the recording to sample.wav.
NAudio.Wave.WaveIn sourceStream = null;
NAudio.Wave.DirectSoundOut waveOut = null;
NAudio.Wave.WaveFileWriter waveWriter = null;

private void recordButton_Click(object sender, EventArgs e)
{
    int deviceNumber = sourceList.SelectedItems[0].Index;

    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

    NAudio.Wave.WaveInProvider waveIn = new NAudio.Wave.WaveInProvider(sourceStream);

    waveOut = new NAudio.Wave.DirectSoundOut();
    waveOut.Init(waveIn);

    sourceStream.StartRecording();
    waveOut.Play();

    recordButton.Visible = false;
    stopRecord.Visible = true;
}

private void saveResponse_Click(object sender, EventArgs e)
{
    int deviceNumber = sourceList.SelectedItems[0].Index;
    string saveLocation = "c:\\wav\\sample.wav";

    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

    sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
    waveWriter = new NAudio.Wave.WaveFileWriter(saveLocation, sourceStream.WaveFormat);

    sourceStream.StartRecording();

    MessageBox.Show("Recording successfully saved.");
}

private void sourceStream_DataAvailable(object sender, NAudio.Wave.WaveInEventArgs e)
{
    if (waveWriter == null) return;

    waveWriter.WriteData(e.Buffer, 0, e.BytesRecorded);
    waveWriter.Flush();
}

private void stopRecord_Click(object sender, EventArgs e)
{
    if (waveOut != null)
    {
        waveOut.Stop();
        waveOut.Dispose();
        waveOut = null;
    }
    if (sourceStream != null)
    {
        sourceStream.StopRecording();
        sourceStream.Dispose();
        sourceStream = null;
    }
    if (waveWriter != null)
    {
        waveWriter.Dispose();
        waveWriter = null;
    }

    recordButton.Visible = true;
    stopRecord.Visible = false;
    saveResponse.Enabled = true;
}
Your recordButton_Click code isn't recording; it's piping data from a WaveIn to a WaveOut, which plays the data coming from your source (microphone) straight out of the output (speakers). It doesn't retain that data for later use, it just pipes it from one to the other. If you want to subsequently save that data to disk, you need to buffer it yourself.
The saveResponse_Click handler, on the other hand, starts recording data from the microphone directly to a wave file on disk. If you click your Save Response button, wait a bit, then click your Stop button, you should get a recorded wave file.
If you want to record directly to disk, this is fine. If you want to record to memory and then optionally write to disk, you need to hold on to the data as it comes in. Perhaps use a memory stream to hold the data while recording, then write that to the WaveFileWriter when it comes time to save the file.
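One way to do the in-memory variant (this is just a sketch of mine, not code from the question or the original answer) is to append each DataAvailable buffer to a MemoryStream and only create the WaveFileWriter when the user actually saves; the recordedData field and SaveBufferedRecording method below are hypothetical names:

// Sketch: buffer microphone data in memory, write a WAV file only on demand.
// Assumes a WinForms class with the WaveIn field sourceStream from the question,
// and that recordedData = new MemoryStream() is created when recording starts.
private System.IO.MemoryStream recordedData;

private void sourceStream_DataAvailable(object sender, NAudio.Wave.WaveInEventArgs e)
{
    // Accumulate the raw bytes as they arrive instead of writing straight to disk.
    recordedData?.Write(e.Buffer, 0, e.BytesRecorded);
}

private void SaveBufferedRecording(string path)
{
    // Flush everything buffered so far into a proper WAV file in one go.
    using (var writer = new NAudio.Wave.WaveFileWriter(path, sourceStream.WaveFormat))
    {
        writer.Write(recordedData.GetBuffer(), 0, (int)recordedData.Length);
    }
}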
Here's the code I used for testing direct recording to a wave file on disk:
public WaveIn waveSource = null;
public WaveFileWriter waveFile = null;

private void StartBtn_Click(object sender, EventArgs e)
{
    StartBtn.Enabled = false;
    StopBtn.Enabled = true;

    waveSource = new WaveIn();
    waveSource.WaveFormat = new WaveFormat(44100, 1);

    waveSource.DataAvailable += new EventHandler<WaveInEventArgs>(waveSource_DataAvailable);
    waveSource.RecordingStopped += new EventHandler<StoppedEventArgs>(waveSource_RecordingStopped);

    waveFile = new WaveFileWriter(@"C:\Temp\Test0001.wav", waveSource.WaveFormat);

    waveSource.StartRecording();
}

private void StopBtn_Click(object sender, EventArgs e)
{
    StopBtn.Enabled = false;
    waveSource.StopRecording();
}

void waveSource_DataAvailable(object sender, WaveInEventArgs e)
{
    if (waveFile != null)
    {
        waveFile.Write(e.Buffer, 0, e.BytesRecorded);
        waveFile.Flush();
    }
}

void waveSource_RecordingStopped(object sender, StoppedEventArgs e)
{
    if (waveSource != null)
    {
        waveSource.Dispose();
        waveSource = null;
    }
    if (waveFile != null)
    {
        waveFile.Dispose();
        waveFile = null;
    }

    StartBtn.Enabled = true;
}
I am trying to record an audio stream from a microphone to a file (WAV format).
By default, the file is recorded in stereo with the IEEE Float codec, but I need to record the audio in PCM format (16 kHz, mono).
Where should this format be used in this program code?
(the need_wave_format variable)
The project uses the CSCore NuGet package.
Link to this sample project:
https://github.com/LordKmon/CsCoreRecordProblem
Code from the WinForm:
public partial class Form1 : Form
{
    private WasapiCapture m_SoundKeeper;
    private IWriteable m_Writer;
    private IWaveSource m_FinalSource;

    public Form1()
    {
        InitializeComponent();
    }

    private void btn_StartRecord_Click(object sender, EventArgs event_args)
    {
        //Find and set Device (microphone)
        MMDevice selected_device = null;
        using (var deviceEnumerator = new MMDeviceEnumerator())
        using (var deviceCollection = deviceEnumerator.EnumAudioEndpoints(DataFlow.Capture, DeviceState.Active))
        {
            selected_device = deviceCollection[0];
        }

        // Format that I needed
        WaveFormat need_wave_format = new WaveFormat(16000, 16, 1, AudioEncoding.Pcm);

        // Start record
        m_SoundKeeper = new WasapiCapture();
        m_SoundKeeper.Device = selected_device;
        m_SoundKeeper.Initialize();

        var soundInSource = new SoundInSource(m_SoundKeeper);
        var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());
        m_FinalSource = singleBlockNotificationStream.ToWaveSource();

        m_Writer = new WaveWriter("output.wav", m_FinalSource.WaveFormat);

        byte[] buffer = new byte[m_FinalSource.WaveFormat.BytesPerSecond / 2];
        soundInSource.DataAvailable += (s, e) =>
        {
            int read;
            while ((read = m_FinalSource.Read(buffer, 0, buffer.Length)) > 0)
                m_Writer.Write(buffer, 0, read);
        };

        l_Status.Text = "RECORD !!!";
        m_SoundKeeper.Start();
    }

    private void btn_Stop_Click(object sender, EventArgs e)
    {
        if (m_SoundKeeper == null)
            return;

        m_SoundKeeper.Stop();
        m_SoundKeeper.Dispose();
        m_SoundKeeper = null;

        m_FinalSource.Dispose();
        if (m_Writer is IDisposable)
            ((IDisposable)m_Writer).Dispose();

        l_Status.Text = "...";
    }
}
Where should I use the need_wave_format variable in this code so that the output is a WAV file in PCM format?
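One plausible approach (an assumption on my part, not something confirmed in this thread) is to let CSCore convert the captured stream before it reaches the writer, using its fluent conversion extensions, rather than passing need_wave_format to WasapiCapture directly:

// Sketch only: convert the capture to 16 kHz, mono, 16-bit PCM before writing.
// Assumes CSCore's ChangeSampleRate / ToSampleSource / ToMono / ToWaveSource extensions.
var soundInSource = new SoundInSource(m_SoundKeeper);

m_FinalSource = soundInSource
    .ChangeSampleRate(16000)   // target sample rate
    .ToSampleSource()
    .ToMono()                  // single channel
    .ToWaveSource(16);         // 16 bits per sample => PCM

// The writer then inherits the converted format rather than the device's mix format.
m_Writer = new WaveWriter("output.wav", m_FinalSource.WaveFormat);

With a chain like that in place, need_wave_format itself would no longer be needed; the conversion chain defines the output format.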
I am trying to record the speaker sound to a wave file using NAudio's WasapiLoopbackCapture by writing the stream of bytes it makes available. WasapiLoopbackCapture.DataAvailable's BytesRecorded should be 0 if there is no sound; however, in my case I am getting a byte count in BytesRecorded even though the speakers are silent. Could you please let me know what's wrong here?
class CallResponse
{
    private WaveFileWriter _writer;
    private WasapiLoopbackCapture _waveIn;
    private string _inFile;
    private string _inFileCompressed;
    private int _duration;

    public bool _isRecording;
    public bool _speechDetected;

    public CallResponse()
    {
        _inFile = @"C:\Naresh\test.wav";
        _inFileCompressed = @"C:\Naresh\test16Hz.wav";

        _waveIn = new WasapiLoopbackCapture();
        _waveIn.DataAvailable += (s, e) =>
        {
            Console.WriteLine(e.BytesRecorded);
            _writer.Write(e.Buffer, 0, e.BytesRecorded);
            if (_writer.Position > _waveIn.WaveFormat.AverageBytesPerSecond * _duration)
            {
                Console.Write("\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\bRecording stopped...");
                _waveIn.StopRecording();
            }
        };
        _waveIn.RecordingStopped += (s, e) =>
        {
            if (_writer != null)
            {
                _writer.Close();
                _writer.Dispose();
                _writer = null;
            }
            Console.Write("\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\bCompressing Audio...");
            using (var reader = new AudioFileReader(_inFile))
            {
                var resampler = new WdlResamplingSampleProvider(reader, 16000);
                WaveFileWriter.CreateWaveFile16(_inFileCompressed, resampler);
            }
            _isRecording = false;
        };
    }

    public void DisposeObjects()
    {
        if (_waveIn != null)
        {
            _waveIn.Dispose();
            _waveIn = null;
        }
    }

    public void StartRecording(int duration = 5)
    {
        _writer = new WaveFileWriter(_inFile, _waveIn.WaveFormat);
        this._duration = duration;
        _speechDetected = false;
        _isRecording = true;

        Console.WriteLine("\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\bRecording....");
        _waveIn.StartRecording();
    }
}
If something is playing audio, then WasapiLoopbackCapture will capture that audio, even if it contains silence. So there's nothing particularly wrong or surprising about getting non-zero BytesRecorded values. In fact, if no applications are sending audio to the device being captured, what typically happens is that you won't get any DataAvailable callbacks at all.
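If the goal is to distinguish real audio from a stream of (near-)zero samples, one option is to inspect each buffer before writing it. Here is a minimal sketch of mine, assuming the capture format is 32-bit IEEE float (the usual WasapiLoopbackCapture mix format; check _waveIn.WaveFormat to be sure):

// Returns true when every float sample in the captured buffer is below a small threshold.
static bool IsSilent(byte[] buffer, int bytesRecorded, float threshold = 0.001f)
{
    for (int i = 0; i + 4 <= bytesRecorded; i += 4)
    {
        float sample = BitConverter.ToSingle(buffer, i);
        if (Math.Abs(sample) > threshold)
            return false; // audible signal found
    }
    return true; // nothing above the threshold
}

Inside the DataAvailable handler you could then skip (or just count) buffers for which IsSilent(e.Buffer, e.BytesRecorded) returns true.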
I'm sending audio from a microcontroller to a PC using DMA over LAN (TCP):
while (1) {
    U32 max;
    int r, i;

    main_TcpNet();

    if (tcpSend & sendBuffer)
    {
        if (selectBuffer)
        {
            send_datalog(ADC_RegularConvertedValueTab2, sizeof(ADC_RegularConvertedValueTab2));
            sendBuffer = 0;
        }
        else
        {
            send_datalog(ADC_RegularConvertedValueTab, sizeof(ADC_RegularConvertedValueTab));
            sendBuffer = 0;
        }
        main_TcpNet();
    }
}
I need to play it in real time. This is what I've done so far using NAudio:
byte[] recBuff = new byte[1400];

public void OnDataReceived(IAsyncResult asyn)
{
    try
    {
        SocketPacket theSockId = (SocketPacket)asyn.AsyncState;
        int iRx = theSockId.thisSocket.EndReceive(asyn);

        recBuff[count] = theSockId.dataBuffer[0];
        count++;

        if (count >= 1400)
        {
            //--------------------------------------------------------------------
            for (int i = 0; i < 1400; i += 2)
                recieveSound[i / 2] = recBuff[i] + (recBuff[i + 1] * 256); //turn back to 16bit
            //--------------------------------------------------------------------
            foreach (int data in recieveSound)
                sound.Add(data);
            //----------------------------------
            if (playStauts)
            {
                if (firstplay)
                {
                    IWaveProvider provider = new RawSourceWaveStream(
                        new MemoryStream(recBuff), new WaveFormat());
                    _waveOut.Init(provider);
                    _waveOut.Play();
                    //playThread.Start();
                    //firstplay = false;
                }
            }
            else
            {
                player.Stop();
            }

            count = 0; //RESET THE RecBuff
        }
        //---------------------------------------------------------------
    }
    catch (ObjectDisposedException)
    {
        System.Diagnostics.Debugger.Log(0, "1", "\nOnDataReceived: Socket has been closed\n");
    }
    catch (SocketException se)
    {
        MessageBox.Show(se.Message);
    }
}

private void exitToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (m_clientSocket != null)
    {
        m_clientSocket.Close();
        m_clientSocket = null;
    }
    Close();
}

private void frmMain_Load(object sender, EventArgs e)
{
    playThread = new Thread(new ThreadStart(play));
    player = new SoundPlayer(filePath);
    toolStriplbIP.Text = "Your IP: " + GetIP();
    btnDisconnect.Enabled = false;
}

#region Palying Sound
private void btnPlay_Click(object sender, EventArgs e)
{
    try
    {
        //Array.Clear(sound, 0, sound.Count);
        buffCount = 0;
        offsetSound = 0;
        sound.Clear();

        Object objData = "7";
        byte[] byData = System.Text.Encoding.ASCII.GetBytes(objData.ToString());
        if (m_clientSocket != null)
            m_clientSocket.Send(byData);

        playStauts = true;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}

private void btnPause_Click(object sender, EventArgs e)
{
    playStauts = false;
}
#endregion

public void play()
{
    while (true)
    {
        using (SoundPlayer player = new SoundPlayer(filePath))
        {
            //????????
        }
    }
}
I just hear something like a bazillion buzzes per second. But when I save it and then play it back, I hear the song very clearly and loudly.
What is wrong? How can I play my byte array while it's growing?
Does this even work for me?
byte[] bytes = new byte[1400];
IWaveProvider provider = new RawSourceWaveStream(
    new MemoryStream(bytes), new WaveFormat());
_waveOut.Init(provider);
_waveOut.Play();
For one thing, you're using the default WaveFormat, which may or may not be correct. Disagreement between source and destination formats will definitely cause you problems.
Once you're sure the WaveFormat is correct, I would suggest using a BufferedWaveProvider as the input to your wave player rather than the MemoryStream, something like this:
WaveFormat Format = new WaveFormat(/* fill in the right parameters here */);
BufferedWaveProvider Provider = new BufferedWaveProvider(Format);
Then, whenever you're happy with your recBuff, you just call Provider.AddSamples to drop the data into the BufferedWaveProvider, which will then be picked up by your WaveOut player.
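As an example, a minimal sketch of that wiring might look like the following; the 8 kHz, 16-bit, mono format is an assumption, so substitute whatever your ADC and DMA setup actually produce:

// Set up once: a buffered provider in the real capture format, feeding a WaveOut.
WaveFormat format = new WaveFormat(8000, 16, 1);              // assumed format
BufferedWaveProvider provider = new BufferedWaveProvider(format);
WaveOut waveOut = new WaveOut();
waveOut.Init(provider);
waveOut.Play();

// Then, whenever a full packet has been assembled in recBuff:
provider.AddSamples(recBuff, 0, recBuff.Length);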
There's some other strangeness going on. Are you receiving only one byte at a time? It looks like that's what your asynchronous handler is doing. This might not be the best thing, since that will result in lots and lots of context switching. If you're receiving more than one byte at a time, then you're only grabbing the first one and ignoring the rest. That will undoubtedly result in "unexpected" sounds during playback.
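As a sketch of what the receive handler might do instead (assuming theSockId.dataBuffer holds all the bytes just received, and provider is the BufferedWaveProvider from the sketch above):

int iRx = theSockId.thisSocket.EndReceive(asyn);

// Hand every byte from this receive to the player's buffer instead of keeping only dataBuffer[0].
provider.AddSamples(theSockId.dataBuffer, 0, iRx);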
I will try my best to explain the issue clearly. I'm using the code from
http://msdn.microsoft.com/en-us/magazine/dn385710.aspx
to create a photo-sharing app. I am able to FTP images to my server fine. The issue I'm facing is when I try to preview the picture and then FTP it. The FTP still transfers the file, but the file size is always 0. I'm not sure if this is a bug or possibly me not disposing of a certain object before the FTP. Below is my code:
Page 1
BitmapImage bitmapImage;

public PhotoPreview()
{
    InitializeComponent();
    btnYes.Tap += btnYes_Tap;

    imgPreview.Width = G.getScreenWidth();
    imgPreview.Height = G.getScreenHeight() - 250;

    // Windows Phone 8 bug: when the bitmap image is set, it blocks the FTP process thread.
    // Will perform the FTP on a separate page.
    previewPhoto();
}

void previewPhoto()
{
    bitmapImage = new BitmapImage();
    bitmapImage.SetSource(G.myStream);
    imgPreview.Source = bitmapImage;
}

private void btnYes_Tap(object sender, System.Windows.Input.GestureEventArgs e)
{
    disposeImage(bitmapImage);
    NavigationService.Navigate(new Uri("/PhotoFTP.xaml", UriKind.Relative));
}

private void disposeImage(BitmapImage img)
{
    if (img != null)
    {
        try
        {
            using (var ms = new MemoryStream(new byte[] { 0x0 }))
            {
                img = new BitmapImage();
                img.SetSource(ms);
            }
        }
        catch (Exception e)
        {
            System.Diagnostics.Debug.WriteLine("ImageDispose FAILED " + e.Message);
        }
    }
}
Page 2
const string
    IP_ADDRESS = "888.88.888",
    FTP_USERNAME = "test",
    FTP_PASSWORD = "test123";

string filename;
FtpClient ftpClient = null;
TestLogger logger = null;

public PhotoFTP()
{
    InitializeComponent();

    DateTime thisDay = DateTime.Today;
    string timestamp = thisDay.Hour.ToString() + "_" + thisDay.Minute.ToString() + "_" + thisDay.Second.ToString();
    filename = timestamp + ".jpg";
}

protected override void OnNavigatedTo(NavigationEventArgs e)
{
    Test_connect();
}

private async void Test_connect()
{
    logger = TestLogger.GetDefault(this.Dispatcher);
    lstLogs.ItemsSource = logger.Logs;

    ftpClient = new FtpClient(IP_ADDRESS, this.Dispatcher);
    ftpClient.FtpConnected += ftpClient_FtpConnected;
    ftpClient.FtpFileUploadSucceeded += ftpClient_FtpFileUploadSucceeded;
    ftpClient.FtpFileUploadFailed += ftpClient_FtpFileUploadFailed;
    ftpClient.FtpAuthenticationSucceeded += ftpClient_FtpAuthenticationSucceeded;
    ftpClient.FtpAuthenticationFailed += ftpClient_FtpAuthenticationFailed;

    logger = TestLogger.GetDefault(this.Dispatcher);

    await ftpClient.ConnectAsync();
    logger.AddLog("Connecting...");
}

async void ftpClient_FtpConnected(object sender, EventArgs e)
{
    logger.AddLog("Preparing...");
    await (sender as FtpClient).AuthenticateAsync(FTP_USERNAME, FTP_PASSWORD);
}

private async void Test_upload()
{
    logger.AddLog("Uploading photo...");
    await ftpClient.UploadFileAsync(G.myStream, "username_timestamp.jpg");
}

void ftpClient_FtpAuthenticationFailed(object sender, EventArgs e)
{
    logger.AddLog("Connection error.");
}

void ftpClient_FtpAuthenticationSucceeded(object sender, EventArgs e)
{
    logger.AddLog("Connection established.");
    Test_upload();
}

void ftpClient_FtpFileUploadFailed(object sender, FtpFileTransferFailedEventArgs e)
{
    logger.AddLog("Failed.");
}

Boolean firstTime = true;

void ftpClient_FtpFileUploadSucceeded(object sender, FtpFileTransferEventArgs e)
{
    logger.AddLog("Completed.");
}
If I comment out the line previewPhoto(); on page 1, the file FTPs to my server fine. I believe the issue is the line
bitmapImage.SetSource(G.myStream);
I've also tried creating two separate streams, one to preview the photo and the other to FTP. The result was still a 0-size file when FTPed to my server.
It's because the BitmapImage reads to the end of the stream, so the FtpClient has no data to read/upload.
Use Stream.Seek to reset the stream pointer back to the beginning.
G.myStream.Seek(0, SeekOrigin.Begin);
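For example, assuming the upload happens in Test_upload on page 2 and that G.myStream is seekable, the rewind would go right before the upload call:

private async void Test_upload()
{
    logger.AddLog("Uploading photo...");

    // The BitmapImage on the preview page read the stream to the end; rewind it before uploading.
    G.myStream.Seek(0, SeekOrigin.Begin);

    await ftpClient.UploadFileAsync(G.myStream, "username_timestamp.jpg");
}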
I'm experimenting with how to play an MP3 using NAudio. My simple app has one Windows form and one button to play/pause the music. The app, however, has two major problems:
1. It was intended that if the music is playing and the play button is pressed, the app should stop playing. Instead, when the button is re-pressed, the app restarts the music and then (sometimes) throws an exception.
2. If the button is pressed two or three times without any delay, the app throws a NAudio.MmException (Message=InvalidParameter calling acmStreamClose).
Can someone tell me what's wrong with my code? Below is my code:
using System;
using System.Windows.Forms;

namespace NaudioTesting
{
    public partial class Form1 : Form
    {
        public Form1()
        {
            InitializeComponent();
        }

        private NAudio.Wave.BlockAlignReductionStream stream = null;
        private NAudio.Wave.DirectSoundOut output = null;

        public void LoadFile(string filePath)
        {
            DisposeWave();

            if (filePath.EndsWith(".mp3"))
            {
                NAudio.Wave.WaveStream pcm =
                    NAudio.Wave.WaveFormatConversionStream.CreatePcmStream(new NAudio.Wave.Mp3FileReader(filePath));
                stream = new NAudio.Wave.BlockAlignReductionStream(pcm);
            }
            else if (filePath.EndsWith(".wav"))
            {
                NAudio.Wave.WaveStream pcm = new NAudio.Wave.WaveChannel32(new NAudio.Wave.WaveFileReader(filePath));
                stream = new NAudio.Wave.BlockAlignReductionStream(pcm);
            }
            else throw new InvalidOperationException("Not a correct audio file type.");

            output = new NAudio.Wave.DirectSoundOut();
            output.Init(stream);
            output.Play();
        }

        private void playPauseButton_Click(object sender, EventArgs e)
        {
            string filePath = "GetLoud.mp3";
            LoadFile(filePath);

            if (output != null)
            {
                if (output.PlaybackState == NAudio.Wave.PlaybackState.Playing) output.Pause();
                else if (output.PlaybackState == NAudio.Wave.PlaybackState.Paused) output.Play();
            }
        }

        private void DisposeWave()
        {
            try
            {
                if (output != null)
                {
                    if (output.PlaybackState == NAudio.Wave.PlaybackState.Playing) output.Stop();
                    output.Dispose();
                    output = null;
                }
                if (stream != null)
                {
                    stream.Dispose();
                    stream = null;
                }
            }
            catch (NAudio.MmException)
            {
                throw;
            }
        }

        private void Form1_FormClosing(object sender, FormClosingEventArgs e)
        {
            DisposeWave();
        }
    }
}
Looking at the DirectSoundOut source, the implementation of Play and Pause doesn't support resuming. In other words, what's happening to you is exactly what it should: calling Play always starts from the beginning of the stream.
You should use WaveOut instead. It supports resuming by calling Play again, just as your code expects:
output = new NAudio.Wave.WaveOut();
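As a rough sketch (my illustration, not part of the original answer) of how the click handler could work once the output field is a WaveOut: load the file only on the first click, then toggle between Pause and Play on later clicks instead of calling LoadFile every time.

private void playPauseButton_Click(object sender, EventArgs e)
{
    // First click: create the stream and start playback via LoadFile.
    if (output == null)
    {
        LoadFile("GetLoud.mp3");
        return;
    }

    // Later clicks: toggle pause/resume instead of reloading the file.
    if (output.PlaybackState == NAudio.Wave.PlaybackState.Playing)
        output.Pause();
    else if (output.PlaybackState == NAudio.Wave.PlaybackState.Paused)
        output.Play();
}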