I copied some code from this forum that generates a wav file and writes it into a MemoryStream. I try to play it when clicking a button, with some arguments passed in: PlayBeep(8000, 500, 16383);
But the emulator crashes and says:
An exception of type 'System.InvalidOperationException' occurred in
Microsoft.Xna.Framework.ni.dll but was not handled in user code
Additional information: FrameworkDispatcher.Update has not been called
Here is the code:
public static void PlayBeep(UInt16 frequency, int msDuration, UInt16 volume = 16383)
{
    var mStrm = new MemoryStream();
    BinaryWriter writer = new BinaryWriter(mStrm);
    const double TAU = 2 * Math.PI;
    int formatChunkSize = 16;
    int headerSize = 8;
    short formatType = 1;
    short tracks = 1;
    int samplesPerSecond = 44100;
    short bitsPerSample = 16;
    short frameSize = (short)(tracks * ((bitsPerSample + 7) / 8));
    int bytesPerSecond = samplesPerSecond * frameSize;
    int waveSize = 4;
    int samples = (int)((decimal)samplesPerSecond * msDuration / 1000);
    int dataChunkSize = samples * frameSize;
    int fileSize = waveSize + headerSize + formatChunkSize + headerSize + dataChunkSize;
    // var encoding = new System.Text.UTF8Encoding();
    writer.Write(0x46464952); // = encoding.GetBytes("RIFF")
    writer.Write(fileSize);
    writer.Write(0x45564157); // = encoding.GetBytes("WAVE")
    writer.Write(0x20746D66); // = encoding.GetBytes("fmt ")
    writer.Write(formatChunkSize);
    writer.Write(formatType);
    writer.Write(tracks);
    writer.Write(samplesPerSecond);
    writer.Write(bytesPerSecond);
    writer.Write(frameSize);
    writer.Write(bitsPerSample);
    writer.Write(0x61746164); // = encoding.GetBytes("data")
    writer.Write(dataChunkSize);
    {
        double theta = frequency * TAU / (double)samplesPerSecond;
        // 'volume' is UInt16 with range 0 thru UInt16.MaxValue (= 65 535);
        // we need 'amp' in the range 0 thru Int16.MaxValue (= 32 767),
        // so we halve it (note: '>> 2' would divide by 4, not 2)
        double amp = volume >> 1;
        for (int step = 0; step < samples; step++)
        {
            short s = (short)(amp * Math.Sin(theta * (double)step));
            writer.Write(s);
        }
    }
    mStrm.Seek(0, SeekOrigin.Begin);
    SoundEffect mySoundEffect = SoundEffect.FromStream(mStrm);
    //mySoundEffect.Play(); //Crashing here
    writer.Close();
    mStrm.Close();
}
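The exception text itself points at the usual cause: outside a Game loop (e.g. in a Silverlight/Windows Phone app), XNA's SoundEffect requires FrameworkDispatcher.Update() to have been called. A minimal sketch of the kind of fix usually suggested, calling it once right before Play() (real apps typically pump it periodically from a timer instead):

    // Sketch: pump the XNA dispatcher before playing a SoundEffect outside a
    // Game loop; in a real app call this periodically (e.g. from a DispatcherTimer)
    FrameworkDispatcher.Update();
    mySoundEffect.Play();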
I have a 3D Photon Focus camera (MV1-D2048x1088-3D06-760-G2-8) and I am using C# with the Pleora eBUS SDK version 5.1.1 on a Windows 10 machine. The camera is set to scan a laser line in LineFinder mode (DataFormat3D = 2) and returns the data in a buffer (payload = 2 x 2048 = 4096 bytes). The payload seems correct. I want to save this data, but I am having difficulty. How can I get the buffer into an array (or some other structure) so I can save it to a file stream?
My code uses the .DataPointer property from the Pleora eBUS SDK, but I do not understand what it is doing. I have included the manual here: MAN075_PhotonFocus.
private unsafe static void ThreadProc(object aParameters)
{
    object[] lParameters = (object[])aParameters;
    MainForm lThis = (MainForm)lParameters[0];
    for (;;)
    {
        if (lThis.mIsStopping)
        {
            // Signaled to terminate thread, return.
            return;
        }
        PvBuffer lBuffer = null;
        PvResult lOperationResult = new PvResult(PvResultCode.OK);
        // Retrieve next buffer from acquisition pipeline
        PvResult lResult = lThis.mStream.RetrieveBuffer(ref lBuffer, ref lOperationResult, 100);
        if (lResult.IsOK)
        {
            // Operation result of buffer is OK, display.
            if (lOperationResult.IsOK)
            {
                //lThis.displayControl.Display(lBuffer);
                uint bSize = lBuffer.GetPayloadSize();
                PvImage image1 = lBuffer.Image;
                uint height1 = image1.Height;
                uint width1 = image1.Width;
                uint offx1 = image1.OffsetX;
                uint offy1 = image1.OffsetY;
                PvPixelType imgpixtype = image1.PixelType;
                image1.Alloc(width1, (uint)2, imgpixtype);
                byte* data_pnt = image1.DataPointer;
                byte[] MSB_array = new byte[(int)width1];
                int buff_size = 2 * (int)width1;
                byte[] pix_array = new byte[buff_size];
                ulong tStamp = lBuffer.Timestamp;
                string msgOut = (bSize.ToString() + " TimeStamp " + tStamp.ToString() + " width " + width1.ToString());
                Console.WriteLine(msgOut);
                // First row: most significant byte of each column
                for (int i = 0; i < width1; i++)
                {
                    Console.Write((uint)*data_pnt);
                    MSB_array[i] = *data_pnt;
                    data_pnt += 1;
                }
                data_pnt += 1;
                Console.WriteLine(height1.ToString());
                // Second row: high nibble extends the integer part,
                // low nibble is the fractional part
                for (int i = 0; i < width1; i++)
                {
                    ushort msb1 = MSB_array[i];
                    ushort last_4 = (ushort)(*data_pnt & 0x0F);
                    int integer1 = (msb1 << 4) + (ushort)(*data_pnt >> 4);
                    double dec_part = (float)last_4 / (float)16;
                    double val1 = (float)integer1 + dec_part;
                    Console.WriteLine(val1.ToString());
                    data_pnt += 1;
                }
                Console.WriteLine(height1.ToString());
            }
            else
            {
                uint bSize = lBuffer.GetPayloadSize();
                ulong tStamp = lBuffer.Timestamp;
                string msgOut = (bSize.ToString() + " BAD RESULT TimeStamp " + tStamp.ToString());
                Console.WriteLine(msgOut);
            }
            // We have an image - do some processing (...) and VERY IMPORTANT,
            // re-queue the buffer in the stream object.
            lThis.mStream.QueueBuffer(lBuffer);
        }
    }
}
My current solution is to loop through the buffer by incrementing the pointer and saving the bytes into a new array (MSB_array). Because of the way this data is packed (see the attached image in the question), I had to read the next line, bit-shift it, and add it to the byte in MSB_array to get a usable fixed-point value.
for (int i = 0; i < width1; i++)
{
    Console.Write((uint)*data_pnt);
    MSB_array[i] = *data_pnt;
    data_pnt += 1;
}
data_pnt += 1;
Console.WriteLine(height1.ToString());
for (int i = 0; i < width1; i++)
{
    ushort msb1 = MSB_array[i];
    ushort last_4 = (ushort)(*data_pnt & 0x0F);
    int integer1 = (msb1 << 4) + (ushort)(*data_pnt >> 4);
    double dec_part = (float)last_4 / (float)16;
    double val1 = (float)integer1 + dec_part;
    Console.WriteLine(val1.ToString());
    data_pnt += 1;
}
I am only writing it out to the console for now, but the data is correct. There may be a better/faster way than the for loop with the pointer; a post showing that would be appreciated.
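One option (a sketch, not tested against the eBUS SDK; it assumes image1.DataPointer is valid and that the payload really is two rows of width1 bytes as described above) is to copy the whole unmanaged buffer into a managed array in one call with Marshal.Copy, then save or decode from that array instead of walking the pointer byte by byte:

    int w = (int)width1;
    byte* data_pnt = image1.DataPointer;
    byte[] pix_array = new byte[2 * w];
    // Copy the unmanaged buffer into managed memory in a single call
    System.Runtime.InteropServices.Marshal.Copy((IntPtr)data_pnt, pix_array, 0, pix_array.Length);
    // pix_array can now be written straight to disk...
    System.IO.File.WriteAllBytes("scan_" + tStamp.ToString() + ".bin", pix_array);
    // ...or decoded like the pointer loops above (note the original loop skips
    // one extra byte before the second row; adjust the offset if your payload
    // is laid out differently):
    for (int i = 0; i < w; i++)
    {
        int integer1 = (pix_array[i] << 4) + (pix_array[w + i] >> 4);
        double val1 = integer1 + (pix_array[w + i] & 0x0F) / 16.0;
        Console.WriteLine(val1);
    }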
I'm developing a simple program that analyses the frequencies of audio files.
Using an FFT length of 8192 and a sample rate of 44100, if I use as input a constant-frequency wav file - say 65Hz, 200Hz or 300Hz - the output is a constant graph at that value.
If I use a recording of someone speaking, the frequencies have peaks as high as 4000Hz, with an average of 450-ish on a 90-second file.
At first I thought it was because the recording was stereo, but converting it to mono with the exact same bitrate as the test files doesn't change much (the average goes down from 492 to 456, but that's still way too high).
Has anyone got an idea as to what could cause this?
I think I shouldn't take the highest value, but perhaps an average or a median value?
EDIT: using the average of the magnitudes per 8192-byte buffer and getting the index that's closest to that magnitude messes everything up.
This is the code for the handler of the event the SampleAggregator fires when it has calculated the FFT for the current buffer:
void FftCalculated(object sender, FftEventArgs e)
{
    int length = e.Result.Length;
    float[] magnitudes = new float[length];
    for (int i = 0; i < length / 2; i++)
    {
        float real = e.Result[i].X;
        float imaginary = e.Result[i].Y;
        magnitudes[i] = (float)(10 * Math.Log10(Math.Sqrt((real * real) + (imaginary * imaginary))));
    }
    float max_mag = float.MinValue;
    float max_index = -1;
    for (int i = 0; i < length / 2; i++)
    {
        if (magnitudes[i] > max_mag)
        {
            max_mag = magnitudes[i];
            max_index = i;
        }
    }
    var currentFrequency = max_index * SAMPLERATE / 8192;
    Console.WriteLine("frequency be " + currentFrequency);
}
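For reference, if an average is what's wanted, one common choice is the magnitude-weighted average frequency (the spectral centroid) rather than the bin whose magnitude is closest to the mean. A sketch, assuming linear magnitudes (not the dB values computed above) and the same sample rate and FFT length:

    // Hypothetical helper: magnitude-weighted average frequency over the
    // first half of the spectrum (linear magnitudes, not dB)
    float SpectralCentroid(float[] magnitudes, int fftLength, float sampleRate)
    {
        double weightedSum = 0, total = 0;
        for (int i = 0; i < fftLength / 2; i++)
        {
            double freq = i * sampleRate / fftLength; // centre frequency of bin i
            weightedSum += freq * magnitudes[i];
            total += magnitudes[i];
        }
        return total > 0 ? (float)(weightedSum / total) : 0f;
    }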
ADDITION: this is the code that reads the file and sends it to the analysing part:
using (var rdr = new WaveFileReader(audioFilePath))
{
    var newFormat = new WaveFormat(Convert.ToInt32(SAMPLERATE/*44100*/), 16, 1);
    byte[] buffer = new byte[8192];
    var audioData = new AudioData(); //custom class for project
    using (var conversionStream = new WaveFormatConversionStream(newFormat, rdr))
    {
        // Used to send audio in realtime, it's a timestamps issue for the graphs
        // I'm working on fixing this, but it has lower priority so disregard it :p
        TimeSpan audioDuration = conversionStream.TotalTime;
        long audioLength = conversionStream.Length;
        int waitTime = (int)(audioDuration.TotalMilliseconds / audioLength * 8192);
        while (conversionStream.Read(buffer, 0, buffer.Length) != 0)
        {
            audioData.AudioDataBase64 = Utils.Base64Encode(buffer);
            Thread.Sleep(waitTime);
            SendMessage("AudioData", Utils.StringToAscii(AudioData.GetJSON(audioData)));
        }
        Console.WriteLine("Reached End of File");
    }
}
This is the code that receives the audio data:
{
    var audioData = new AudioData();
    audioData = AudioData.GetStateFromJSON(Utils.AsciiToString(receivedMessage));
    QueueAudio(Utils.Base64Decode(audioData.AudioDataBase64));
}
followed by
var waveFormat = new WaveFormat(Convert.ToInt32(SAMPLERATE/*44100*/), 16, 1);
_bufferedWaveProvider = new BufferedWaveProvider(waveFormat);
_bufferedWaveProvider.BufferDuration = new TimeSpan(0, 2, 0);

void QueueAudio(byte[] data)
{
    _bufferedWaveProvider.AddSamples(data, 0, data.Length);
    if (_bufferedWaveProvider.BufferedBytes >= fftLength)
    {
        byte[] buffer = new byte[_bufferedWaveProvider.BufferedBytes];
        _bufferedWaveProvider.Read(buffer, 0, _bufferedWaveProvider.BufferedBytes);
        for (int index = 0; index < buffer.Length; index += 2)
        {
            // Reassemble little-endian 16-bit samples and normalise to [-1, 1]
            short sample = (short)(buffer[index] | buffer[index + 1] << 8);
            float sample32 = sample / 32767f;
            sampleAggregator.Add(sample32);
        }
    }
}
And then the SampleAggregator fires the event above when it's done with the FFT.
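For context, a minimal SampleAggregator in the style of the NAudio demo code might look like the sketch below. This is an assumption about the implementation, since it isn't shown; the FftEventArgs constructor is assumed, and Complex here is the NAudio.Dsp type with X/Y fields used in the handler above:

    // Sketch of an NAudio-demo-style aggregator: buffers samples until it has
    // fftLength of them, then runs the FFT and raises FftCalculated
    class SampleAggregator
    {
        public event EventHandler<FftEventArgs> FftCalculated;
        private readonly NAudio.Dsp.Complex[] fftBuffer;
        private readonly int fftLength; // must be a power of two
        private int pos;

        public SampleAggregator(int fftLength)
        {
            this.fftLength = fftLength;
            fftBuffer = new NAudio.Dsp.Complex[fftLength];
        }

        public void Add(float sample)
        {
            fftBuffer[pos].X = sample;
            fftBuffer[pos].Y = 0;
            if (++pos >= fftLength)
            {
                pos = 0;
                NAudio.Dsp.FastFourierTransform.FFT(true, (int)Math.Log(fftLength, 2.0), fftBuffer);
                FftCalculated?.Invoke(this, new FftEventArgs(fftBuffer));
            }
        }
    }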
I'm trying to parse a wav file. I'm not sure whether there can be multiple data chunks in a wav file, but I originally assumed there was only one, since the wav file format description I was reading only mentioned one.
But I noticed that the subchunk2size was very small (like 26) when the wav file being parsed was something like 36MB with a sample rate of 44100.
So I tried to parse it assuming there were multiple chunks, but after the 1st chunk there was no subchunk2id to be found.
To go chunk by chunk, I was using the below code
int chunkSize = System.BitConverter.ToInt32(strm, 40);
int widx = 44; //wav data starts at the 44th byte
//strm is a byte array of the wav file
while(widx < strm.Length)
{
widx += chunkSize;
if(widx < 1000)
{
//log "data" or "100 97 116 97" for the subchunkid
//This is only getting printed the 1st time though. All prints after that are garbage
Debug.Log( strm[widx] + " " + strm[widx+1] + " " + strm[widx+2] + " " + strm[widx+3]);
}
if(widx + 8 < strm.Length)
{
widx += 4;
chunkSize = System.BitConverter.ToInt32(strm, widx);
widx += 4;
}else
{
widx += 8;
}
}
A .wav file consists of three chunks:
Each chunk ID is 4 bytes long.
The first chunk is the "RIFF" chunk. It contains 8 bytes: the file size (4 bytes) and the name of the format (4 bytes, usually "WAVE").
The next chunk is the "fmt " chunk (the space in the chunk name is important). It contains the audio format (2 bytes), the number of channels (2 bytes), the sample rate (4 bytes), the byte rate (4 bytes), the block align (2 bytes) and the bits per sample (2 bytes).
The third and last chunk is the data chunk, which holds the actual sample data and amplitudes. It starts with 4 bytes for the data size, which is the number of bytes of data.
You can find further explanations of the properties of a .wav file here.
From this knowledge I have already created the following class:
public sealed class WaveFile
{
    //privates
    private int fileSize;
    private string format;
    private int fmtChunkSize;
    private int audioFormat;
    private int numChannels;
    private int sampleRate;
    private int byteRate;
    private int blockAlign;
    private int bitsPerSample;
    private int dataSize;
    private int[][] data; //One array per channel

    //publics
    public int FileSize => fileSize;
    public string Format => format;
    public int FmtChunkSize => fmtChunkSize;
    public int AudioFormat => audioFormat;
    public int NumChannels => numChannels;
    public int SampleRate => sampleRate;
    public int ByteRate => byteRate;
    public int BitsPerSample => bitsPerSample;
    public int DataSize => dataSize;
    public int[][] Data => data;

    public WaveFile(string path)
    {
        FileStream fs = File.OpenRead(path);
        LoadChunk(fs); //read RIFF chunk
        LoadChunk(fs); //read fmt chunk
        LoadChunk(fs); //read data chunk
        fs.Close();
    }

    private void LoadChunk(FileStream fs)
    {
        ASCIIEncoding Encoder = new ASCIIEncoding();
        byte[] bChunkID = new byte[4];
        fs.Read(bChunkID, 0, 4);
        string sChunkID = Encoder.GetString(bChunkID);
        byte[] ChunkSize = new byte[4];
        fs.Read(ChunkSize, 0, 4);
        if (sChunkID.Equals("RIFF"))
        {
            fileSize = BitConverter.ToInt32(ChunkSize, 0);
            byte[] Format = new byte[4];
            fs.Read(Format, 0, 4);
            this.format = Encoder.GetString(Format);
        }
        if (sChunkID.Equals("fmt "))
        {
            fmtChunkSize = BitConverter.ToInt32(ChunkSize, 0);
            byte[] audioFormat = new byte[2];
            fs.Read(audioFormat, 0, 2);
            this.audioFormat = BitConverter.ToInt16(audioFormat, 0);
            byte[] numChannels = new byte[2];
            fs.Read(numChannels, 0, 2);
            this.numChannels = BitConverter.ToInt16(numChannels, 0);
            byte[] sampleRate = new byte[4];
            fs.Read(sampleRate, 0, 4);
            this.sampleRate = BitConverter.ToInt32(sampleRate, 0);
            byte[] byteRate = new byte[4];
            fs.Read(byteRate, 0, 4);
            this.byteRate = BitConverter.ToInt32(byteRate, 0);
            byte[] blockAlign = new byte[2];
            fs.Read(blockAlign, 0, 2);
            this.blockAlign = BitConverter.ToInt16(blockAlign, 0);
            byte[] bitsPerSample = new byte[2];
            fs.Read(bitsPerSample, 0, 2);
            this.bitsPerSample = BitConverter.ToInt16(bitsPerSample, 0);
        }
        if (sChunkID.Equals("data"))
        {
            dataSize = BitConverter.ToInt32(ChunkSize, 0);
            data = new int[this.numChannels][];
            byte[] temp = new byte[dataSize];
            for (int i = 0; i < this.numChannels; i++)
            {
                data[i] = new int[this.dataSize / (numChannels * bitsPerSample / 8)];
            }
            for (int i = 0; i < data[0].Length; i++)
            {
                for (int j = 0; j < numChannels; j++)
                {
                    if (fs.Read(temp, 0, blockAlign / numChannels) > 0)
                    {
                        if (blockAlign / numChannels == 2)
                        { data[j][i] = BitConverter.ToInt16(temp, 0); } // 2 bytes per sample
                        else
                        { data[j][i] = BitConverter.ToInt32(temp, 0); } // 4 bytes per sample
                    }
                }
            }
        }
    }
}
Needed using-directives:
using System;
using System.IO;
using System.Text;
This class reads all chunks byte by byte and sets the properties. You just have to instantiate the class and it will expose all the properties of your selected wave file.
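A quick usage sketch (the file path is hypothetical):

    // Hypothetical path; point it at any PCM wav file
    var wav = new WaveFile(@"C:\temp\test.wav");
    Console.WriteLine("Channels: " + wav.NumChannels);
    Console.WriteLine("Sample rate: " + wav.SampleRate);
    Console.WriteLine("First sample, channel 0: " + wav.Data[0][0]);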
In the reference you added I don't see any mention of the chunk size being repeated for each data chunk...
Try something like this:
int chunkSize = System.BitConverter.ToInt32(strm, 40);
int widx = 44; //wav data starts at the 44th byte
//strm is a byte array of the wav file
while (widx < strm.Length)
{
    if (widx < 1000)
    {
        //log "data" or "100 97 116 97" for the subchunkid
        //This is only getting printed the 1st time though. All prints after that are garbage
        Debug.Log(strm[widx] + " " + strm[widx + 1] + " " + strm[widx + 2] + " " + strm[widx + 3]);
    }
    widx += chunkSize;
}
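Note that every RIFF chunk carries an 8-byte header (a 4-byte ID plus a 4-byte size), and chunk data is padded to an even length, so a more robust walk reads each header and then skips over the payload. A sketch under those assumptions:

    // Generic RIFF chunk walk: start right after the 12-byte RIFF/WAVE header,
    // read each chunk's ID and size, then skip its payload
    int widx = 12; // skip "RIFF" + size + "WAVE"
    while (widx + 8 <= strm.Length)
    {
        string id = System.Text.Encoding.ASCII.GetString(strm, widx, 4);
        int size = System.BitConverter.ToInt32(strm, widx + 4);
        Debug.Log(id + " (" + size + " bytes)");
        widx += 8 + size + (size & 1); // chunk data is word-aligned
    }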
I am trying to implement a function which takes a wav file and runs a hundredth of a second's worth of audio through the FFT from AForge. When I change the offset to alter where in the audio the FFT starts, I sometimes get results that I can show in my graph, but most of the time I get a complex array full of NaNs. Why could this be?
Here is my code.
public double[] test()
{
    OpenFileDialog file = new OpenFileDialog();
    file.ShowDialog();
    WaveFileReader reader = new WaveFileReader(file.FileName);
    byte[] data = new byte[reader.Length];
    reader.Read(data, 0, data.Length);
    samepleRate = reader.WaveFormat.SampleRate;
    bitDepth = reader.WaveFormat.BitsPerSample;
    channels = reader.WaveFormat.Channels;
    Console.WriteLine("audio has " + channels + " channels, a sample rate of " + samepleRate + " and bitdepth of " + bitDepth + ".");
    float[] floats = new float[data.Length / sizeof(float)];
    Buffer.BlockCopy(data, 0, floats, 0, data.Length);
    size = 2048;
    int inputSamples = samepleRate / 100;
    int offset = samepleRate * 15 * channels;
    int y = 0;
    Complex[] complexData = new Complex[size];
    float[] window = CalcWindowFunction(inputSamples);
    for (int i = 0; i < inputSamples; i++)
    {
        complexData[y] = new Complex(floats[i * channels + offset] * window[i], 0);
        y++;
    }
    while (y < size)
    {
        complexData[y] = new Complex(0, 0);
        y++;
    }
    FourierTransform.FFT(complexData, FourierTransform.Direction.Forward);
    double[] arr = new double[complexData.Length];
    for (int i = 0; i < complexData.Length; i++)
    {
        arr[i] = complexData[i].Magnitude;
    }
    Console.Write("complete, ");
    return arr;
}

private float[] CalcWindowFunction(int inputSamples)
{
    float[] arr = new float[size];
    for (int i = 0; i < size; i++)
    {
        arr[i] = 1;
    }
    return arr;
}
A complex array of NaNs is usually the result of one of the inputs to the FFT being a NaN. To debug, check all the values in the input array before the FFT and make sure they are within a valid range, given the audio input scaling.
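A quick check along those lines might look like this (a sketch; it also hints at a likely culprit in the code above: reinterpreting 16-bit PCM bytes as IEEE floats with Buffer.BlockCopy can easily produce NaNs, so an explicit conversion is shown too, under the assumption that bitDepth is 16):

    // Validate FFT input: flag NaNs and out-of-range values before transforming
    for (int i = 0; i < complexData.Length; i++)
    {
        double re = complexData[i].Re;
        if (double.IsNaN(re) || Math.Abs(re) > 1.0)
            Console.WriteLine("bad sample at " + i + ": " + re);
    }

    // Hedged alternative: decode 16-bit PCM explicitly instead of
    // reinterpreting the raw bytes as floats (assumes bitDepth == 16)
    float[] floats = new float[data.Length / 2];
    for (int i = 0; i < floats.Length; i++)
        floats[i] = BitConverter.ToInt16(data, i * 2) / 32768f;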
As the title says. Recently I have been using GDAL to read TerraSAR-X data, and splitting it into real and imaginary parts the way the NEST software does confuses me a lot. Any help and suggestions will be highly appreciated. Below is my implementation:
string dataPath = @"E:\SARDATA\SampleData\TerraSar-X\SO_000009564_0002_1\SO_000009564_0002_1\TSX1_SAR__SSC______HS_S_SRA_20090223T204240_20090223T204241\TSX1_SAR__SSC______HS_S_SRA_20090223T204240_20090223T204241.xml";
Gdal.AllRegister();
Dataset dataset = Gdal.OpenShared(dataPath, Access.GA_ReadOnly);
Band band = dataset.GetRasterBand(1);
int xSize = band.XSize;
int ySize = band.YSize;
short[] realArray = new short[xSize * ySize];
short[] imgArray = new short[xSize * ySize];
if (band.DataType == DataType.GDT_CInt16)
{
    short[] tmpArray = new short[2 * xSize * ySize];
    band.ReadRaster(0, 0, xSize, ySize, tmpArray, xSize, ySize, 0, 0);
    for (int i = 0; i < tmpArray.Length; i++)
    {
        realArray[i] = tmpArray[i / 2];
        imgArray[i] = tmpArray[i / 2 + 1];
    }
    tmpArray = null;
}
I think I have a solution to your problem. I also tried to read a complex TerraSAR-X file and ran into the same issue.
The complex formats pack two Int16 values into each element for CInt16, and two Int32 values for CInt32.
To read the complex data correctly, you should split each integer into two shorts. The corrected reading looks like this:
string dataPath = @"E:\SARDATA\SampleData\TerraSar-X\SO_000009564_0002_1\SO_000009564_0002_1\TSX1_SAR__SSC______HS_S_SRA_20090223T204240_20090223T204241\TSX1_SAR__SSC______HS_S_SRA_20090223T204240_20090223T204241.xml";
Gdal.AllRegister();
Dataset dataset = Gdal.OpenShared(dataPath, Access.GA_ReadOnly);
Band band = dataset.GetRasterBand(1);
int xSize = band.XSize;
int ySize = band.YSize;
short[] realArray = new short[xSize * ySize];
short[] imgArray = new short[xSize * ySize];
if (band.DataType == DataType.GDT_CInt16)
{
    // Each CInt16 pixel arrives as one 32-bit value holding both halves,
    // so read into an int[] and split each element into two shorts
    int[] tmpArray = new int[xSize * ySize];
    band.ReadRaster(0, 0, xSize, ySize, tmpArray, xSize, ySize, 0, 0);
    for (int i = 0; i < tmpArray.Length; i++)
    {
        int value = tmpArray[i];
        realArray[i] = (short)(value >> 16);
        imgArray[i] = (short)(value & 0xffff);
    }
    tmpArray = null;
}
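To sanity-check the split against what NEST displays, one can compute the amplitude image from the two arrays. A short sketch, using the standard |z| = sqrt(re^2 + im^2):

    // Amplitude (magnitude) per pixel, for comparison with NEST's intensity view
    double[] ampArray = new double[xSize * ySize];
    for (int i = 0; i < ampArray.Length; i++)
    {
        double re = realArray[i];
        double im = imgArray[i];
        ampArray[i] = Math.Sqrt(re * re + im * im);
    }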