How to fill a byte[] with 0xFF bytes in C#?

This question is a follow-up to a previous question.
I have written a small method to fill a byte[] with 0xFF padding, using a MemoryStream:
public static Stream FillWithPadding(Stream MS, int Count)
{
byte[] buffer = new byte[64];
for (int i = 0; i < buffer.Length; i++)
{
buffer[i] = 0xFF;
}
while (Count > buffer.Length)
{
MS.Write(buffer, 0, buffer.Length);
Count -= buffer.Length;
}
MS.Write(buffer, 0, Count);
return MS;
}
public static byte[] FillWithPadding(byte[] Buffer, int Count)
{
using (MemoryStream MS = new MemoryStream())
{
MS.Write(Buffer, 0, Buffer.Length);
MemoryStream msw = FillWithPadding(MS, Count) as MemoryStream;
return msw.GetBuffer();
}
}
This code is not working.
Instead it is producing 0xFF followed by 0x00 bytes at the end!
Can anyone please explain why this does not work?

MemoryStream.GetBuffer() returns the internal byte array that the MemoryStream uses to store its data. That array is allocated larger than the stream's length, is initially filled with zeros, and is only filled up to the stream length by the Write calls, which is why you see trailing 0x00 bytes.
Most likely you want to use MemoryStream.ToArray() instead, which returns a copy of the buffer truncated to the actual stream length.
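For example, a minimal sketch of the byte[] overload using ToArray() (same method names as above) could look like this:
public static byte[] FillWithPadding(byte[] Buffer, int Count)
{
    using (MemoryStream MS = new MemoryStream())
    {
        // write the original data first
        MS.Write(Buffer, 0, Buffer.Length);
        // append Count bytes of 0xFF padding
        FillWithPadding(MS, Count);
        // ToArray() copies only the bytes actually written,
        // unlike GetBuffer(), which exposes the whole internal buffer
        return MS.ToArray();
    }
}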

That's the current position in the stream.
Use MS.Position = 0 before the Write calls:
public static Stream FillWithPadding(Stream MS, int Count)
{
byte[] buffer = new byte[64];
for (int i = 0; i < buffer.Length; i++)
{
buffer[i] = 0xFF;
}
MS.Position = 0;
while (Count > buffer.Length)
{
MS.Write(buffer, 0, buffer.Length);
Count -= buffer.Length;
}
MS.Write(buffer, 0, Count);
return MS;
}

Related

How to convert an ADPCM wav file to a PCM wave file on the fly in C#

I have the following code to convert an ADPCM wav file to a PCM wave file. How can I do this without the intermediate file that is written to disk here: WaveFileWriter.CreateWaveFile("D:\\pcm16000.wav", upsampler);
public static byte[] ADPcmToPcm(string waveFileName, int bitRate = 128)
{
using (var reader = new WaveFileReader(waveFileName))
using (var converter = WaveFormatConversionStream.CreatePcmStream(reader))
using (var upsampler = new WaveFormatConversionStream(new WaveFormat(16000, converter.WaveFormat.Channels), converter))
{
WaveFileWriter.CreateWaveFile("D:\\pcm16000.wav", upsampler);
using (FileStream fs = new FileStream("D:\\pcm16000.wav", FileMode.Open, FileAccess.Read))
{
byte [] bytes = new byte[fs.Length];
int bytesToRead = (int)fs.Length;
int bytesRead = 0;
while (bytesToRead > 0)
{
int n = fs.Read(bytes, bytesRead, bytesToRead);
if (n == 0) break;
bytesRead += n;
bytesToRead -= n;
}
return bytes;
}
}
}
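One way to avoid the temporary file, sketched here under the assumption that your NAudio version provides the static WaveFileWriter.WriteWavFileToStream method (if it does not, you can wrap a MemoryStream in a WaveFileWriter instance yourself), is to write the wave file into a MemoryStream and return its bytes:
public static byte[] ADPcmToPcm(string waveFileName)
{
    using (var reader = new WaveFileReader(waveFileName))
    using (var converter = WaveFormatConversionStream.CreatePcmStream(reader))
    using (var upsampler = new WaveFormatConversionStream(new WaveFormat(16000, converter.WaveFormat.Channels), converter))
    using (var ms = new MemoryStream())
    {
        // writes the complete wave file (header + sample data) into the MemoryStream
        WaveFileWriter.WriteWavFileToStream(ms, upsampler);
        return ms.ToArray();
    }
}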

How to add a new line while writing a byte array to a file

Hi, I am reading an audio file into a byte array. Then I want to read every 4 bytes of data from that byte array and write them into another file.
I am able to do this, but my problem is that I want to add a new line after every 4 bytes of data are written to the file. How can I do that?
Here is my code:
FileStream f = new FileStream(@"c:\temp\MyTest.acc", FileMode.Open);
for (i = 0; i < f.Length; i += 4)
{
byte[] b = new byte[4];
int bytesRead = f.Read(b, 0, b.Length);
if (bytesRead < 4)
{
byte[] b2 = new byte[bytesRead];
Array.Copy(b, b2, bytesRead);
arrays.Add(b2);
}
else if (bytesRead > 0)
arrays.Add(b);
fs.Write(b, 0, b.Length);
}
Any suggestions please.
I think this might be the answer to your question:
byte[] newline = Encoding.ASCII.GetBytes(Environment.NewLine);
fs.Write(newline, 0, newline.Length);
So your code should be something like this:
FileStream f = new FileStream("G:\\text.txt",FileMode.Open);
for (int i = 0; i < f.Length; i += 4)
{
byte[] b = new byte[4];
int bytesRead = f.Read(b, 0, b.Length);
if (bytesRead < 4)
{
byte[] b2 = new byte[bytesRead];
Array.Copy(b, b2, bytesRead);
arrays.Add(b2);
}
else if (bytesRead > 0)
arrays.Add(b);
fs.Write(b, 0, b.Length);
byte[] newline = Encoding.ASCII.GetBytes(Environment.NewLine);
fs.Write(newline, 0, newline.Length);
}
Pass System.Environment.NewLine to the FileStream.
For more info see http://msdn.microsoft.com/en-us/library/system.environment.newline.aspx

C# Compress Triple Byte Array

I currently have this code, which compresses byte arrays.
I need it rewritten so that it can compress three-dimensional byte arrays (byte[,,]).
Thanks!
public static byte[] Compress(byte[] buffer)
{
MemoryStream ms = new MemoryStream();
GZipStream zip = new GZipStream(ms, CompressionMode.Compress, true);
zip.Write(buffer, 0, buffer.Length);
zip.Close();
ms.Position = 0;
byte[] compressed = new byte[ms.Length];
ms.Read(compressed, 0, compressed.Length);
byte[] gzBuffer = new byte[compressed.Length + 4];
Buffer.BlockCopy(compressed, 0, gzBuffer, 4, compressed.Length);
Buffer.BlockCopy(BitConverter.GetBytes(buffer.Length), 0, gzBuffer, 0, 4);
return gzBuffer;
}
public static byte[] Decompress(byte[] gzBuffer)
{
MemoryStream ms = new MemoryStream();
int msgLength = BitConverter.ToInt32(gzBuffer, 0);
ms.Write(gzBuffer, 4, gzBuffer.Length - 4);
byte[] buffer = new byte[msgLength];
ms.Position = 0;
GZipStream zip = new GZipStream(ms, CompressionMode.Decompress);
zip.Read(buffer, 0, buffer.Length);
return buffer;
}
Update: I rewrote the code; it runs much faster now and is cleaner. I just tested it with some random data (see the end of this post).
The Compression method:
public static byte[] Compress(byte[, ,] uncompressed)
{
if (uncompressed == null)
throw new ArgumentNullException("uncompressed",
"The given array is null!");
if (uncompressed.LongLength > (long)int.MaxValue)
throw new ArgumentException("The given array is too large!");
using (MemoryStream ms = new MemoryStream())
using (GZipStream gzs = new GZipStream(ms, CompressionMode.Compress))
{
// Save sizes of the dimensions
for (int dim = 0; dim < 3; dim++)
gzs.Write(BitConverter.GetBytes(
uncompressed.GetLength(dim)), 0, sizeof(int));
// Convert byte[,,] to byte[] by block-copying it.
// Some pointer magic / an unmanaged cast would avoid the copy,
// but it's cleaner this way...
byte[] data = new byte[uncompressed.Length];
Buffer.BlockCopy(uncompressed, 0, data, 0, uncompressed.Length);
// Write the data to the stream to compress it
gzs.Write(data, 0, data.Length);
gzs.Close();
// Get the compressed byte array back
return ms.ToArray();
}
}
The Decompression method:
public static byte[, ,] Decompress(byte[] compressed)
{
if (compressed == null)
throw new ArgumentNullException("compressed",
"Data to decompress cant be null!");
using (MemoryStream ms = new MemoryStream(compressed))
using (GZipStream gzs = new GZipStream(ms, CompressionMode.Decompress))
{
// Read the header and restore sizes of dimensions
byte[] dimheader = new byte[sizeof(int) * 3];
gzs.Read(dimheader, 0, dimheader.Length);
int[] dims = new int[3];
for (int j = 0; j < 3; j++)
dims[j] = BitConverter.ToInt32(dimheader, sizeof(int) * j);
// Read the data into a buffer
byte[] data = new byte[dims[0] * dims[1] * dims[2]];
gzs.Read(data, 0, data.Length);
// Copy the buffer to the three-dimensional array
byte[, ,] uncompressed = new byte[dims[0], dims[1], dims[2]];
Buffer.BlockCopy(data, 0, uncompressed, 0, data.Length);
return uncompressed;
}
}
The test code:
Random rnd = new Random();
// Create a new randomly big array, fill it with random data
byte[, ,] uncomp = new byte[rnd.Next(70, 100),
rnd.Next(70, 100), rnd.Next(70, 100)];
for (int x = 0; x < uncomp.GetLength(0); x++)
for (int y = 0; y < uncomp.GetLength(1); y++)
for (int z = 0; z < uncomp.GetLength(2); z++)
uncomp[x, y, z] = (byte)rnd.Next(30, 35);
// Compress and Uncompress again
Stopwatch compTime = new Stopwatch(), uncompTime = new Stopwatch();
compTime.Start();
byte[] comp = Compress(uncomp);
compTime.Stop();
uncompTime.Start();
byte[, ,] uncompagain = Decompress(comp);
uncompTime.Stop();
// Assert all dimension lengths and contents are equal
for (int j = 0; j < 3; j++)
Debug.Assert(uncomp.GetLength(j) == uncompagain.GetLength(j));
for (int x = 0; x < uncomp.GetLength(0); x++)
for (int y = 0; y < uncomp.GetLength(1); y++)
for (int z = 0; z < uncomp.GetLength(2); z++)
Debug.Assert(uncomp[x, y, z] == uncompagain[x, y, z]);
Console.WriteLine(string.Format("Compression: {0}ms, " +
"Decompression: {1}ms, Ratio: {2}% ({3}/{4} bytes)",
compTime.ElapsedMilliseconds, uncompTime.ElapsedMilliseconds,
(int)((double)comp.LongLength / (double)uncomp.LongLength * 100),
comp.LongLength, uncomp.LongLength));
Output, for example:
Compression: 77ms, Decompression: 23ms, Ratio: 41% (191882/461538 bytes)

How to do RLE (run length encoding) in C# on a byte array?

I am trying to XOR two bitmap files (their byte arrays) to produce a byte array that can be used to change image A into image B or vice versa. I am sending this over the network so I would like to do some basic compression before this happens.
Is there a way to do RLE (run length encoding) in C# (using a built-in, or fast reliable 3rd party library) on a byte array for this purpose?
Notes:
If you are going to suggest an alternative to my approach, please keep in mind that the decompression and transformation on the remote machine have to be as quick and efficient as possible.
I usually use GZipStream. It's fast enough and works fine.
class Compressor
{
public static byte[] compress(byte[] buffer)
{
MemoryStream ms = new MemoryStream();
GZipStream zip = new GZipStream(ms, CompressionMode.Compress, true);
zip.Write(buffer, 0, buffer.Length);
zip.Close();
ms.Position = 0;
byte[] compressed = new byte[ms.Length];
ms.Read(compressed, 0, compressed.Length);
byte[] gzBuffer = new byte[compressed.Length + 4];
Buffer.BlockCopy(compressed, 0, gzBuffer, 4, compressed.Length);
Buffer.BlockCopy(BitConverter.GetBytes(buffer.Length), 0, gzBuffer, 0, 4);
return gzBuffer;
}
public static byte[] decompress(byte[] gzBuffer)
{
MemoryStream ms = new MemoryStream();
int msgLength = BitConverter.ToInt32(gzBuffer, 0);
ms.Write(gzBuffer, 4, gzBuffer.Length - 4);
byte[] buffer = new byte[msgLength];
ms.Position = 0;
GZipStream zip = new GZipStream(ms, CompressionMode.Decompress);
zip.Read(buffer, 0, buffer.Length);
return buffer;
}
}
Here are two methods for RLE string packing/unpacking. The best results will be seen on large texts.
public string PackText()
{
try
{
StringBuilder str1 = new StringBuilder(Text);
StringBuilder str = new StringBuilder();
char ch;
int i, k, j;
for (i = 0; i < str1.Length; ) // from 0 to length of unpackedtext
{
ch = str1[i]; // get current char from str1
k = 0; //count the number of repeated characters
if (i == str1.Length - 1) // If this is the last character
{
str.Append(ch);
break; //exit the loop
}
if (str1[i + 1] == ch) //if the next character is the same as the current one
{
for (j = i; j < str1.Length; j++) //packing the characters
{
if (str1[j] == ch) //while the run of the same character continues
{
if (k == 9) break; //the repeat count is stored as a single digit, so runs are
//capped at 9 repetitions; longer counts would be ambiguous when unpacking
k++;
}
else break;
}
i = j;
}
else if ("0123456789".Contains(ch)) //if this digit and it is not repeated, then it must be escaped,
//so when unpacking to understand that this is not the number of repeated characters
{
k = 1;
i++;
}
else i++;
if (k != 0)
str.AppendFormat("{0}{1}", k, ch); //forming packed string
else
str.Append(ch);
}
return str.ToString();
}
catch
{
return null;
}
}
public string UnpackText()
{
try
{
StringBuilder str1 = new StringBuilder(Text);
StringBuilder str = new StringBuilder();
char ch;
char symb = 'a';
int s = 0;
int i, j;
for (i = 0; i < str1.Length; ) // from 0 to length of packedtext
{
ch = str1[i]; // get current char from str1
s = 0;
if ("123456789".Contains(ch)) //if this digit
{
if (i == str1.Length - 1) // If this is the last character
{
symb = ch;
s = 1;
i++;
}
else
{
symb = str1[i + 1]; // get packed symbol
i += 2;
s = Convert.ToInt32(ch) - 48; //get the number of repetitions
}
}
else
{
s = 0;
i++;
}
if (s > 0)
{
for (j = 0; j < s; j++) // write the decompressed symbol
str.Append(symb);
}
else
str.Append(ch);
}
return str.ToString();
}
catch
{
return null;
}
}
You can use the RLE encoding/decoding tool from CodePlex: http://rle.codeplex.com/ (C#).
You can build it as a .NET DLL and reference it as a library.
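If you would rather not take a dependency, here is a minimal sketch of byte-oriented RLE, where each run is stored as a count byte followed by the value byte (the RleEncode/RleDecode names are just illustrative):
// requires System.Collections.Generic for List<byte>
public static byte[] RleEncode(byte[] input)
{
    var output = new List<byte>();
    int i = 0;
    while (i < input.Length)
    {
        byte value = input[i];
        int run = 1;
        // count the run, capped at 255 so it fits in a single count byte
        while (i + run < input.Length && input[i + run] == value && run < 255)
            run++;
        output.Add((byte)run);
        output.Add(value);
        i += run;
    }
    return output.ToArray();
}

public static byte[] RleDecode(byte[] input)
{
    var output = new List<byte>();
    for (int i = 0; i < input.Length; i += 2)
    {
        int run = input[i];        // count byte
        byte value = input[i + 1]; // value byte
        for (int j = 0; j < run; j++)
            output.Add(value);
    }
    return output.ToArray();
}
For an XOR diff of two similar bitmaps most bytes are zero, so the runs are long and even this simple scheme compresses well; for noisier data GZipStream is usually the better choice.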

Chunkifying a stream. Is this code correct? Need a second set of eyes

Can anyone see any obvious holes in my logic here? Basically I need to break up a byte array into chunks of 10,000 bytes before sending it out:
byte [] bytes = GetLargePieceOfData();
Stream stream = CreateAStream();
if (bytes.Length > 10000)
{
int pos = 0;
int chunkSize = 10000;
while (pos < bytes.Length)
{
if (pos + chunkSize > bytes.Length)
chunkSize = bytes.Length - pos;
stream.Write(bytes, pos, chunkSize);
pos += chunkSize;
}
}
else
{
stream.Write(bytes, 0, bytes.Length);
}
Everything seems to be in order, but the outermost if statement is really redundant, as the following code
int pos = 0;
int chunkSize = 10000;
while (pos < bytes.Length)
{
if (pos + chunkSize > bytes.Length)
chunkSize = bytes.Length - pos;
stream.Write(bytes, pos, chunkSize);
pos += chunkSize;
}
will also handle the case where the array is smaller than the chunk size.
