I would like to know how I can read a file into byte arrays that are 4 bytes long.
These arrays will be manipulated and then have to be converted back to a single array ready to be written to a file.
EDIT:
Code snippet.
var arrays = new List<byte[]>();
using (var f = new FileStream("file.cfg.dec", FileMode.Open))
{
for (int i = 0; i < f.Length; i += 4)
{
var b = new byte[4];
var bytesRead = f.Read(b, i, 4);
if (bytesRead < 4)
{
var b2 = new byte[bytesRead];
Array.Copy(b, b2, bytesRead);
arrays.Add(b2);
}
else if (bytesRead > 0)
arrays.Add(b);
}
}
foreach (var b in arrays)
{
BitArray source = new BitArray(b);
BitArray target = new BitArray(source.Length);
target[26] = source[0];
target[31] = source[1];
target[17] = source[2];
target[10] = source[3];
target[30] = source[4];
target[16] = source[5];
target[24] = source[6];
target[2] = source[7];
target[29] = source[8];
target[8] = source[9];
target[20] = source[10];
target[15] = source[11];
target[28] = source[12];
target[11] = source[13];
target[13] = source[14];
target[4] = source[15];
target[19] = source[16];
target[23] = source[17];
target[0] = source[18];
target[12] = source[19];
target[14] = source[20];
target[27] = source[21];
target[6] = source[22];
target[18] = source[23];
target[21] = source[24];
target[3] = source[25];
target[9] = source[26];
target[7] = source[27];
target[22] = source[28];
target[1] = source[29];
target[25] = source[30];
target[5] = source[31];
var back2byte = BitArrayToByteArray(target);
arrays.Clear();
arrays.Add(back2byte);
}
using (var f = new FileStream("file.cfg.enc", FileMode.Open))
{
foreach (var b in arrays)
f.Write(b, 0, b.Length);
}
EDIT 2:
Here is the Ugly Betty-looking code that accomplishes what I wanted. Now I must refine it for performance...
var arrays_ = new List<byte[]>();
var arrays_save = new List<byte[]>();
var arrays = new List<byte[]>();
using (var f = new FileStream("file.cfg.dec", FileMode.Open))
{
for (int i = 0; i < f.Length; i += 4)
{
var b = new byte[4];
var bytesRead = f.Read(b, 0, b.Length);
if (bytesRead < 4)
{
var b2 = new byte[bytesRead];
Array.Copy(b, b2, bytesRead);
arrays.Add(b2);
}
else if (bytesRead > 0)
arrays.Add(b);
}
}
foreach (var b in arrays)
{
arrays_.Add(b);
}
foreach (var b in arrays_)
{
BitArray source = new BitArray(b);
BitArray target = new BitArray(source.Length);
target[26] = source[0];
target[31] = source[1];
target[17] = source[2];
target[10] = source[3];
target[30] = source[4];
target[16] = source[5];
target[24] = source[6];
target[2] = source[7];
target[29] = source[8];
target[8] = source[9];
target[20] = source[10];
target[15] = source[11];
target[28] = source[12];
target[11] = source[13];
target[13] = source[14];
target[4] = source[15];
target[19] = source[16];
target[23] = source[17];
target[0] = source[18];
target[12] = source[19];
target[14] = source[20];
target[27] = source[21];
target[6] = source[22];
target[18] = source[23];
target[21] = source[24];
target[3] = source[25];
target[9] = source[26];
target[7] = source[27];
target[22] = source[28];
target[1] = source[29];
target[25] = source[30];
target[5] = source[31];
var back2byte = BitArrayToByteArray(target);
arrays_save.Add(back2byte);
}
using (var f = new FileStream("file.cfg.enc", FileMode.Open))
{
foreach (var b in arrays_save)
f.Write(b, 0, b.Length);
}
EDIT 3:
Loading a big file into byte arrays of 4 bytes wasn't the smartest idea...
I have over 68 million arrays being processed and manipulated. I really wonder if it's possible to load it into a single array and still have the bit manipulation work. :/
Here's another way, similar to #igofed's solution:
var arrays = new List<byte[]>();
using (var f = new FileStream("test.txt", FileMode.Open))
{
for (int i = 0; i < f.Length; i += 4)
{
var b = new byte[4];
var bytesRead = f.Read(b, 0, 4);
if (bytesRead < 4)
{
var b2 = new byte[bytesRead];
Array.Copy(b, b2, bytesRead);
arrays.Add(b2);
}
else if (bytesRead > 0)
arrays.Add(b);
}
}
//make changes to arrays
using (var f = new FileStream("test-out.txt", FileMode.Create))
{
foreach (var b in arrays)
f.Write(b, 0, b.Length);
}
Regarding your "Edit 3" ... I'll bite, although it's really a diversion from the original question.
There's no reason you need Lists of arrays, since you're just breaking up the file into a continuous list of 4-byte sequences, looping through and processing each sequence, and then looping through and writing each sequence. You can do much better. NOTE: The implementation below does not check for or handle input files whose lengths are not exactly multiples of 4. I leave that as an exercise to you, if it is important.
To directly address your comment, here is a single-array solution. We'll ditch the List objects, read the whole file into a single byte[] array, and then copy out 4-byte sections of that array to do your bit transforms, then put the result back. At the end we'll just slam the whole thing into the output file.
byte[] data;
using (Stream fs = File.OpenRead("E:\\temp\\test.bmp")) {
data = new byte[fs.Length];
fs.Read(data, 0, data.Length);
}
byte[] element = new byte[4];
for (int i = 0; i < data.Length; i += 4) {
Array.Copy(data, i, element, 0, element.Length);
BitArray source = new BitArray(element);
BitArray target = new BitArray(source.Length);
target[26] = source[0];
target[31] = source[1];
// ...
target[5] = source[31];
target.CopyTo(data, i);
}
using (Stream fs = File.OpenWrite("E:\\temp\\test_out.bmp")) {
fs.Write(data, 0, data.Length);
}
All of the ugly initial read code is gone, since we're just using a single byte array. Notice I reserved a single 4-byte array before the processing loop to re-use, so we save the garbage collector some work. Then we loop through the giant data array 4 bytes at a time, copy each chunk into our working array, use that to initialize the BitArrays for your transforms, and then the last statement in the block converts the BitArray back into a byte array and copies it directly back to its original location within the giant data array. This replaces your BitArrayToByteArray method, which you did not provide. At the end, writing is also easy, since it's just slamming out the now-transformed giant data array.
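For completeness, since BitArrayToByteArray was never posted, here is a minimal sketch of what such a helper usually looks like (assuming the bit count is a multiple of 8, which it is here):
static byte[] BitArrayToByteArray(BitArray bits) {
    byte[] bytes = new byte[bits.Length / 8];
    bits.CopyTo(bytes, 0);  // BitArray.CopyTo packs the bits into the byte array
    return bytes;
}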
When I ran your original solution I got an OutOfMemoryException on my original test file of 100 MB, so I used a 44 MB file instead. It consumed 650 MB of memory and ran in 30 seconds. The single-array solution used 54 MB of memory and ran in 10 seconds. Not a bad improvement, and it demonstrates how costly it is to hold onto millions of small array objects.
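On the note above about files whose length isn't an exact multiple of 4: one possible way to handle it (my assumption, since the question never says what should happen to a trailing 1-3 bytes, is that they simply pass through untransformed) is to loop only over complete blocks:
// transform only complete 4-byte blocks; any trailing 1-3 bytes stay as-is
for (int i = 0; i + 4 <= data.Length; i += 4) {
    Array.Copy(data, i, element, 0, element.Length);
    // ... same bit transform as above ...
    target.CopyTo(data, i);
}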
Here is what you want:
using (var reader = new StreamReader("inputFileName"))
{
using (var writer = new StreamWriter("outputFileName"))
{
char[] buff = new char[4];
int readCount = 0;
while((readCount = reader.Read(buff, 0, 4)) > 0)
{
//manipulations with buff
writer.Write(buff, 0, readCount);
}
}
}
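One caveat of mine, not from the original answer: StreamReader/StreamWriter work on characters and apply a text encoding, which can silently alter arbitrary binary data. A byte-oriented sketch of the same loop, using FileStream directly, would look like this:
using (var input = new FileStream("inputFileName", FileMode.Open, FileAccess.Read))
using (var output = new FileStream("outputFileName", FileMode.Create, FileAccess.Write))
{
    byte[] buff = new byte[4];
    int readCount;
    while ((readCount = input.Read(buff, 0, buff.Length)) > 0)
    {
        // manipulations with buff (only the first readCount bytes are valid)
        output.Write(buff, 0, readCount);
    }
}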
IEnumerable<byte[]> arraysOf4Bytes = File
.ReadAllBytes(path)
.Select((b,i) => new{b, i})
.GroupBy(x => x.i / 4)
.Select(g => g.Select(x => x.b).ToArray());
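A short usage sketch (the output filename and the per-chunk transform are placeholders of mine, not from the question):
var result = new List<byte>();
foreach (var chunk in arraysOf4Bytes) {
    // manipulate chunk here
    result.AddRange(chunk);
}
File.WriteAllBytes("file.out", result.ToArray());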
Is it possible to convert a byte[] to Point[]? I have a canvas, and the drawing is obtained as a Point[]; I need to store it in the database as a byte[] and then retrieve it and load it again as a Point[].
You can serialize your points into a binary stream to get an array of bytes:
byte[] data;
using (var ms = new MemoryStream()) {
using (var bw = new BinaryWriter(ms)) {
bw.Write(points.Length);
foreach (var p in points) {
bw.Write(p.X);
bw.Write(p.Y);
}
}
data = ms.ToArray();
}
To deserialize your bytes back into an array, reverse the process:
Point[] points;
using (var ms = new MemoryStream(data)) {
using (var r = new BinaryReader(ms)) {
int len = r.ReadInt32();
points = new Point[len];
for (int i = 0 ; i != len ; i++) {
points[i] = new Point(r.ReadInt32(), r.ReadInt32());
}
}
}
Hi, I am trying to read a file one byte at a time in reverse order. So far I have only managed to read the file from beginning to end and write it to another file.
I need to be able to read the file from the end to the beginning and write it out to another file.
This is what I have so far:
string fileName = Console.ReadLine();
using (FileStream file = new FileStream(fileName ,FileMode.Open , FileAccess.Read))
{
//file.Seek(endOfFile, SeekOrigin.End);
int bytes;
using (FileStream newFile = new FileStream("newsFile.txt" , FileMode.Create , FileAccess.Write))
{
while ((bytes = file.ReadByte()) >= 0)
{
Console.WriteLine(bytes.ToString());
newFile.WriteByte((byte)bytes);
}
}
}
I know that I have to use the Seek method on the FileStream, and that gets me to the end of the file. I already did that in the commented portion of the code, but I do not know how to read the file backwards in the while loop.
How can I achieve this?
string fileName = Console.ReadLine();
using (FileStream file = new FileStream(fileName, FileMode.Open, FileAccess.Read))
{
byte[] output = new byte[file.Length]; // reversed file
// read the file backwards using SeekOrigin.Current
//
long offset;
file.Seek(0, SeekOrigin.End);
for (offset = 0; offset < file.Length; offset++)
{
file.Seek(-1, SeekOrigin.Current);
output[offset] = (byte)file.ReadByte();
file.Seek(-1, SeekOrigin.Current);
}
// write entire reversed file array to new file
//
File.WriteAllBytes("newsFile.txt", output);
}
You could do it by reading one byte at a time, or you could read a larger buffer, write it to the output file in reverse, and continue like that until you've reached the beginning of the file. For example:
string inputFilename = "inputFile.txt";
string outputFilename = "outputFile.txt";
using (var ofile = File.OpenWrite(outputFilename))
{
using (var ifile = File.OpenRead(inputFilename))
{
int bufferSize = 4096;
byte[] buffer = new byte[bufferSize];
long filePos = ifile.Length;
do
{
long newPos = Math.Max(0, filePos - bufferSize);
int bytesToRead = (int)(filePos - newPos);
ifile.Seek(newPos, SeekOrigin.Begin);
int bytesRead = ifile.Read(buffer, 0, bytesToRead);
// write the buffer to the output file, in reverse
for (int i = bytesRead-1; i >= 0; --i)
{
ofile.WriteByte(buffer[i]);
}
filePos = newPos;
} while (filePos > 0);
}
}
An obvious optimization would be to reverse the buffer after you've read it, and then write it in one whole chunk to the output file.
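That optimization would look something like this, replacing the inner WriteByte loop:
// reverse only the bytes actually read, then write them in a single call
Array.Reverse(buffer, 0, bytesRead);
ofile.Write(buffer, 0, bytesRead);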
And if you know that the file will fit into memory, it's really easy:
var buffer = File.ReadAllBytes(inputFilename);
// now, reverse the buffer
int i = 0;
int j = buffer.Length-1;
while (i < j)
{
byte b = buffer[i];
buffer[i] = buffer[j];
buffer[j] = b;
++i;
--j;
}
// and write it
File.WriteAllBytes(outputFilename, buffer);
If the file is small (fits in your RAM) then this would work:
public static IEnumerable<byte> Reverse(string inputFilename)
{
var bytes = File.ReadAllBytes(inputFilename);
Array.Reverse(bytes);
foreach (var b in bytes)
{
yield return b;
}
}
Usage:
foreach (var b in Reverse("smallfile.dat"))
{
}
If the file is large (bigger than your RAM) then this would work:
using (var inputFile = File.OpenRead("bigfile.dat"))
using (var inputFileReversed = new ReverseStream(inputFile))
using (var binaryReader = new BinaryReader(inputFileReversed))
{
while (binaryReader.BaseStream.Position != binaryReader.BaseStream.Length)
{
var b = binaryReader.ReadByte();
}
}
It uses the ReverseStream class which can be found here.
Is there a more efficient way to convert a byte array to an Int16 array? Or is there a way to use Buffer.BlockCopy to copy every two bytes into an Int16 array?
public static int[] BYTarrToINT16arr(string fileName)
{
try
{
int bYte = 2;
byte[] buf = File.ReadAllBytes(fileName);
int bufPos = 0;
int[] data = new int[buf.Length/2];
byte[] bt = new byte[bYte];
for (int i = 0; i < buf.Length/2; i++)
{
Array.Copy(buf, bufPos, bt, 0, bYte);
bufPos += bYte;
Array.Reverse(bt);
data[i] = BitConverter.ToInt16(bt, 0);
}
return data;
}
catch
{
return null;
}
}
Use a FileStream and a BinaryReader. Something like this:
var int16List = new List<Int16>();
using (var stream = new FileStream(filename, FileMode.Open))
using (var reader = new BinaryReader(stream))
{
try
{
while (true)
int16List.Add(reader.ReadInt16());
}
catch (EndOfStreamException ex)
{
// We've read the whole file
}
}
return int16List.ToArray();
You can also read the whole file into a byte[], and then use a MemoryStream instead of the FileStream if you want.
If you do this then you'll also be able to size the List appropriately up front and make it a bit more efficient.
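A sketch of that variant (same idea, just with the file read up front and the list pre-sized):
var bytes = File.ReadAllBytes(filename);
var int16List = new List<Int16>(bytes.Length / 2); // pre-sized
using (var stream = new MemoryStream(bytes))
using (var reader = new BinaryReader(stream))
{
    while (stream.Position + 2 <= stream.Length)
        int16List.Add(reader.ReadInt16());
}
return int16List.ToArray();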
Apart from having an off-by-one possibility in case the number of bytes is odd (you'll miss the last byte), your code is OK. You can optimize it by dropping the bt array altogether, swapping the i*2 and i*2+1 bytes before calling BitConverter.ToInt16, and passing i*2 as the starting index to the BitConverter.ToInt16 method.
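Sketched out, that suggestion applied to the original code would look roughly like this:
byte[] buf = File.ReadAllBytes(fileName);
int[] data = new int[buf.Length / 2];
for (int i = 0; i < data.Length; i++)
{
    // swap the two bytes of each pair in place instead of copying into bt
    byte tmp = buf[i * 2];
    buf[i * 2] = buf[i * 2 + 1];
    buf[i * 2 + 1] = tmp;
    data[i] = BitConverter.ToInt16(buf, i * 2);
}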
This works if you don't mind using InteropServices. I assume it is faster than the other techniques.
using System.Runtime.InteropServices;
public Int16[] Copy_Byte_Buffer_To_Int16_Buffer(byte[] buffer)
{
Int16[] result = new Int16[1];
int size = buffer.Length;
if ((size % 2) != 0)
{
/* Error here */
return result;
}
else
{
result = new Int16[size/2];
IntPtr ptr_src = Marshal.AllocHGlobal (size);
Marshal.Copy (buffer, 0, ptr_src, size);
Marshal.Copy (ptr_src, result, 0, result.Length);
Marshal.FreeHGlobal (ptr_src);
return result;
}
}
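On the Buffer.BlockCopy part of the question: it can copy the pairs straight into an Int16[] in one call. Note that it keeps the file's byte order as-is (no per-pair reversal like the code above), so this is only a sketch for the case where the data already matches the machine's endianness:
byte[] buffer = File.ReadAllBytes(fileName);
Int16[] result = new Int16[buffer.Length / 2];
Buffer.BlockCopy(buffer, 0, result, 0, result.Length * 2); // count is in bytes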
var bytes = File.ReadAllBytes(path);
var ints = bytes.Where((b, i) => i % 2 == 0 && i + 1 < bytes.Length).Select((b, i) => BitConverter.ToInt16(bytes, i * 2));
if (bytes.Length % 2 == 1)
{
ints = ints.Concat(new[] { BitConverter.ToInt16(new byte[] { bytes[bytes.Length - 1], 0 }, 0) });
}
return ints.ToArray();
try...
int index = 0;
var my16s = bytes.GroupBy(x => (index++) / 2)
.Select(x => BitConverter.ToInt16(x.Reverse().ToArray(),0)).ToList();
(Apologies if this is a duplicate ... I posted but saw no evidence that it actually made it to the forum.)
I've been trying to get SlimDX DirectSound working. Here's the code I have. It fills the secondary buffer from a wav file and then, in a thread loop, alternately fills the lower or upper halves of the buffer.
It plays the first load of the buffer fine. The AutoResetEvents fire when they should and the lower half then upper half of the buffer are populated (verified with Debug statements). But playing does not continue after the first load of the buffer. So somehow the repopulation of the buffer doesn't work as it should.
Ideas?
(I'm using DirectSound because it's the only way I've found to set the GUID of the audio device that I want to use. I'm open to other .NET-friendly approaches.)
private void PlaySound(Guid soundCardGuid, string audioFile) {
DirectSound ds = new DirectSound(soundCardGuid);
ds.SetCooperativeLevel(this.Handle, CooperativeLevel.Priority);
WaveFormat format = new WaveFormat();
format.BitsPerSample = 16;
format.BlockAlignment = 4;
format.Channels = 2;
format.FormatTag = WaveFormatTag.Pcm;
format.SamplesPerSecond = 44100;
format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlignment;
SoundBufferDescription desc = new SoundBufferDescription();
desc.Format = format;
desc.Flags = BufferFlags.GlobalFocus;
desc.SizeInBytes = 8 * format.AverageBytesPerSecond;
PrimarySoundBuffer pBuffer = new PrimarySoundBuffer(ds, desc);
SoundBufferDescription desc2 = new SoundBufferDescription();
desc2.Format = format;
desc2.Flags = BufferFlags.GlobalFocus | BufferFlags.ControlPositionNotify | BufferFlags.GetCurrentPosition2;
desc2.SizeInBytes = 8 * format.AverageBytesPerSecond;
SecondarySoundBuffer sBuffer1 = new SecondarySoundBuffer(ds, desc2);
NotificationPosition[] notifications = new NotificationPosition[2];
notifications[0].Offset = desc2.SizeInBytes / 2 + 1;
notifications[1].Offset = desc2.SizeInBytes - 1;
notifications[0].Event = new AutoResetEvent(false);
notifications[1].Event = new AutoResetEvent(false);
sBuffer1.SetNotificationPositions(notifications);
byte[] bytes1 = new byte[desc2.SizeInBytes / 2];
byte[] bytes2 = new byte[desc2.SizeInBytes];
Stream stream = File.Open(audioFile, FileMode.Open);
Thread fillBuffer = new Thread(() => {
int readNumber = 1;
int bytesRead;
bytesRead = stream.Read(bytes2, 0, desc2.SizeInBytes);
sBuffer1.Write<byte>(bytes2, 0, LockFlags.None);
sBuffer1.Play(0, PlayFlags.None);
while (true) {
if (bytesRead == 0) { break; }
notifications[0].Event.WaitOne();
bytesRead = stream.Read(bytes1, 0, bytes1.Length);
sBuffer1.Write<byte>(bytes1, 0, LockFlags.None);
if (bytesRead == 0) { break; }
notifications[1].Event.WaitOne();
bytesRead = stream.Read(bytes1, 0, bytes1.Length);
sBuffer1.Write<byte>(bytes1, desc2.SizeInBytes / 2, LockFlags.None);
}
stream.Close();
stream.Dispose();
});
fillBuffer.Start();
}
You haven't set it to loop on the play buffer. Change your code to:
sBuffer1.Play(0, PlayFlags.Looping);
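One follow-up note of mine (not part of the original answer): with looping enabled the buffer will keep replaying old data after the stream runs dry, so once the fill loop exits you probably also want to wait for the final notification and stop the buffer; if I remember the SlimDX API correctly, the sound buffer exposes a Stop() method for that:
// after the while loop breaks out (assumption: the last write landed in the upper half)
notifications[1].Event.WaitOne(); // let the last half finish playing
sBuffer1.Stop();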
I don't know C++ very well, especially the I/O part. Can anyone please help me translate the following C++ code into C#?
unsigned *PostingOffset, *PostingLength, NumTerms;
void LoadSubIndex(char *subindex) {
FILE *in = fopen(subindex, "rb");
if (in == 0) {
printf("Error opening sub-index file '%s'!\n", subindex);
exit(EXIT_FAILURE);
}
int len=0;
// Array of terms
char **Term;
char *TermList;
fread(&NumTerms, sizeof(unsigned), 1, in);
PostingOffset = (unsigned*)malloc(sizeof(unsigned) * NumTerms);
PostingLength = (unsigned*)malloc(sizeof(unsigned) * NumTerms);
Term = (char**)malloc(sizeof(char*) * NumTerms);
Term = (char**)malloc(sizeof(char*) * NumTerms);
// Offset of each posting
fread(PostingOffset, sizeof(unsigned), NumTerms, in);
// Length of each posting in bytes
fread(PostingLength, sizeof(unsigned), NumTerms, in);
// Number of bytes in the posting terms array
fread(&len, sizeof(unsigned), 1, in);
TermList = (char*)malloc(sizeof(char) * len);
fread(TermList, sizeof(unsigned)*len, 1, in);
unsigned k=1;
Term[0] = &TermList[0];
for (int i=1; i<len; i++) {
if (TermList[i-1] == '\0') {
Term[k] = &TermList[i];
k++;
}
}
fclose(in);
}
Thanks in advance.
I'll give you a head start.
using (var reader = new BinaryReader(new FileStream(subindex, FileMode.Open))) {
int numTerms = (int)reader.ReadUInt32();
postingOffset = new UInt32[numTerms];
postingLength = new UInt32[numTerms];
var term = new byte[numTerms];
for(int i=0;i<numTerms;i++)
postingOffset[i] = reader.ReadUInt32();
for(int i=0;i<numTerms;i++)
postingLength[i] = reader.ReadUInt32();
var len = reader.ReadInt32();
var termList = new ... // byte[] or uint32[] ??
//etc
}
There's no need to close the file handle here - it will close when the using { } block goes out of scope.
I didn't finish it because there are some flaws in your code. With TermList you are reading in 4 times as much data as you've allocated. You shouldn't be allocating Term twice either - that will result in leaking memory.
To turn Term back into a string, use Encoding.ASCII.GetString(term).TrimEnd('\0');
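And if it helps, here is a rough sketch of the remaining read, under my assumption that TermList is really len bytes of NUL-separated ASCII (i.e. the fread count in the C++ was the bug, not the allocation):
var termListBytes = reader.ReadBytes(len);
var terms = System.Text.Encoding.ASCII
    .GetString(termListBytes)
    .Split(new[] { '\0' }, StringSplitOptions.RemoveEmptyEntries);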