When I have a string like "0xd8 0xff 0xe0" I do
Text.Split(' ').Select(part => byte.Parse(part, System.Globalization.NumberStyles.HexNumber)).ToArray();
But if I get a string like "0xd8ffe0", I don't know what to do.
I'm also open to recommendations on how to write a byte array back out as a single string.
You need to scrub your string before you start parsing it. First, remove the leading 0x, then just skip any spaces as you enumerate the string. But using LINQ for this is probably not the best approach. For one, the code won't be very readable and it'll be hard to step through if you're debugging. But also, there are some tricks you can do to make hex/byte conversions very fast. For example, don't use Byte.Parse, but instead use array indexing to "look up" the corresponding value.
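For illustration, here is a rough sketch of that approach (ParseHex is just a made-up helper name, and it assumes well-formed input in either of the two forms above):
static byte[] ParseHex(string text)
{
    // Scrub: remove "0x"/"0X" markers and spaces so both
    // "0xd8 0xff 0xe0" and "0xd8ffe0" reduce to "d8ffe0".
    string clean = text.Replace("0x", "").Replace("0X", "").Replace(" ", "");

    // Lookup table: the value of each hex digit, indexed by its character code.
    var lookup = new byte[128];
    for (int i = 0; i < 10; i++) lookup['0' + i] = (byte)i;
    for (int i = 0; i < 6; i++)
    {
        lookup['a' + i] = (byte)(10 + i);
        lookup['A' + i] = (byte)(10 + i);
    }

    var bytes = new byte[clean.Length / 2];
    for (int i = 0; i < bytes.Length; i++)
        bytes[i] = (byte)((lookup[clean[2 * i]] << 4) | lookup[clean[2 * i + 1]]);
    return bytes;
}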
A while back I implemented a HexEncoding class that derives from the Encoding base class, much like ASCIIEncoding and UTF8Encoding, etc. Using it is very simple. It's pretty well optimized too, which can be very important depending on the size of your data.
var enc = new HexEncoding();
byte[] bytes = enc.GetBytes(str); // convert hex string to byte[]
str = enc.GetString(bytes); // convert byte[] to hex string
Here's the complete class; I know it's kinda big for a post, but I've stripped out the doc comments.
public sealed class HexEncoding : Encoding
{
    public static readonly HexEncoding Hex = new HexEncoding( );

    private static readonly char[] HexAlphabet;
    private static readonly byte[] HexValues;

    static HexEncoding( )
    {
        HexAlphabet = new char[] { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' };
        HexValues = new byte[255];
        for ( int i = 0 ; i < HexValues.Length ; i++ ) {
            char c = (char)i;
            if ( "0123456789abcdefABCDEF".IndexOf( c ) > -1 ) {
                HexValues[i] = System.Convert.ToByte( c.ToString( ), 16 );
            } // if
        } // for
    }

    public override string EncodingName
    {
        get
        {
            return "Hex";
        }
    }

    public override bool IsSingleByte
    {
        get
        {
            return true;
        }
    }

    public override int GetByteCount( char[] chars, int index, int count )
    {
        return count / 2;
    }

    public override int GetBytes( char[] chars, int charIndex, int charCount, byte[] bytes, int byteIndex )
    {
        int ci = charIndex;
        int bi = byteIndex;
        while ( ci < ( charIndex + charCount ) ) {
            char c1 = chars[ci++];
            char c2 = chars[ci++];
            byte b1 = HexValues[(int)c1];
            byte b2 = HexValues[(int)c2];
            bytes[bi++] = (byte)( b1 << 4 | b2 );
        } // while
        return charCount / 2;
    }

    public override int GetCharCount( byte[] bytes, int index, int count )
    {
        return count * 2;
    }

    public override int GetChars( byte[] bytes, int byteIndex, int byteCount, char[] chars, int charIndex )
    {
        int ci = charIndex;
        int bi = byteIndex;
        while ( bi < ( byteIndex + byteCount ) ) {
            int b1 = bytes[bi] >> 4;
            int b2 = bytes[bi++] & 0xF;
            char c1 = HexAlphabet[b1];
            char c2 = HexAlphabet[b2];
            chars[ci++] = c1;
            chars[ci++] = c2;
        } // while
        return byteCount * 2;
    }

    public override int GetMaxByteCount( int charCount )
    {
        return charCount / 2;
    }

    public override int GetMaxCharCount( int byteCount )
    {
        return byteCount * 2;
    }
} // class
Hex String to byte[]:
byte[] bytes = new byte[value.Length / 2];
for (int i = 0; i < value.Length; i += 2)
{
bytes[i / 2] = Convert.ToByte(value.Substring(i, 2), 16);
}
If you have "0x" at the beginning you should skip two bytes.
byte[] or any IEnumerable<Byte> -> Hex String:
return sequence.Aggregate(string.Empty,
(result, value) => result +
string.Format(CultureInfo.InvariantCulture, "{0:x2}", value));
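For larger sequences, a StringBuilder avoids the repeated string allocations that Aggregate causes; a rough equivalent sketch:
var sb = new System.Text.StringBuilder();
foreach (byte value in sequence)
{
    sb.Append(value.ToString("x2", CultureInfo.InvariantCulture));
}
return sb.ToString();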
Related
I need to check for a string located inside a packet that I receive as a byte array. If I use BitConverter.ToString(), I get the bytes as a string with dashes (e.g. 00-50-25-40-A5-FF).
I tried most of the functions I found after a quick googling, but most of them take a string parameter, and if I call them with the dashed string, they throw an exception.
I need a function that turns hex (as a string or as bytes) into the string it represents (e.g. 0x31 = "1"). If the input parameter is a string, the function should recognize dashes (for example "47-61-74-65-77-61-79-53-65-72-76-65-72"), because BitConverter doesn't convert correctly.
Like so?
static void Main()
{
    byte[] data = FromHex("47-61-74-65-77-61-79-53-65-72-76-65-72");
    string s = Encoding.ASCII.GetString(data); // GatewayServer
}

public static byte[] FromHex(string hex)
{
    hex = hex.Replace("-", "");
    byte[] raw = new byte[hex.Length / 2];
    for (int i = 0; i < raw.Length; i++)
    {
        raw[i] = Convert.ToByte(hex.Substring(i * 2, 2), 16);
    }
    return raw;
}
For Unicode support:
public class HexadecimalEncoding
{
    public static string ToHexString(string str)
    {
        var sb = new StringBuilder();
        var bytes = Encoding.Unicode.GetBytes(str);
        foreach (var t in bytes)
        {
            sb.Append(t.ToString("X2"));
        }
        return sb.ToString(); // returns "480065006C006C006F00200077006F0072006C006400" for "Hello world" (UTF-16LE)
    }

    public static string FromHexString(string hexString)
    {
        var bytes = new byte[hexString.Length / 2];
        for (var i = 0; i < bytes.Length; i++)
        {
            bytes[i] = Convert.ToByte(hexString.Substring(i * 2, 2), 16);
        }
        return Encoding.Unicode.GetString(bytes); // returns "Hello world" for the UTF-16LE hex string above
    }
}
string str = "47-61-74-65-77-61-79-53-65-72-76-65-72";
string[] parts = str.Split('-');
foreach (string val in parts)
{
int x;
if (int.TryParse(val, out x))
{
Console.Write(string.Format("{0:x2} ", x);
}
}
Console.WriteLine();
You can split the string at the -
Convert each part to an int (int.TryParse with NumberStyles.HexNumber, since the parts are hexadecimal)
Output the int as a hex string {0:x2}
string hexString = "8E2";
int num = Int32.Parse(hexString, System.Globalization.NumberStyles.HexNumber);
Console.WriteLine(num);
//Output: 2274
From https://msdn.microsoft.com/en-us/library/bb311038.aspx
Your reference to "0x31 = 1" makes me think you're actually trying to convert ASCII values to strings - in which case you should be using something like Encoding.ASCII.GetString(Byte[])
If you need the result as a byte array, pass it along directly instead of converting it to a string and back to bytes.
In your example, "0x31 = 1" refers to ASCII codes. In that case, to convert the bytes (hex values) to their ASCII characters, use:
Encoding.ASCII.GetString(byte[])
byte[] data = new byte[] { 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x30 };
string ascii = Encoding.ASCII.GetString(data);
Console.WriteLine(ascii);
The console will display: 1234567890
My .NET 5 solution, which also handles NUL characters at the end:
hex = ConvertFromHex( hex.AsSpan(), Encoding.Default );
static string ConvertFromHex( ReadOnlySpan<char> hexString, Encoding encoding )
{
    int realLength = 0;
    for ( int i = hexString.Length - 2; i >= 0; i -= 2 )
    {
        byte b = byte.Parse( hexString.Slice( i, 2 ), NumberStyles.HexNumber, CultureInfo.InvariantCulture );
        if ( b != 0 ) //not NULL character
        {
            realLength = i + 2;
            break;
        }
    }

    var bytes = new byte[realLength / 2];
    for ( var i = 0; i < bytes.Length; i++ )
    {
        bytes[i] = byte.Parse( hexString.Slice( i * 2, 2 ), NumberStyles.HexNumber, CultureInfo.InvariantCulture );
    }
    return encoding.GetString( bytes );
}
One-liners:
var input = "Hallo Hélène and Mr. Hörst";
var ConvertStringToHexString = (string input) => String.Join("", Encoding.UTF8.GetBytes(input).Select(b => $"{b:X2}"));
var ConvertHexToString = (string hexInput) => Encoding.UTF8.GetString(Enumerable.Range(0, hexInput.Length / 2).Select(_ => Convert.ToByte(hexInput.Substring(_ * 2, 2), 16)).ToArray());
Assert.AreEqual(input, ConvertHexToString(ConvertStringToHexString(input)));
Not sure if I'm heading in the right direction.
I can't find any info about the tilde (~) operator.
int n = 5;
int m = ~n;
string numAsString = Convert.ToString(~n, 2);
char[] NumAsChar = numAsString.ToCharArray();
long l = Convert.ToInt64(numAsString, 2);
Console.WriteLine(numAsString);
Console.WriteLine(l);
You're probably looking for a simple answer.
int n = 5;
byte[] nbytes = BitConverter.GetBytes(n);
for (int i = 0; i < nbytes.Length; i++)
    nbytes[i] = (byte)~nbytes[i]; // ~ promotes byte to int, so cast back to byte
n = BitConverter.ToInt32(nbytes, 0);
edit: you actually can't apply ~ to the byte[] as a whole (and ~ on a byte yields an int, hence the cast). You can either do
for (int i = 0; i < nbytes.Length; i++)
    nbytes[i] = (byte)~nbytes[i];
or just not use a byte array at all.
For clarity's sake, do note that you can just do
n = ~n;
and skip doing any of the separation. But you specifically asked for the byte conversion.
Use these 2 methods
static byte[] GetBytes(string str)
{
byte[] bytes = new byte[str.Length * sizeof(char)];
System.Buffer.BlockCopy(str.ToCharArray(), 0, bytes, 0, bytes.Length);
return bytes;
}
static string GetString(byte[] bytes)
{
char[] chars = new char[bytes.Length / sizeof(char)];
System.Buffer.BlockCopy(bytes, 0, chars, 0, bytes.Length);
return new string(chars);
}
And then use them like this
byte[] bytes = GetBytes(str);
byte[] reversed = bytes.Reverse().ToArray();
var revStr = GetString(reversed);
I did it like this. Any suggestions on making it simpler?
int n = 100;
//Convert decimal to binary
string numAsString = Convert.ToString(n, 2);
char[] NumAsChar = numAsString.ToCharArray();
Console.WriteLine(numAsString);
//Invert bits
for (int i = 0; i < numAsString.Length; i++)
{
if (NumAsChar[i] == '0')
{
NumAsChar[i] = '1';
}
else
{
NumAsChar[i] = '0';
}
}
string NewNumAsString = new string(NumAsChar);
//Convert inverted binary num to decimal
long l = Convert.ToInt64(NewNumAsString, 2);
Console.WriteLine(NewNumAsString);
Console.WriteLine(l);
I am trying to adapt this C# code, which performs conversions to and from base 52 and which I use to store RGB color information, to C++:
public static string ColourToBase52(Color colour)
{
int value = colour.ToArgb() & 0x00FFFFFF; // Mask off the alpha channel.
return ToBase52(value);
}
public static Color ColourFromBase52(string colour)
{
int value = FromBase52(colour);
return Color.FromArgb(unchecked((int)(0xFF000000 | value)));
}
public static string ToBase52(int value)
{
char[] baseChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ".ToCharArray();
int targetBase = baseChars.Length;
int i = 32;
char[] buffer = new char[i];
do
{
buffer[--i] = baseChars[value % targetBase];
value = value / targetBase;
}
while (value > 0);
char[] result = new char[32 - i];
Array.Copy(buffer, i, result, 0, 32 - i);
return new string(result).PadLeft(5, 'a');
}
public static int FromBase52(string value)
{
char[] baseChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ".ToCharArray();
int targetbase = baseChars.Length;
int multiplier = 1;
int result = 0;
for (int i = value.Length-1; i >= 0; --i)
{
int digit = Array.IndexOf(baseChars, value[i]);
result += digit*multiplier;
multiplier *= targetbase;
}
return result;
}
For my C++ code, I have opted to combine the functions that get and return the color value as an integer with the Base 52 conversion functions:
struct DIFColor *DIFBase52ToColor(std::string c)
{
const char *baseChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
int targetBase = 52;
int multiplier = 1;
int result = 0;
const char *d = c.c_str();
for (int i = c.length() - 1; i >= 0; --i)
{
int digit = DIFGetPositionInArray(baseChars, sizeof(baseChars), c[i]);
result += digit * multiplier;
multiplier = multiplier * targetBase;
}
uint8_t b = result & 255;
uint8_t g = (result >> 8) & 255;
uint8_t r = (result >> 16) * 255;
return CreateDIFColor(r,g,b);
}
std::string DIFColorToBase52(struct DIFColor *c)
{
int rgb = ((c->r&0x0ff)<<16)|((c->g&0x0ff)<<8)|(c->b&0x0ff);
const char *baseChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
int targetBase = 52;
int i = 32;
char *buffer = new char[i];
do
{
buffer[--i] = baseChars[rgb % targetBase];
rgb = rgb / targetBase;
}
while (rgb > 0);
char *result = new char[32 - i];
DIFCopyCharArray((const char *)buffer, i, 0, 32 - i, result);
std::string s((const char*)result);
s.insert(s.begin(), 5 - s.size(), 'a');
return s;
}
I also had to create two functions for array manipulation:
int DIFGetPositionInArray(const char *array, size_t size, const char c)
{
for (size_t i = 0; i < size; i++)
{
if (array[i] == c)
return (int)i;
}
return -1;
}
void DIFCopyCharArray(const char* source, int wheretostart, int wheretocopy, int numtocopy, char *dest)
{
int c = wheretocopy;
for(int i = wheretostart; i <= numtocopy; i++)
{
dest[c] = source[i];
c++;
}
}
However, when I tried to test it with a sanity check, it failed:
255,255,255 = 'aah1U' in Base52 RGB
aah1U = 1,245,59 in RGB
It also seems that every time I run the sanity check, a different value is produced:
255,255,255 = 'aah13' in Base52 RGB
aah13 = 1,245,59 in RGB
255,255,255 = 'aah1j' in Base52 RGB
aah1j = 1,245,59 in RGB
The expected output was:
255,255,255 = 'cpqEN' in Base52 RGB
cpqEN = 255,255,255 in RGB
Making me think that this is possibly a pointer problem.
The error is probably that you don't terminate the result string anywhere, which leads to undefined behavior in the following:
std::string s((const char*)result);
This is because the std::string constructor looks for the terminator when copying the C-style string you pass to it.
You can solve it two ways: Either add the terminator character '\0' to result, or tell the std::string constructor the length of result.
The problem lies in the fact that the array copy function is incorrect. It should be:
void DIFCopyCharArray(const char* source, int wheretostart, int wheretocopy, int numtocopy, char *dest)
{
    int c = wheretocopy;
    for (int i = wheretostart; c < numtocopy; i++)
    {
        dest[c] = source[i];
        c++;
    }
    dest[c] = '\0'; // note: dest must be allocated with room for this terminator (numtocopy + 1 chars)
}
Also, the array search function does not work because sizeof(baseChars) returns the size of the pointer (4 or 8 bytes, depending on the platform), not the number of elements.
Use a function like this:
int DIFGetPositionInArray(const char *array, int arrayElements, const char c)
{
    for (int i = 0; i < arrayElements; i++)
    {
        if (array[i] == c)
            return i;
    }
    return -1;
}
And call it like this:
DIFGetPositionInArray(baseChars,52,d[i]);
I'm trying to convert a bit string to ASCII characters in groups of 8 bits (each 8 bits = 1 ASCII char).
public string BitsToChar(string InpS)
{
string RetS = "";
for (int iCounter = 0; iCounter < InpS.Length / 8; iCounter++)
RetS = System.String.Concat(RetS, (char)Convert.ToByte(InpS.Substring(iCounter * 8, 8)), 2);
return RetS;
}
It throws a System.OverflowException: Value was either too large or too small for an unsigned byte.
It's not clear to me how an 8-bit portion of a binary string can be too large or too small for an 8-bit Byte type.
Any ideas? Thank you.
Try something like this:
private static Char ConvertToChar(String value) {
    int result = 0;
    foreach (Char ch in value)
        result = result * 2 + ch - '0';
    return (Char) result;
}

public string BitsToChar(string value) {
    if (String.IsNullOrEmpty(value))
        return value;
    StringBuilder Sb = new StringBuilder();
    for (int i = 0; i < value.Length / 8; ++i)
        Sb.Append(ConvertToChar(value.Substring(8 * i, 8)));
    return Sb.ToString();
}
...
String result = BitsToChar("010000010010000001100010"); // <- "A b"
Do something like this (convert each 8-bit group with base 2, then cast to char):
public string BitsToChar(string InpS)
{
    string RetS = "";
    for (int i = 0; i + 8 <= InpS.Length; i += 8)
    {
        RetS += (char)System.Convert.ToByte(InpS.Substring(i, 8), 2);
    }
    return RetS;
}
Try something like this:
public static string BitsToChar(string bitString)
{
    var retString = new StringBuilder();
    foreach (Match match in Regex.Matches(bitString, "[01]{8}")) // 8 is size of bits
    {
        retString.Append((Char)Convert.ToByte(match.Value, 2));
    }
    return retString.ToString();
}
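For example (hypothetical usage, reusing the sample input from the earlier answer):
string result = BitsToChar("010000010010000001100010"); // "A b"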