Incorrect CRC8 checksum computation - C#

I have this class to compute the CRC8 checksum of a byte[]:
public static class Crc8
{
    static byte[] table = new byte[256];
    // x^8 + x^7 + x^6 + x^4 + x^2 + 1
    const byte poly = 0xd5;

    public static byte ComputeChecksum(params byte[] bytes)
    {
        byte crc = 0;
        if (bytes != null && bytes.Length > 0)
        {
            foreach (byte b in bytes)
            {
                crc = table[crc ^ b];
            }
        }
        return crc;
    }

    static Crc8()
    {
        for (int i = 0; i < 256; ++i)
        {
            int temp = i;
            for (int j = 0; j < 8; ++j)
            {
                if ((temp & 0x80) != 0)
                {
                    temp = (temp << 1) ^ poly;
                }
                else
                {
                    temp <<= 1;
                }
            }
            table[i] = (byte)temp;
        }
    }
}
And in Main I have:
static void Main(string[] args)
{
    string number = "123456789";
    Console.WriteLine(Convert.ToByte(Crc8.ComputeChecksum(StringToByteArray(number))).ToString("x2"));
    Console.ReadLine();
}

private static byte[] StringToByteArray(string str)
{
    ASCIIEncoding enc = new ASCIIEncoding();
    return enc.GetBytes(str);
}
This results in 0xBC.
However, according to http://www.scadacore.com/field-tools/programming-calculators/online-checksum-calculator/ this is incorrect, because the CheckSum8 Xor value there is 0x31.
What did I do wrong here?

The linked site only lists a handful of 16- and 32-bit CRCs; the CheckSum8 Xor is not a CRC at all. Your 0xBC comes from an 8-bit CRC
called "CRC-8/DVB-S2", see http://reveng.sourceforge.net/crc-catalogue/1-15.htm

Ah, OK, so I overinterpreted this checksum computation.
Well, in that case, it's easy:
public static byte Checksum8XOR(byte[] data)
{
    byte checksum = 0x00;
    for (int i = 0; i < data.Length; i++)
    {
        checksum ^= data[i];
    }
    return checksum;
}
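A quick usage sketch (assuming using System.Text for the ASCII encoding): XOR-ing the ASCII bytes of "123456789" this way gives the 0x31 reported by the online calculator.
byte[] data = Encoding.ASCII.GetBytes("123456789");
Console.WriteLine(Checksum8XOR(data).ToString("x2")); // prints "31"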


ARC4 encryption not working correctly server side

I have a socket.io client and server that send data to each other, with encryption based on ARC4.
I have tried multiple different scenarios, but it keeps failing to decrypt anything and I'm not sure why.
The class: ARC4_New
public class ARC4_New
{
    private int i;
    private int j;
    private byte[] bytes;
    public const int POOLSIZE = 256;

    public ARC4_New()
    {
        bytes = new byte[POOLSIZE];
    }

    public ARC4_New(byte[] key)
    {
        bytes = new byte[POOLSIZE];
        this.Initialize(key);
    }

    public void Initialize(byte[] key)
    {
        this.i = 0;
        this.j = 0;
        for (i = 0; i < POOLSIZE; ++i)
        {
            this.bytes[i] = (byte)i;
        }
        for (i = 0; i < POOLSIZE; ++i)
        {
            j = (j + bytes[i] + key[i % key.Length]) & (POOLSIZE - 1);
            this.Swap(i, j);
        }
        this.i = 0;
        this.j = 0;
    }

    private void Swap(int a, int b)
    {
        byte t = this.bytes[a];
        this.bytes[a] = this.bytes[b];
        this.bytes[b] = t;
    }

    public byte Next()
    {
        this.i = ++this.i & (POOLSIZE - 1);
        this.j = (this.j + this.bytes[i]) & (POOLSIZE - 1);
        this.Swap(i, j);
        return this.bytes[(this.bytes[i] + this.bytes[j]) & 255];
    }

    public void Encrypt(ref byte[] src)
    {
        for (int k = 0; k < src.Length; k++)
        {
            src[k] ^= this.Next();
        }
    }

    public void Decrypt(ref byte[] src)
    {
        this.Encrypt(ref src);
    }
}
public System.Numerics.BigInteger RandomInteger(int bitSize)
{
    var integerData = new byte[bitSize / 8];
    _numberGenerator.NextBytes(integerData);
    integerData[integerData.Length - 1] &= 0x7f;
    return new System.Numerics.BigInteger(integerData);
}
My script which generates a key:
System.Numerics.BigInteger DHPrivate = RandomInteger(256);
System.Numerics.BigInteger DHPrimal = RandomInteger(256);
System.Numerics.BigInteger DHGenerated = RandomInteger(256);
if (DHGenerated > DHPrimal)
{
    System.Numerics.BigInteger tempG = DHGenerated;
    DHGenerated = DHPrimal;
    DHPrimal = tempG;
}
Then with those values I generate a public key:
System.Numerics.BigInteger DHPublic = System.Numerics.BigInteger.ModPow(DHGenerated, DHPrivate, DHPrimal);
Then I encrypt this key:
string pkey = EncryptY(CalculatePublic, DHPublic);
(Additional code for the encryption below)
protected virtual string EncryptY(Func<System.Numerics.BigInteger, System.Numerics.BigInteger> calculator, System.Numerics.BigInteger value)
{
    byte[] valueData = Encoding.UTF8.GetBytes(value.ToString());
    valueData = PKCSPad(valueData);
    Array.Reverse(valueData);
    var paddedInteger = new System.Numerics.BigInteger(valueData);
    System.Numerics.BigInteger calculatedInteger = calculator(paddedInteger);
    byte[] paddedData = calculatedInteger.ToByteArray();
    Array.Reverse(paddedData);
    string encryptedValue = Utils.Converter.BytesToHexString(paddedData).ToLower();
    return encryptedValue.StartsWith("00") ? encryptedValue.Substring(2) : encryptedValue;
}

protected virtual byte[] PKCSPad(byte[] data)
{
    var buffer = new byte[128 - 1];
    int dataStartPos = (buffer.Length - data.Length);
    buffer[0] = (byte)Padding;
    Buffer.BlockCopy(data, 0, buffer, dataStartPos, data.Length);
    int paddingEndPos = (dataStartPos - 1);
    bool isRandom = (Padding == PKCSPadding.RandomByte);
    for (int i = 1; i < paddingEndPos; i++)
    {
        buffer[i] = (byte)(isRandom ?
            _numberGenerator.Next(1, 256) : byte.MaxValue);
    }
    return buffer;
}
After all that I send the string pkey to the server.
After decrypting the string, the server gets the public key, which is for example: 127458393
I then connect both my client and server using 127458393, like:
BigInteger key = System.Numerics.BigInteger.Parse("127458393");
client = new ARC4_New(PrimalDing.ToByteArray());
My client sends a string like:
client.Encrypt(BYTE_HERE);
And my server reads it like:
client.Decrypt(BYTE_HERE);
But it fails, and gets a random unreadable string.
What am I doing wrong here?
I managed to fix the issue.
For some reason, my server was (and is) reversing the bytes I used in the ARC4 client, so I simply reverse them now as a hotfix:
System.Numerics.BigInteger temp = System.Numerics.BigInteger.Parse(textBox1.Text);
client = new ARC4_New(temp.ToByteArray().Reverse().ToArray());
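For context: BigInteger.ToByteArray() returns the value in little-endian order (and may append a 0x00 sign byte), so whichever side builds its key from a big-endian representation will see the bytes reversed. A minimal sketch of making both ends agree, assuming the shared value 127458393 from above and using System.Linq for Reverse():
// Hypothetical illustration: both sides must feed ARC4 exactly the same key bytes.
// BigInteger.ToByteArray() is little-endian, so reverse it if the other side expects big-endian.
System.Numerics.BigInteger shared = System.Numerics.BigInteger.Parse("127458393");
byte[] keyBytes = shared.ToByteArray().Reverse().ToArray(); // big-endian, matching the hotfix above

var client = new ARC4_New(keyBytes);
var server = new ARC4_New(keyBytes); // same bytes on both ends -> identical RC4 keystream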

How to generate a CRC-16 from C#

I am trying to generate a CRC-16 using C#. The hardware I am using for RS232 requires the input string to be HEX. For a test, I need the input 8000 to produce 0xC061; however, the C# method that generates the CRC-16 must be able to handle any given HEX string.
I have tried using Nito.KitchenSink.CRC
I have also tried the code below, which generates 8009 when 8000 is input:
public string CalcCRC16(string strInput)
{
    ushort crc = 0x0000;
    byte[] data = GetBytesFromHexString(strInput);
    for (int i = 0; i < data.Length; i++)
    {
        crc ^= (ushort)(data[i] << 8);
        for (int j = 0; j < 8; j++)
        {
            if ((crc & 0x8000) > 0)
                crc = (ushort)((crc << 1) ^ 0x8005);
            else
                crc <<= 1;
        }
    }
    return crc.ToString("X4");
}

public Byte[] GetBytesFromHexString(string strInput)
{
    Byte[] bytArOutput = new Byte[] { };
    if (!string.IsNullOrEmpty(strInput) && strInput.Length % 2 == 0)
    {
        SoapHexBinary hexBinary = null;
        try
        {
            hexBinary = SoapHexBinary.Parse(strInput);
            if (hexBinary != null)
            {
                bytArOutput = hexBinary.Value;
            }
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message);
        }
    }
    return bytArOutput;
}
Here we go; note that this is a specific flavor of CRC-16 - it is confusing to say just "CRC-16". This borrows some implementation specifics from http://www.sanity-free.com/ - note I have made it static rather than instance-based.
using System;

static class Program
{
    static void Main()
    {
        string input = "8000";
        var bytes = HexToBytes(input);
        string hex = Crc16.ComputeChecksum(bytes).ToString("x2");
        Console.WriteLine(hex); // c061
    }

    static byte[] HexToBytes(string input)
    {
        byte[] result = new byte[input.Length / 2];
        for (int i = 0; i < result.Length; i++)
        {
            result[i] = Convert.ToByte(input.Substring(2 * i, 2), 16);
        }
        return result;
    }

    public static class Crc16
    {
        const ushort polynomial = 0xA001;
        static readonly ushort[] table = new ushort[256];

        public static ushort ComputeChecksum(byte[] bytes)
        {
            ushort crc = 0;
            for (int i = 0; i < bytes.Length; ++i)
            {
                byte index = (byte)(crc ^ bytes[i]);
                crc = (ushort)((crc >> 8) ^ table[index]);
            }
            return crc;
        }

        static Crc16()
        {
            ushort value;
            ushort temp;
            for (ushort i = 0; i < table.Length; ++i)
            {
                value = 0;
                temp = i;
                for (byte j = 0; j < 8; ++j)
                {
                    if (((value ^ temp) & 0x0001) != 0)
                    {
                        value = (ushort)((value >> 1) ^ polynomial);
                    }
                    else
                    {
                        value >>= 1;
                    }
                    temp >>= 1;
                }
                table[i] = value;
            }
        }
    }
}
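To match the shape of the method in the question (hex string in, hex string out), a small wrapper around the class above could look like this; a sketch reusing the HexToBytes helper from the Main example:
// Hypothetical glue for the question's use case: hex string in, 4-digit hex CRC out.
public static string CalcCRC16(string hexInput)
{
    byte[] data = HexToBytes(hexInput);
    return Crc16.ComputeChecksum(data).ToString("X4"); // "8000" -> "C061"
}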
In addition, if you want CRC-16/CCITT:
private ushort Crc16Ccitt(byte[] bytes)
{
    const ushort poly = 4129;
    ushort[] table = new ushort[256];
    ushort initialValue = 0xffff;
    ushort temp, a;
    ushort crc = initialValue;
    for (int i = 0; i < table.Length; ++i)
    {
        temp = 0;
        a = (ushort)(i << 8);
        for (int j = 0; j < 8; ++j)
        {
            if (((temp ^ a) & 0x8000) != 0)
                temp = (ushort)((temp << 1) ^ poly);
            else
                temp <<= 1;
            a <<= 1;
        }
        table[i] = temp;
    }
    for (int i = 0; i < bytes.Length; ++i)
    {
        crc = (ushort)((crc << 8) ^ table[((crc >> 8) ^ (0xff & bytes[i]))]);
    }
    return crc;
}
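A minimal usage sketch for the method above, assuming the GetBytesFromHexString helper from the question is available; the resulting value depends on this variant's 0xFFFF initial value, so verify it against your hardware:
// Hypothetical driver: hex string in, 4-digit hex CRC out.
byte[] data = GetBytesFromHexString("8000");
ushort crc = Crc16Ccitt(data);
Console.WriteLine(crc.ToString("X4"));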

How do I make this code decrypt as well as encrypt in C#? I use XOR and OR [closed]

I'm trying to turn this encrypt code into a decrypt, but it has a problem. I use CryptCounter to maintain a counter: PrepareAuthCryptography is called first, then AuthCryptography uses Key2 (the counter >> 8) and Key1 (the counter & 0xFF). See the code and you will understand it:
public class AuthCryptography
{
    class CryptCounter
    {
        public CryptCounter()
        {
        }

        public CryptCounter(ushort with)
        {
            m_Counter = with;
        }

        UInt16 m_Counter = 0;

        public byte Key2
        {
            get { return (byte)(m_Counter >> 8); }
        }

        public byte Key1
        {
            get { return (byte)(m_Counter & 0xFF); }
        }

        public void Increment()
        {
            m_Counter++;
        }
    }

    private CryptCounter _decryptCounter;
    private CryptCounter _encryptCounter;
    private static byte[] _cryptKey1;
    private static byte[] _cryptKey2;

    public static void PrepareAuthCryptography()
    {
        if (_cryptKey1 != null)
        {
            if (_cryptKey1.Length != 0)
                return;
        }
        _cryptKey1 = new byte[0x100];
        _cryptKey2 = new byte[0x100];
        byte i_key1 = 0x9D;
        byte i_key2 = 0x62;
        for (int i = 0; i < 0x100; i++)
        {
            _cryptKey1[i] = i_key1;
            _cryptKey2[i] = i_key2;
            i_key1 = (byte)((0x0F + (byte)(i_key1 * 0xFA)) * i_key1 + 0x13);
            i_key2 = (byte)((0x79 - (byte)(i_key2 * 0x5C)) * i_key2 + 0x6D);
        }
    }

    public AuthCryptography()
    {
        _encryptCounter = new CryptCounter();
        _decryptCounter = new CryptCounter();
    }

    public void Encrypt(byte[] buffer)
    {
        for (int i = 0; i < buffer.Length; i++)
        {
            buffer[i] ^= (byte)0xAB;
            buffer[i] = (byte)(buffer[i] >> 4 | buffer[i] << 4);
            buffer[i] ^= (byte)(_cryptKey1[_encryptCounter.Key1] ^ _cryptKey2[_encryptCounter.Key2]);
            _encryptCounter.Increment();
        }
    }

    public void Decrypt(byte[] buffer)
    {
        for (int i = 0; i < buffer.Length; i++)
        {
            buffer[i] ^= (byte)0xAB;
            buffer[i] = (byte)(buffer[i] >> 4 | buffer[i] << 4);
            buffer[i] ^= (byte)(_cryptKey2[_decryptCounter.Key2] ^ _cryptKey1[_decryptCounter.Key1]);
            _decryptCounter.Increment();
        }
    }

    public void Decrypt4(byte[] buffer)
    {
        for (int i = 0; i < buffer.Length; i++)
        {
            buffer[i] ^= (byte)(_cryptKey1[_encryptCounter.Key1] ^ _cryptKey2[_encryptCounter.Key2]);
            buffer[i] = (byte)(buffer[i] << 4 | buffer[i] >> 4);
            buffer[i] ^= (byte)0xAB;
            _encryptCounter.Increment();
        }
    }
}
Decryption should be the inverse of encryption; since this is a pipeline of reversible operations, just apply the inverse steps in reverse order. Like this (not tested):
public void Decrypt(byte[] buffer) // instance method, so it can use _decryptCounter
{
    for (int i = 0; i < buffer.Length; i++)
    {
        int temp = buffer[i] ^ _cryptKey2[_decryptCounter.Key2] ^ _cryptKey1[_decryptCounter.Key1];
        temp = (temp >> 4) | (temp << 4);
        temp ^= 0xAB;
        buffer[i] = (byte)temp;
        _decryptCounter.Increment();
    }
}
That's equivalent to:
public void Decrypt(byte[] buffer)
{
    for (int i = 0; i < buffer.Length; i++)
    {
        int temp = buffer[i] ^ 0xBA ^ _cryptKey2[_decryptCounter.Key2] ^ _cryptKey1[_decryptCounter.Key1];
        temp = (temp >> 4) | (temp << 4);
        buffer[i] = (byte)temp;
        _decryptCounter.Increment();
    }
}
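The two versions are equivalent because XOR distributes over the nibble swap and swapping the nibbles of 0xBA gives 0xAB. A minimal round-trip sketch under that assumption, using the class from the question with Decrypt replaced by the instance method above:
// Hypothetical round-trip check: Encrypt then Decrypt should restore the original bytes.
AuthCryptography.PrepareAuthCryptography();
var sender = new AuthCryptography();
var receiver = new AuthCryptography();

byte[] message = { 0x01, 0x02, 0x03, 0x04 };
sender.Encrypt(message);    // advances sender's _encryptCounter
receiver.Decrypt(message);  // advances receiver's _decryptCounter
// message is now back to { 0x01, 0x02, 0x03, 0x04 }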

CRC_CCITT Kermit 16 in C#

I'm currently working on something that requires the CRC-CCITT Kermit 16 algorithm with the polynomial x^16 + x^12 + x^5 + 1. However, with the code I've found online, both on this site and on the web in general, I don't seem to get my desired result. I found this website (http://www.lammertbies.nl/comm/info/crc-calculation.html) that actually provides the exact match I want, but it is written in C++. So can anyone help me with this?
I look forward to hearing from you.
Kind regards
Michael
If you need CRC-CCITT 16 Kermit, you'll need the following code (from my site):
var crc16Kermit = new Crc16( Crc16Mode.CcittKermit );
var checksum = crc16Kermit.ComputeChecksumBytes( 0x01, 0x23, 0x45 );
// checksum = 0x2e, 0x46
here's the source for the above code
using System;

public enum Crc16Mode : ushort { Standard = 0xA001, CcittKermit = 0x8408 }

public class Crc16 {
    // Per-instance table: its contents depend on the mode passed to the constructor,
    // so a shared static table would be wrong if both modes were ever used.
    readonly ushort[] table = new ushort[256];

    public ushort ComputeChecksum( params byte[] bytes ) {
        ushort crc = 0;
        for(int i = 0; i < bytes.Length; ++i) {
            byte index = (byte)(crc ^ bytes[i]);
            crc = (ushort)((crc >> 8) ^ table[index]);
        }
        return crc;
    }

    public byte[] ComputeChecksumBytes( params byte[] bytes ) {
        ushort crc = ComputeChecksum( bytes );
        return BitConverter.GetBytes( crc );
    }

    public Crc16( Crc16Mode mode ) {
        ushort polynomial = (ushort)mode;
        ushort value;
        ushort temp;
        for(ushort i = 0; i < table.Length; ++i) {
            value = 0;
            temp = i;
            for(byte j = 0; j < 8; ++j) {
                if(((value ^ temp) & 0x0001) != 0) {
                    value = (ushort)((value >> 1) ^ polynomial);
                } else {
                    value >>= 1;
                }
                temp >>= 1;
            }
            table[i] = value;
        }
    }
}
link for the above code: http://sanity-free.org/147/standard_crc16_and_crc16_kermit_implementation_in_csharp.html
Seems you want CRC16 CCITT. Try this:
using System;

public enum InitialCrcValue { Zeros, NonZero1 = 0xffff, NonZero2 = 0x1D0F }

public class Crc16Ccitt {
    const ushort poly = 4129;
    ushort[] table = new ushort[256];
    ushort initialValue = 0;

    public ushort ComputeChecksum(byte[] bytes) {
        ushort crc = this.initialValue;
        for(int i = 0; i < bytes.Length; ++i) {
            crc = (ushort)((crc << 8) ^ table[((crc >> 8) ^ (0xff & bytes[i]))]);
        }
        return crc;
    }

    public byte[] ComputeChecksumBytes(byte[] bytes) {
        ushort crc = ComputeChecksum(bytes);
        return BitConverter.GetBytes(crc);
    }

    public Crc16Ccitt(InitialCrcValue initialValue) {
        this.initialValue = (ushort)initialValue;
        ushort temp, a;
        for(int i = 0; i < table.Length; ++i) {
            temp = 0;
            a = (ushort)(i << 8);
            for(int j = 0; j < 8; ++j) {
                if(((temp ^ a) & 0x8000) != 0) {
                    temp = (ushort)((temp << 1) ^ poly);
                } else {
                    temp <<= 1;
                }
                a <<= 1;
            }
            table[i] = temp;
        }
    }
}
Source:
http://sanity-free.org/133/crc_16_ccitt_in_csharp.html
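A minimal usage sketch for this class; note that the different InitialCrcValue options correspond to different CCITT variants, so pick the one your device expects, and keep in mind that Kermit itself is the reflected algorithm from the first answer, not this one:
// Hypothetical usage: same input bytes, different initial values give different CCITT variants.
var crcZeros = new Crc16Ccitt(InitialCrcValue.Zeros);
var crcFfff  = new Crc16Ccitt(InitialCrcValue.NonZero1); // 0xFFFF start value
ushort a = crcZeros.ComputeChecksum(new byte[] { 0x01, 0x23, 0x45 });
ushort b = crcFfff.ComputeChecksum(new byte[] { 0x01, 0x23, 0x45 });
Console.WriteLine("{0:X4} {1:X4}", a, b);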

C# byte[] to List<bool>

For the opposite direction, from bool[] to byte[], see: Convert bool[] to byte[]
But I need to convert a byte[] to a List<bool> where the first item in the list is the LSB.
I tried the code below, but when converting to bytes and back to bools again I get two totally different results:
public List<bool> Bits = new List<bool>();

public ToBools(byte[] values)
{
    foreach (byte aByte in values)
    {
        for (int i = 0; i < 7; i++)
        {
            Bits.Add(aByte.GetBit(i));
        }
    }
}

public static bool GetBit(this byte b, int index)
{
    if (b == 0)
        return false;
    BitArray ba = b.Byte2BitArray();
    return ba[index];
}
You're only considering 7 bits, not 8. This instruction:
for (int i = 0; i < 7; i++)
Should be:
for (int i = 0; i < 8; i++)
Anyway, here's how I would implement it:
byte[] bytes = ...
List<bool> bools = bytes.SelectMany(GetBitsStartingFromLSB).ToList();
...

static IEnumerable<bool> GetBitsStartingFromLSB(byte b)
{
    for (int i = 0; i < 8; i++)
    {
        yield return (b % 2 == 0) ? false : true;
        b = (byte)(b >> 1);
    }
}
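For example, with this LSB-first ordering the single byte 0x05 (binary 0000 0101) comes out as true, false, true followed by five false entries. A small sketch, assuming the GetBitsStartingFromLSB method above and using System.Linq / System.Collections.Generic:
byte[] bytes = { 0x05 };
List<bool> bools = bytes.SelectMany(GetBitsStartingFromLSB).ToList();
// bools: true, false, true, false, false, false, false, false  (bit 0 first)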
