I am new to using Modbus communication, and while I found some related threads here, unfortunately they were for other languages or used TCP rather than an RTU connection for Modbus.
So I have this segment of C# code that I can use to send data:
byte address = Convert.ToByte(txtSlaveID.Text);
ushort start = Convert.ToUInt16(txtWriteRegister.Text);
short[] value = new short[1];
if (Int16.TryParse(txtWriteValue.Text, out short numberValue))
{
    value[0] = numberValue; // This part works!
}
else
{
    value = new short[3] { 0x52, 0x4E, 0x56 }; // This is where I am trying to send letters/ASCII
}
try
{
    mb.SendFc16(address, start, (ushort)value.Length, value);
}
catch (Exception err)
{
    WriteLog("Error in write function: " + err.Message);
}
WriteLog(mb.modbusStatus);
So when I want to send down a single value, this code works. It takes the short array and does the following to build the packet:
// Put write values into message prior to sending:
for (int i = 0; i < registers; i++)
{
    message[7 + 2 * i] = (byte)(values[i] >> 8); // high byte of the register
    message[8 + 2 * i] = (byte)(values[i]);      // low byte of the register
}
So, as you can see, I attempted to put the hex values for the characters in the array and send them down to the registers.
How can I modify the first sample of code so it can send down hex values and write characters into the register space shown in the first image?
I don't think you can write characters directly to the device's registers.
You don't need to store hex values in the short array. Simply store the characters in the array, and convert them to bytes before writing them to the register.
Note: whatever data is written to the device's registers must be in bytes.
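To make that concrete, here is a minimal sketch (my addition, not from the original post; PackAsciiIntoRegisters is a hypothetical helper) of packing an ASCII string into the short[] that SendFc16 already splits into high/low bytes. It assumes the device stores two characters per 16-bit register, high byte first; check your device's register map before relying on that layout.
// Hypothetical helper: pack an ASCII string into 16-bit Modbus registers,
// two characters per register, high byte first.
static short[] PackAsciiIntoRegisters(string text)
{
    byte[] ascii = System.Text.Encoding.ASCII.GetBytes(text);
    short[] registers = new short[(ascii.Length + 1) / 2];
    for (int i = 0; i < ascii.Length; i++)
    {
        if (i % 2 == 0)
            registers[i / 2] = (short)(ascii[i] << 8); // high byte
        else
            registers[i / 2] |= (short)ascii[i];       // low byte
    }
    return registers;
}
// Usage with the code from the question:
// short[] value = PackAsciiIntoRegisters("RNV");
// mb.SendFc16(address, start, (ushort)value.Length, value);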
Related
I want to create the same message and send it with C# as I do with C++, where everything works. Note that I have a C# client where I have trouble, a C++ client where everything works fine, and a C++ server that should read messages from both the C# and C++ clients.
Here is how I send the message from C++:
void ConnectAuthserverCommand::SendLogin(tcp::socket &s, const flatbuffers::FlatBufferBuilder &builder) const {
    ClientOpcode opc = CLIENT_LOGIN_REQUEST;
    flatbuffers::FlatBufferBuilder builder2;
    auto email = builder2.CreateString("test#abv.bg");
    auto password = builder2.CreateString("test");
    auto loginRequest = Vibranium::CreateLoginRequest(builder2, email, password);
    builder2.FinishSizePrefixed(loginRequest);
    size_t size2 = builder2.GetSize();
    uint8_t *buf2 = builder2.GetBufferPointer();
    uint8_t *actualBuffer2 = new uint8_t[size2 + 2];
    actualBuffer2[1] = (opc >> 8);
    actualBuffer2[0] = (opc & 0xFF);
    memcpy(actualBuffer2 + 2, buf2, size2);
    boost::asio::write(s, boost::asio::buffer(actualBuffer2, size2 + 2)); // 2-byte opcode + payload
    delete[] actualBuffer2; // avoid leaking the staging buffer
}
ClientOpcode is as follows:
enum ClientOpcode : uint16_t {
    CLIENT_AUTH_CONNECTION = 0x001,
    CLIENT_LOGIN_REQUEST   = 0x002,
    CLIENT_NUM_MSG_TYPES   = 0x003,
};
What I do is the following: I take a ClientOpcode which I want to put in front of the FlatBuffers message. So I create an array of uint8_t which I extend by exactly 2 bytes (because the size of uint16_t is 2 bytes). Then on the server I read the first 2 bytes in order to get the header, and here is how I do that:
void Vibranium::Client::read_header() {
    auto self(shared_from_this());
    _packet.header_buffer.resize(_packet.header_size);
    boost::asio::async_read(socket,
        boost::asio::buffer(_packet.header_buffer.data(), _packet.header_size),
        [this, self](boost::system::error_code ec, std::size_t bytes_transferred)
        {
            if ((boost::asio::error::eof == ec) || (boost::asio::error::connection_reset == ec))
            {
                Disconnect();
            }
            else
            {
                assert(_packet.header_buffer.size() >= sizeof(_packet.headerCode));
                std::memcpy(&_packet.headerCode, &_packet.header_buffer[0], sizeof(_packet.headerCode));
                if (_packet.headerCode)
                    read_size();
                else
                    Logger::Log("UNKNOWN HEADER CODE", Logger::FatalError);
            }
        });
}
So far so good. However, I am not able to send the same, correctly formatted message from the C# client, even though I send exactly the same data. Take a look:
Client authClient = GameObject.Find("Client").GetComponent<AuthClient>().client; // This is how I get Client class instance.
ClientOpcode clientOpcode = ClientOpcode.CLIENT_LOGIN_REQUEST;
var builder = new FlatBuffers.FlatBufferBuilder(1);
var email = builder.CreateString("test#abv.bg");
var password = builder.CreateString("test");
var loginRequest = LoginRequest.CreateLoginRequest(builder, email, password);
builder.FinishSizePrefixed(loginRequest.Value);
authClient.Send(builder, clientOpcode);
And here is how I actually prepend the header and send the data in C#:
public static Byte[] PrependClientOpcode(FlatBufferBuilder byteBuffer, ClientOpcode code)
{
    var originalArray = byteBuffer.SizedByteArray();
    byte[] buffer = new byte[originalArray.Length + 2];
    buffer[1] = (byte)((ushort)code >> 8); // high byte
    buffer[0] = (byte)code;                // low byte
    Array.Copy(originalArray, 0, buffer, 2, originalArray.Length);
    return buffer;
}
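As a quick sanity check (my addition, not part of the original post), the two header bytes this method writes should decode back to the opcode the same way the C++ server's memcpy reads them on a little-endian machine:
// Hedged sketch: verify the 2-byte header round-trips to the opcode.
var framed = PrependClientOpcode(builder, ClientOpcode.CLIENT_LOGIN_REQUEST);
ushort decoded = (ushort)(framed[0] | (framed[1] << 8)); // low byte first
System.Diagnostics.Debug.Assert(decoded == (ushort)ClientOpcode.CLIENT_LOGIN_REQUEST);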
public void Send(FlatBufferBuilder builder, ClientOpcode opcode)
{
    var bufferToSend = PrependClientOpcode(builder, opcode);
    if (bufferToSend.Length > MaxMessageSize)
    {
        Logger.LogError("Client.Send: message too big: " + bufferToSend.Length + ". Limit: " + MaxMessageSize);
        return;
    }
    if (Connected)
    {
        // Respect max message size to avoid allocation attacks.
        if (bufferToSend.Length <= MaxMessageSize)
        {
            // Add to send queue and return immediately.
            // Calling Send here would be blocking (sometimes for long times
            // if the other side lags or the wire was disconnected).
            sendQueue.Enqueue(bufferToSend);
            sendPending.Set(); // interrupt SendThread WaitOne()
        }
    }
    else
    {
        Logger.LogWarning("Client.Send: not connected!");
    }
}
The ClientOpcode enum in C# is as follows:
public enum ClientOpcode : ushort
{
    CLIENT_AUTH_CONNECTION = 0x001,
    CLIENT_LOGIN_REQUEST   = 0x002,
    CLIENT_NUM_MSG_TYPES   = 0x003,
}
I think I can use ushort as a replacement for uint16_t in C#; that is why ClientOpcode is a ushort.
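That assumption is easy to verify (a minimal check, added here for completeness): ushort is a 16-bit unsigned integer in C#, matching uint16_t in C++.
// sizeof on built-in types is allowed in a safe context.
System.Diagnostics.Debug.Assert(sizeof(ushort) == 2);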
When I send the message, I get an error on the server saying UNKNOWN HEADER CODE. If you take a look at the C++ server code that reads the header, you'll see that this message is displayed when the server is unable to read the header code. So somehow I am unable to place the ClientOpcode header correctly in front of the TCP message sent from the C# client.
In order to find out what the differences are, I installed Wireshark on the host and captured both messages, one from the correctly working C++ client and one from the C# client (screenshots omitted; the raw payloads are reproduced below). As you can see in the C# capture, the length is bigger: the C++ message has a length of 58, while the C# message's length is 62. Why?
The C++ client is sending data:
0200340000000c00000008000c00040008000800000014000000040000000400000074657374000000000b00000074657374406162762e626700
While the C# client is sending:
0000003a0200340000000c00000008000c00040008000800000014000000040000000400000074657374000000000b00000074657374406162762e626700
The C# client is adding 0000003a in front of its message (0x3a is 58, exactly the length of the C++ message). If I remove that, the messages are identical and everything should work.
Why is my C# client adding that extra data in front, and how can I fix it?
I've been breaking my head over a bug in this system I've been building. Basically, I use sockets to communicate between two C# applications, or rather between a Unity C# script server and a C# client application.
With manual tests, the system works perfectly fine, no anomalies whatsoever.
In order to test performance and multi-user functionality, I wrote up a tester class which launches multiple threads (clients) and has them fire X amount of messages at the server. Here's where my problem occurs... sometimes.
When a Socket sends or receives, it returns an integer containing the amount of bytes that was sent/received. When the problem occurs, I can see that the correct amount of bytes arrived at the server. However, after putting the bytes into a string, I'm suddenly left with an empty string instead of the message I'd normally see there.
I'm at a loss at to what's causing this problem. I'm using Encoding.Default.GetString() to translate the bytes into a string.
Any help is appreciated!
David
public void ReceiveFromClient(Socket handlerSocket)
{
    serverBuffer = new byte[iBufferSize]; // iBufferSize = 8192;
    int i = handlerSocket.Receive(serverBuffer);
    Debug.Log("Bytes received: " + i);
    string message = Encoding.UTF8.GetString(serverBuffer, 0, i);
    Debug.Log("Message received: " + message);
    // Do stuff with the message
}
bool SendMessageToUnity(string input)
{   // returns a bool saying whether the message was sent or not
    if (clientSocket != null)
    {
        if (clientSocket.Connected)
        {
            byte[] bytes = Encoding.UTF8.GetBytes(input + "|");
            txtOutput.BeginInvoke(new Action(() => txtOutput.AppendText("Sending message: " + Encoding.UTF8.GetString(bytes) + Environment.NewLine)));
            int i = clientSocket.Send(bytes);
            txtOutput.BeginInvoke(new Action(() => txtOutput.AppendText("Sending " + i + " bytes. " + Environment.NewLine)));
            return true;
        }
    }
    return false;
}
Look for a zero value ('\0') in your array of bytes before converting it to a string.
private string GetString(byte[] data)
{
    // Drop NUL bytes (e.g. the unused tail of the receive buffer) before decoding.
    data = data.Where(b => b != 0).ToArray();
    return Encoding.UTF8.GetString(data);
}
If you are getting the byte array correctly, then the problem is in the encoding.
Check the encoding used by the sender; it is usually UTF-8, but you have to verify it.
Then decode with: var inputStr = Encoding.UTF8.GetString(InputByteArray);
I'm trying to communicate between C# and C++ with varying amounts of success.
I am able to send a message between the two using reply/request, but the doubles that I am receiving are not correct.
For debugging purposes and understanding, I am currently running the following:
Clrzmq 3.0 rc1, Google ProtocolBuffer 2.5, Protobuf-csharp-port-2.4, ZeroMQ-3.2.3
.proto
package InternalComm;

message Point
{
    optional double x = 1;
    optional double y = 2;
    optional string label = 3;
}
server.cpp (the relevant part)
while (true) {
    zmq::message_t request;
    // Wait for next request from client
    socket.recv(&request);
    // Echo the request back to the client unchanged
    zmq::message_t reply(request.size());
    memcpy((void*)reply.data(), request.data(), request.size());
    socket.send(reply);
}
client.cs (the relevant part)
public static Point ProtobufPoint(Point point)
{
    Point rtn = new Point(0, 0);
    using (var context = ZmqContext.Create())
    {
        using (ZmqSocket requester = context.CreateSocket(SocketType.REQ))
        {
            requester.Connect("tcp://localhost:5555");
            var p = InternalComm.Point.CreateBuilder().SetX(point.X).SetY(point.Y).Build().ToByteArray();
            requester.Send(p);
            string reply = requester.Receive(Encoding.ASCII);
            Console.WriteLine("Input: {0}", point);
            byte[] bytes = System.Text.Encoding.ASCII.GetBytes(reply);
            var message = InternalComm.Point.ParseFrom(bytes);
            rtn.X = message.X;
            rtn.Y = message.Y;
            Console.WriteLine("Output: {0}", rtn);
        }
    }
    return rtn;
}
On the C# side, Point is a very simple struct with just x and y properties.
Here is what I'm getting from my unit tests as a result of running the above code.
Input (1.31616874365468, 4.55516872325469)
Output (0.000473917985115791, 4.55516872323627)
Input (274.120398471829, 274.128936418736)
Output (274.077917334613, 274.128936049925)
Input (150.123798461987, 2.345E-12)
Output (145.976459594794, 1.11014954927532E-13)
Input (150, 0)
Output (145.96875, 0)
I am thinking that the problem is that my protobuf code is incorrect (it's doubtful this is a bug on Skeet's side). I am also running under the assumption that server.cpp does nothing to the message but return it as is.
Thoughts?
The requester.Receive(Encoding.ASCII) call is designed to receive a string, not a block of bytes. You are asking the ZmqSocket instance to return the message as an ASCII string, which is highly likely to modify the content. If you're sending a byte array, receive a byte array.
Try this:
int readSize;
byte[] reply = requester.Receive(null, out readSize);
var message = InternalComm.Point.ParseFrom(reply);
The readSize variable will contain the actual number of valid bytes in the received block, which may vary from the size of the reply array, so you may need to slice up the array to make it palatable to ProtoBuf.
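For example, a minimal sketch of that slicing step (my addition; it assumes readSize may be smaller than reply.Length):
// Copy only the valid bytes before handing them to protobuf.
byte[] valid = new byte[readSize];
Array.Copy(reply, valid, readSize);
var message = InternalComm.Point.ParseFrom(valid);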
Why the ASCII --> bytes --> parsing step? If you're parsing bytes, you should read bytes. If you're parsing text, you should read text.
Unnecessary charset conversions are very likely to be erroneous.
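To illustrate why (my addition, a minimal sketch): .NET's ASCII encoding replaces any byte above 0x7F with '?' (0x3F), so round-tripping raw protobuf bytes through an ASCII string silently corrupts them, which matches the mangled doubles above.
using System;
using System.Text;

// Bytes above 0x7F do not survive an ASCII round trip.
byte[] original = { 0x40, 0x09, 0x21, 0xFB, 0x94 };
string asText = Encoding.ASCII.GetString(original);
byte[] roundTripped = Encoding.ASCII.GetBytes(asText);
Console.WriteLine(BitConverter.ToString(roundTripped)); // 40-09-21-3F-3F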
I am currently working on a robotics project with Arduino. I want to access the serial port from different methods at different times.
For instance, I want to read the ADC at time t1 and get the motor currents at time t2. So I create readADC() and motorCurrents() methods, which should both return int arrays of different sizes. The serial-port DataReceived handler is given below.
private void serialPort1_DataReceived(object sender, System.IO.Ports.SerialDataReceivedEventArgs e)
{
    int n = serialPort1.BytesToRead;
    serialPort1.Read(data, 0, data.Length);
}
I have implemented all the relevant code on the Arduino side, and I have set up the serial port. I need to implement the following commands in C#:
private int[] ReadADC()
{
    string input = "AN\n"; // This is the command for reading the ADC on the Wildthumper board.
    serialPort1.Write(input);
    // The Wildthumper now sends 11 bytes, the last of which is '*' and the first
    // 10 bytes are the data that I want. I need to combine two bytes together
    // from the data received to make five values and return them.
    for (int i = 0; i < 5; i++)
    {
        adcValues[i] = (data[0 + i * 2] << 8) | data[1 + i * 2];
    }
    return adcValues; // Where data is the bytes received on the serial port.
}
Similarly:
private int[] getMotorCurrents()
{
    string input = "MC\n"; // Wildthumper board command
    serialPort1.Write(input);
    // The Wildthumper now sends 5 bytes, with the last one being '*',
    // and the first four bytes are the data that I want.
    for (int i = 0; i < 2; i++)
    {
        MotorCurrents[i] = (data[0 + i * 2] << 8) | data[1 + i * 2];
    }
    return MotorCurrents;
}
First of all, the number of bytes sent to me changes. So how can I use a global variable for data (the variable used to store the received serial data, as shown above)?
You need to create a global variable and save the data to it when the DataReceived event fires. This isn't hard. Here is a code example:
public class MyClass
{
    public string arduinoData = "";

    private void serialPort1_DataReceived(
        object sender, System.IO.Ports.SerialDataReceivedEventArgs e)
    {
        // SerialPort.ReadLine takes no arguments; it returns everything
        // up to the port's NewLine value as a string.
        this.arduinoData = serialPort1.ReadLine();
    }

    // ...the rest of your code, such as main methods, etc...
}
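Since the replies here are binary and vary in length, a string field may not be the best fit. Below is an alternative sketch (my addition; rxQueue and ReadReply are hypothetical names): the DataReceived handler only enqueues raw bytes into a shared thread-safe queue, and each command method drains it until it has the expected data plus the '*' terminator. A real implementation should add a timeout rather than spin.
using System.Collections.Concurrent;
using System.Collections.Generic;

// Shared receive buffer; DataReceived only enqueues raw bytes.
private readonly ConcurrentQueue<byte> rxQueue = new ConcurrentQueue<byte>();

private void serialPort1_DataReceived(object sender, System.IO.Ports.SerialDataReceivedEventArgs e)
{
    int n = serialPort1.BytesToRead;
    byte[] chunk = new byte[n];
    serialPort1.Read(chunk, 0, n);
    foreach (byte b in chunk)
        rxQueue.Enqueue(b);
}

// Collect a fixed-size reply plus its trailing '*' terminator.
private byte[] ReadReply(int expectedDataBytes)
{
    var reply = new List<byte>();
    while (reply.Count < expectedDataBytes + 1) // data bytes plus '*'
    {
        if (rxQueue.TryDequeue(out byte b))
            reply.Add(b);
    }
    return reply.GetRange(0, expectedDataBytes).ToArray(); // drop the '*'
}

// Usage, e.g. in ReadADC(): serialPort1.Write("AN\n"); byte[] data = ReadReply(10);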
I read the tutorials and so on, but I am not getting it. WinPcap does let you send packets, but how can you tell it where to send those packets? Is there any header I should put on the packets so it will know which IPs/ports to forward them to? I mean, let's imagine I want to send some data to my MSN, as if I had written something to someone on my list. I can use sendpacket(), but it only takes the packet/byte array as an argument, without specifying to which app/IP/port to send it.
Thanks
You don't tell Winpcap where to send packets. You tell it to put a packet on the wire. The network switch will send the packet to the right destination. The TCP stack on the receiving end will send the packet to the right application/service. Obviously this means the routing information has to be in the packet itself.
To take your example, you'd need to put the IP address and TCP port of the appropriate MSN server in the packet. If you don't, your networking hardware will discard or misroute that packet.
This is how I sent an ARP request over the wire:
1. Define structures for the protocols, i.e., if you want to send ARP packets you will need a structure that contains the data-link layer (Ethernet header) and the network layer (ARP header). Correspondingly, if you want to send a TCP packet over IP, you will need structures for the Ethernet header, IP header, and TCP header.
2. Once you have defined the structures, initialize an instance with the values you want, i.e., if you want the packet to go to all machines in the network, set the destination MAC value of the Ethernet header to ff:ff:ff:ff:ff:ff; if you want to send the packet to machine X (IP address 192.168.0.88), set the destination address in the IP layer to that value.
3. Once done, declare a char* array, copy all the structures into it to create a byte sequence, and send it over the wire.
// Just to show what I mean by defining structures; this struct doesn't
// relate to the rest of the code snippet.
typedef struct IP_header
{
    u_char VersionNInternetHeaderLength; // Version (4 bits) + Internet header length (4 bits)
    u_char Type;                         // Type of service
    u_short TotalLength;                 // Total length
    u_short Identification;              // Identification
    u_char rsv : 1;                      // Flags (3 bits) + fragment offset (13 bits), as bitfields
    u_char df : 1;
    u_char mf : 1;
    u_char FragmentOffset1 : 5;
    u_char FragmentOffset2;
    u_char TimeToLive;                   // Time to live
    u_char Protocol;                     // Next level protocol of the encapsulated payload
    u_short Checksum;                    // Header checksum
    IP_address SourceAddress;            // Source address
    IP_address DestinationAddress;       // Destination address
    u_int OptionNPadding;                // Option + Padding

    IP_header()
    {
        mf = 0;
        rsv = 0;
        df = 0;
        FragmentOffset1 = 0;
        FragmentOffset2 = 0;
        TimeToLive = 128;
        TotalLength = sizeof(IP_header);
        Identification = 0xABCD;
        Checksum = 0xABCD;
        OptionNPadding = 0;
    }
} IP_header;
Ethernet_header EthernetHeader; // = (Ethernet_header*)malloc(sizeof(Ethernet_header));
ARP_header ARPHeader;           // = (ARP_header*)malloc(sizeof(ARP_header));

ARPHeader.HardwareType = htons(1);     // Ethernet
ARPHeader.ProtocolType = htons(0x800); // IPv4
ARPHeader.OPCODE = htons(1);           // ARP request
ARPHeader.HeaderLength = 6;
ARPHeader.ProtocolLength = 4;
ARPHeader.SenderMAC = MY_FAKE_MAC;
ARPHeader.SenderIP = MY_IP;
ARPHeader.TargetMAC = MAC_address();
ARPHeader.TargetIP = Whose;

EthernetHeader.DestinationMAC = BROADCASTMAC;
EthernetHeader.SourceMAC = MY_FAKE_MAC;
EthernetHeader.EtherType = htons(0x806); // ARP

u_char* packet = (u_char*)malloc(sizeof(EthernetHeader) + sizeof(ARPHeader));
memcpy(packet, &EthernetHeader, sizeof(EthernetHeader));
memcpy(packet + sizeof(EthernetHeader), &ARPHeader, sizeof(ARPHeader));
SendPacket(packet);