I'm trying to translate some code written in C to C# (Compact Framework 2.0).
(It's for a Windows CE device with an RFID reader.)
In C this system call works fine, but it does not work in C#:
Code in C:
HANDLE m_Power = NULL; // <-- this HANDLE is set correctly before ControlDevice is called
int ControlDevice(int device, BYTE power_state)
{
bool bRet;
DWORD tcBuffer2,dwBytesReturned;
unsigned int a = sizeof(power_state);
bRet=DeviceIoControl( m_Power,
device,
&power_state,
sizeof(power_state),
(PBYTE)&tcBuffer2,
sizeof(DWORD),
&dwBytesReturned,
NULL);
if(bRet)
return 1;
else
return -1;
}
Code in C#:
int ControlDevice(int device, byte[] power_state)
{
try
{
bool bRet = false;
int bytesReturned = 0;
byte[] tcBuffer2 = new byte[1];
tcBuffer2[0] = 0;
bRet = Device_WinApi.DeviceIoControlCE(m_Power,
device,
power_state,
(int)Marshal.SizeOf(power_state),
tcBuffer2,
(int)Marshal.SizeOf(tcBuffer2),
out bytesReturned,
IntPtr.Zero);
if (bRet)
return 1;
else
{
int LastError = Device_WinApi.GetLastError();
return -1;
}
}
catch (Exception e)
{
return -1;
}
}
The DLL is imported in this way:
[return: MarshalAs(UnmanagedType.Bool)]
[DllImport("coredll.dll", EntryPoint = "DeviceIoControl", SetLastError = true)]
internal static extern bool DeviceIoControlCE(
IntPtr hDevice,
long dwIoControlCode,
byte[] lpInBuffer,
int nInBufferSize,
byte[] lpOutBuffer,
int nOutBufferSize,
out int lpBytesReturned,
IntPtr lpOverlapped);
In the current C# implementation, bRet is false, but it should be true, since that is the value obtained in C.
This also returns zero:
int LastError = Device_WinApi.GetLastError()
Any help will be really appreciated!
Thank you in advance for your time!
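One mismatch worth checking (my own observation, not confirmed by the original poster): the P/Invoke declares dwIoControlCode as long, which is 64 bits in C#, while the native DWORD is only 32 bits; on a 32-bit CE device that shifts every following argument on the stack. Marshal.SizeOf on a byte[] is also unreliable; the array's Length is what the native side expects. A minimal sketch under those assumptions:
using System;
using System.Runtime.InteropServices;
internal static class Device_WinApi_Sketch
{
    [return: MarshalAs(UnmanagedType.Bool)]
    [DllImport("coredll.dll", EntryPoint = "DeviceIoControl", SetLastError = true)]
    internal static extern bool DeviceIoControlCE(
        IntPtr hDevice,
        uint dwIoControlCode,   // native DWORD is 32 bits; a C# long would be 64
        byte[] lpInBuffer,
        int nInBufferSize,
        byte[] lpOutBuffer,
        int nOutBufferSize,
        out int lpBytesReturned,
        IntPtr lpOverlapped);
    // Call-site sketch: pass array lengths instead of Marshal.SizeOf.
    internal static int ControlDevice(IntPtr hPower, uint device, byte[] powerState)
    {
        byte[] outBuffer = new byte[4]; // the native side writes a DWORD here
        int bytesReturned;
        bool ok = DeviceIoControlCE(hPower, device,
            powerState, powerState.Length,
            outBuffer, outBuffer.Length,
            out bytesReturned, IntPtr.Zero);
        return ok ? 1 : -1;
    }
}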
I have been banging my head against a problem for days now. I would like your help.
I am trying to interface to I2C from a board running Windows CE7. The board is a Boundary Devices Nitrogen6X.
I am trying to code this in C#.
After a lot of googling and trial and error I can now do almost everything with the I2C (by that I mean I have wrapped most commands in methods that work). Of course, the one thing that I cannot do yet is reading and writing. I have been trying a few different implementations, porting C and C++ code that supposedly worked, to no avail. Currently I am putting more effort into the two implementations I will copy here.
Neither of these implementations works for me. Both enter the error-handling branch, and both report error 87 (ERROR_INVALID_PARAMETER).
Does anyone have experience with this kind of issue? Could someone point out what I am doing wrong?
Edit 1: I should probably mention that I am trying to "see" some signals on the SDA and SCL pins of I2C3: on the board by simply connecting an oscilloscope to them. There is no actual device connected to the I2C bus. I would expect this to give me some sort of error after the first byte (address + read/write bit) has been sent, because no acknowledge bit would be received. However, I see that error 87 in my code and no change in the signals on the scope (both remain high, at idle).
(Code snippets follow)
The first one uses unsafe pointers and is probably closer to the original C++ code:
[StructLayout(LayoutKind.Sequential)]
unsafe public struct UNSAFE_I2C_PACKET
{
//[MarshalAs(UnmanagedType.U1)]
public byte byAddr; //I2C slave device address
//[MarshalAs(UnmanagedType.U1)]
public byte byRw; //Read = I2C_Read or Write = I2C_Write
//[MarshalAs(UnmanagedType.LPArray)]
public byte* pbyBuf; //Message Buffer
//[MarshalAs(UnmanagedType.U2)]
public Int16 wLen; //Message Buffer Length
//[MarshalAs(UnmanagedType.LPStruct)]
public int* lpiResult; //Contain the result of last operation
}
[StructLayout(LayoutKind.Sequential)]
unsafe public struct UNSAFE_I2C_TRANSFER_BLOCK
{
//public I2C_PACKET pI2CPackets;
public UNSAFE_I2C_PACKET* pI2CPackets;
public Int32 iNumPackets;
}
[DllImport("coredll.dll", EntryPoint = "DeviceIoControl", SetLastError = true)]
unsafe internal static extern int DeviceIoControlCE(
IntPtr hDevice, //file handle to driver
uint dwIoControlCode, //a correct call to CTL_CODE
[In, Out]byte* lpInBuffer,
uint nInBufferSize,
byte* lpOutBuffer,
uint nOutBufferSize,
uint* lpBytesReturned,
int* lpOverlapped);
unsafe public void ReadI2C(byte* pBuf, int count)
{
int ret;
int iResult;
UNSAFE_I2C_TRANSFER_BLOCK I2CXferBlock = new UNSAFE_I2C_TRANSFER_BLOCK();
UNSAFE_I2C_PACKET i2cPckt = new UNSAFE_I2C_PACKET();
//fixed (byte* p = pBuf)
{
i2cPckt.pbyBuf = pBuf;// p;
i2cPckt.wLen = (Int16)count;
i2cPckt.byRw = I2C_READ;
i2cPckt.byAddr = BASE;
i2cPckt.lpiResult = &iResult;
I2CXferBlock.iNumPackets = 1;
//fixed (I2C_PACKET* pck = &i2cPckt)
{
I2CXferBlock.pI2CPackets = &i2cPckt; // pck;
//fixed (I2C_TRANSFER_BLOCK* tBlock = &I2CXferBlock)
{
if (DeviceIoControlCE(_i2cFile,
I2C_IOCTL_TRANSFER,
(byte*)&I2CXferBlock,//tBlock,
(uint)sizeof(UNSAFE_I2C_TRANSFER_BLOCK),//Marshal.SizeOf(I2CXferBlock),
null,
0,
null,
null) == 0)
{
int error = GetLastError();
diag("Errore nella TRANSFER");
diag(error.ToString());
}
}
}
}
}
The second option I am working on marshals data between managed and unmanaged memory:
[StructLayout(LayoutKind.Sequential)]
public struct I2C_PACKET
//public class I2C_PACKET
{
//[MarshalAs(UnmanagedType.U1)]
public Byte byAddr; //I2C slave device address
//[MarshalAs(UnmanagedType.U1)]
public Byte byRw; //Read = I2C_Read or Write = I2C_Write
//[MarshalAs(UnmanagedType.LPArray)]
public IntPtr pbyBuf; //Message Buffer
//[MarshalAs(UnmanagedType.U2)]
public Int16 wLen; //Message Buffer Length
//[MarshalAs(UnmanagedType.LPStruct)]
public IntPtr lpiResult; //Contain the result of last operation
}
[StructLayout(LayoutKind.Sequential)]
public struct I2C_TRANSFER_BLOCK
{
//public I2C_PACKET pI2CPackets;
//[MarshalAs(UnmanagedType.LPArray)]
public IntPtr pI2CPackets;
//[MarshalAs(UnmanagedType.U4)]
public Int32 iNumPackets;
}
[DllImport("coredll.dll", EntryPoint = "DeviceIoControl", SetLastError = true)]
internal static extern int DeviceIoControlCE(
IntPtr hDevice, //file handle to driver
uint dwIoControlCode, //a correct call to CTL_CODE
[In] IntPtr lpInBuffer,
uint nInBufferSize,
[Out] IntPtr lpOutBuffer,
uint nOutBufferSize,
out uint lpBytesReturned,
IntPtr lpOverlapped);
unsafe public void ReadI2C(byte[] buffer)
{
int count = buffer.Length;
I2C_TRANSFER_BLOCK I2CXFerBlock = new I2C_TRANSFER_BLOCK();
I2C_PACKET I2CPckt = new I2C_PACKET();
I2CPckt.byAddr = BASE;
I2CPckt.byRw = I2C_READ;
I2CPckt.wLen = (Int16)count;
I2CPckt.lpiResult = Marshal.AllocHGlobal(sizeof(int));
I2CPckt.pbyBuf = Marshal.AllocHGlobal(count);
//GCHandle packet = GCHandle.Alloc(I2CPckt, GCHandleType.Pinned);
I2CXFerBlock.iNumPackets = 1;
I2CXFerBlock.pI2CPackets = Marshal.AllocHGlobal(Marshal.SizeOf(I2CPckt)); //(Marshal.SizeOf(typeof(I2C_PACKET)));// //(sizeof(I2C_PACKET));//
Marshal.StructureToPtr(I2CPckt, I2CXFerBlock.pI2CPackets, false);
//I2CXFerBlock.pI2CPackets = packet.AddrOfPinnedObject();
//GCHandle xferBlock = GCHandle.Alloc(I2CXFerBlock, GCHandleType.Pinned);
IntPtr xfer = Marshal.AllocHGlobal(Marshal.SizeOf(I2CXFerBlock)); //(sizeof(I2C_TRANSFER_BLOCK)); //
Marshal.StructureToPtr(I2CXFerBlock, xfer, false);
//IntPtr xfer = xferBlock.AddrOfPinnedObject();
uint size = (uint)sizeof(I2C_TRANSFER_BLOCK);//Marshal.SizeOf(I2CXFerBlock);
uint getCnt = 0;
if ((DeviceIoControlCE(_i2cFile,
I2C_IOCTL_TRANSFER,
xfer,
size,
xfer, //IntPtr.Zero,
size, //0,
out getCnt,
IntPtr.Zero)) == 0)
{
int error = GetLastError();
diag("Errore nella TRANSFER.");
diag(error.ToString());
}
else
{
//success
I2CXFerBlock = (I2C_TRANSFER_BLOCK)Marshal.PtrToStructure(xfer, typeof(I2C_TRANSFER_BLOCK));
I2CPckt = (I2C_PACKET)Marshal.PtrToStructure(I2CXFerBlock.pI2CPackets, typeof(I2C_PACKET));
Marshal.Copy(I2CPckt.pbyBuf, buffer, 0, count);
diag("Success in TRANSFER: " + buffer[0].ToString());
}
Marshal.FreeHGlobal(I2CPckt.pbyBuf);
Marshal.FreeHGlobal(I2CXFerBlock.pI2CPackets);
Marshal.FreeHGlobal(xfer);
//packet.Free();
//xferBlock.Free();
}
Edit 2: The (supposedly) working code I have (which I am unable to run) comes from drivers I have been given, which might be partially proprietary (hence I cannot share them). However, I found online the header for an I2C bus, which contains the following definition:
#define I2C_MACRO_TRANSFER(hDev, pI2CTransferBlock) \
(DeviceIoControl(hDev, I2C_IOCTL_TRANSFER, (PBYTE) pI2CTransferBlock, sizeof(I2C_TRANSFER_BLOCK), NULL, 0, NULL, NULL))
I initially tried giving "null" to parameters as it's done here, but I still got the same error code.
Edit 3: From the same driver, the struct definitions:
// I2C Packet
typedef struct
{
BYTE byAddr; // I2C slave device address for this I2C operation
BYTE byRW; // Read = I2C_READ or Write = I2C_WRITE
PBYTE pbyBuf; // Message Buffer
WORD wLen; // Message Buffer Length
LPINT lpiResult; // Contains the result of last operation
} I2C_PACKET, *PI2C_PACKET;
// I2C Transfer Block
typedef struct
{
I2C_PACKET *pI2CPackets;
INT32 iNumPackets;
} I2C_TRANSFER_BLOCK, *PI2C_TRANSFER_BLOCK;
Edit 4: I tried implementing a version that passes a ref to my structures, as @ctacke suggested in his comment. I still get the same error, so I guess I must have done something different from what he intended. Here is the snippet:
[StructLayout(LayoutKind.Sequential)]
public struct REF_I2C_PACKET //public class REF_I2C_PACKET //
{
//[MarshalAs(UnmanagedType.U1)]
public Byte byAddr; //I2C slave device address
//[MarshalAs(UnmanagedType.U1)]
public Byte byRw; //Read = I2C_Read or Write = I2C_Write
//[MarshalAs(UnmanagedType.LPArray)]
public IntPtr pbyBuf; //Message Buffer
//[MarshalAs(UnmanagedType.U2)]
public Int16 wLen; //Message Buffer Length
//[MarshalAs(UnmanagedType.LPStruct)]
public IntPtr lpiResult; //Contain the result of last operation
}
[StructLayout(LayoutKind.Sequential)]
public struct REF_I2C_TRANSFER_BLOCK //public class REF_I2C_TRANSFER_BLOCK //
{
//public I2C_PACKET pI2CPackets;
public IntPtr pI2CPackets;
//[MarshalAs(UnmanagedType.U4)]
public Int32 iNumPackets;
//[MarshalAs(UnmanagedType.LPArray)]
//[MarshalAs(UnmanagedType.ByValArray, ArraySubType = UnmanagedType.LPStruct, SizeConst = 2)]
//public REF_I2C_PACKET[] pI2CPackets;
}
[DllImport("coredll.dll", EntryPoint = "DeviceIoControl", SetLastError = true)]
unsafe internal static extern int DeviceIoControlCE(
IntPtr hDevice, //file handle to driver
uint dwIoControlCode, //a correct call to CTL_CODE
//[MarshalAs(UnmanagedType.AsAny)]
//[In] object lpInBuffer, //
ref REF_I2C_TRANSFER_BLOCK lpInBuffer,
uint nInBufferSize,
byte* lpOutBuffer, //ref REF_I2C_TRANSFER_BLOCK lpOutBuffer,
uint nOutBufferSize,
out uint lpBytesReturned, //uint* lpBytesReturned,
int* lpOverlapped);
unsafe public void RefReadI2C(byte[] buffer)
{
int count = buffer.Length;
REF_I2C_TRANSFER_BLOCK I2CXFerBlock = new REF_I2C_TRANSFER_BLOCK();
REF_I2C_PACKET[] I2CPckt = new REF_I2C_PACKET[2];
I2CPckt[0] = new REF_I2C_PACKET();
I2CPckt[1] = new REF_I2C_PACKET();
I2CPckt[0].byAddr = BASE;
I2CPckt[0].byRw = I2C_READ;
I2CPckt[0].wLen = (Int16)count;
I2CPckt[0].lpiResult = Marshal.AllocHGlobal(sizeof(int));
I2CPckt[0].pbyBuf = Marshal.AllocHGlobal(count);
Marshal.Copy(buffer, 0, I2CPckt[0].pbyBuf, count);
I2CPckt[1].byAddr = BASE;
I2CPckt[1].byRw = I2C_READ;
I2CPckt[1].wLen = (Int16)count;
I2CPckt[1].lpiResult = Marshal.AllocHGlobal(sizeof(int));
I2CPckt[1].pbyBuf = Marshal.AllocHGlobal(count);
Marshal.Copy(buffer, 0, I2CPckt[0].pbyBuf, count);
I2CXFerBlock.iNumPackets = 2;
I2CXFerBlock.pI2CPackets = Marshal.AllocHGlobal(Marshal.SizeOf(I2CPckt[0])*I2CPckt.Length);
uint size = (uint)Marshal.SizeOf(I2CXFerBlock); //sizeof(REF_I2C_TRANSFER_BLOCK);//Marshal.SizeOf(I2CXFerBlock);
//size += (uint)(Marshal.SizeOf(I2CPckt[0]) * I2CPckt.Length);
uint getCnt = 0;
if ((DeviceIoControlCE(_i2cFile,
I2C_IOCTL_TRANSFER,
ref I2CXFerBlock,
size,
null, //IntPtr.Zero,
0, //0,
out getCnt,
null)) == 0)
{
int error = GetLastError();
diag("Errore nella TRANSFER.");
diag(error.ToString());
}
else
{
//success
I2CPckt = (REF_I2C_PACKET[])Marshal.PtrToStructure(I2CXFerBlock.pI2CPackets, typeof(REF_I2C_PACKET[]));
Marshal.Copy(I2CPckt[0].pbyBuf, buffer, 0, count);
diag("Success in TRANSFER: " + buffer[0].ToString());
}
Marshal.FreeHGlobal(I2CPckt[0].pbyBuf);
Marshal.FreeHGlobal(I2CPckt[0].lpiResult);
}
Edit 5: I found the following code online (http://em-works.googlecode.com/svn/trunk/WINCE600/PLATFORM/COMMON/SRC/SOC/COMMON_FSL_V2_PDK1_9/I2C/PDK/i2c_io.cpp):
BOOL I2C_IOControl(DWORD hOpenContext, DWORD dwCode, PBYTE pBufIn,
DWORD dwLenIn, PBYTE pBufOut, DWORD dwLenOut,
PDWORD pdwActualOut)
{
/*stuff*/
case I2C_IOCTL_TRANSFER:
{
#define MARSHAL 1
#if MARSHAL
DuplicatedBuffer_t Marshalled_pInBuf(pBufIn, dwLenIn, ARG_I_PTR);
pBufIn = reinterpret_cast<PBYTE>( Marshalled_pInBuf.ptr() );
if( (dwLenIn > 0) && (NULL == pBufIn) )
{
return FALSE;
}
#endif
I2C_TRANSFER_BLOCK *pXferBlock = (I2C_TRANSFER_BLOCK *) pBufIn;
if (pXferBlock->iNumPackets<=0)
{
return FALSE;
}
#if MARSHAL
MarshalledBuffer_t Marshalled_pPackets(pXferBlock->pI2CPackets,
pXferBlock->iNumPackets*sizeof(I2C_PACKET),
ARG_I_PTR);
I2C_PACKET *pPackets = reinterpret_cast<I2C_PACKET *>(Marshalled_pPackets.ptr());
if( (NULL == pPackets) )
{
return FALSE;
}
#else
I2C_PACKET *pPackets = pXferBlock->pI2CPackets;
#endif
#if MARSHAL
struct Marshalled_I2C_PACKET
{
MarshalledBuffer_t *pbyBuf;
MarshalledBuffer_t *lpiResult;
} *Marshalled_of_pPackets;
Marshalled_of_pPackets = new Marshalled_I2C_PACKET[pXferBlock->iNumPackets];
if (Marshalled_of_pPackets==0)
{
return FALSE;
}
MarshalledBuffer_t *pMarshalled_ptr;
int i;
// Map pointers for each packet in the array
for (i = 0; i < pXferBlock->iNumPackets; i++)
{
switch( pPackets[i].byRW & I2C_METHOD_MASK )
{
case I2C_RW_WRITE:
pMarshalled_ptr = new MarshalledBuffer_t(
pPackets[i].pbyBuf,
pPackets[i].wLen,
ARG_I_PTR,
FALSE, FALSE);
if (pMarshalled_ptr ==0)
{
bRet = FALSE;
goto cleanupPass1;
}
if (pMarshalled_ptr->ptr()==0)
{
bRet = FALSE;
delete pMarshalled_ptr;
goto cleanupPass1;
}
break;
case I2C_RW_READ:
pMarshalled_ptr = new MarshalledBuffer_t(
pPackets[i].pbyBuf,
pPackets[i].wLen,
ARG_O_PTR, FALSE, FALSE);
if (pMarshalled_ptr ==0)
{
bRet = FALSE;
goto cleanupPass1;
}
if (pMarshalled_ptr->ptr()==0)
{
bRet = FALSE;
delete pMarshalled_ptr;
goto cleanupPass1;
}
break;
default:
{
bRet = FALSE;
goto cleanupPass1;
}
}
pPackets[i].pbyBuf = reinterpret_cast<PBYTE>(pMarshalled_ptr->ptr());
Marshalled_of_pPackets[i].pbyBuf = pMarshalled_ptr;
}
for (i = 0; i < pXferBlock->iNumPackets; i++)
{
pMarshalled_ptr = new MarshalledBuffer_t(
pPackets[i].lpiResult, sizeof(INT),
ARG_O_PDW, FALSE, FALSE);
if (pMarshalled_ptr ==0)
{
bRet = FALSE;
goto cleanupPass2;
}
if (pMarshalled_ptr->ptr()==0)
{
bRet = FALSE;
delete pMarshalled_ptr;
goto cleanupPass2;
}
pPackets[i].lpiResult = reinterpret_cast<LPINT>(pMarshalled_ptr->ptr());
Marshalled_of_pPackets[i].lpiResult = pMarshalled_ptr;
}
#endif
bRet = pI2C->ProcessPackets(pPackets, pXferBlock->iNumPackets);
#if MARSHAL
DEBUGMSG (ZONE_IOCTL|ZONE_FUNCTION, (TEXT("I2C_IOControl:I2C_IOCTL_TRANSFER -\r\n")));
i = pXferBlock->iNumPackets;
cleanupPass2:
for (--i; i>=0; --i)
{
delete Marshalled_of_pPackets[i].lpiResult;
}
i = pXferBlock->iNumPackets;
cleanupPass1:
for (--i; i>=0; --i)
{
delete Marshalled_of_pPackets[i].pbyBuf;
}
delete[] Marshalled_of_pPackets;
#endif
break;
}
/*stuff*/
}
I cannot claim to understand 100% of it, but from the Windows naming conventions (http://msdn.microsoft.com/en-us/library/windows/desktop/aa378932(v=vs.85).aspx) it would appear that the size parameter I should send is the total number of bytes of my transfer, including everything. I have tried to figure that number out myself, but so far I have not been able to. Alternatively, I guess it would be possible to flatten my structures into a byte array, but I suppose the bytes would need to be in a specific order for the system to understand them.
Can anyone pitch in on that?
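For what it's worth, the I2C_MACRO_TRANSFER macro quoted in Edit 2 passes only sizeof(I2C_TRANSFER_BLOCK) as the input size (8 bytes on a 32-bit system: one pointer plus one INT32), not the total size of every buffer; the driver source in Edit 5 marshals pI2CPackets and the per-packet buffers itself. A small sketch of mirroring that on the managed side, under that assumption and reusing the variables from the second snippet:
// Sketch mirroring the native I2C_MACRO_TRANSFER macro: the input size is just
// sizeof(I2C_TRANSFER_BLOCK); the driver dereferences the inner pointers itself.
uint blockSize = (uint)Marshal.SizeOf(typeof(I2C_TRANSFER_BLOCK));   // 8 bytes on 32-bit CE
int ret = DeviceIoControlCE(_i2cFile, I2C_IOCTL_TRANSFER,
    xfer,             // IntPtr to the marshalled I2C_TRANSFER_BLOCK
    blockSize,
    IntPtr.Zero, 0,   // the macro passes NULL/0 for the output buffer
    out getCnt,
    IntPtr.Zero);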
I need to invoke a native DLL from C# code. As I am not very familiar with C/C++, I can't figure out how a structure defined in C should be declared in C# so it can be invoked. The problem is that two parameters seem to be arrays of structs, which I don't know how to declare in C# (see the last code block):
C++ header file:
typedef enum
{
OK = 0,
//others
} RES;
typedef struct
{
unsigned char* pData;
unsigned int length;
} Buffer;
RES SendReceive(uint32 deviceIndex,
Buffer* pReq,
Buffer* pResp,
unsigned int* pReceivedLen,
unsigned int* pStatus);
C# declaration:
enum RES
{
    OK = 0,
    //others
}
struct Buffer
{
public uint Length;
public ??? Data; // <-- I guess it's byte[]
}
[DllImport("somemodule.dll", CallingConvention = CallingConvention.Cdecl)]
public static extern uint SendReceive(
uint hsmIndex,
uint originatorId,
ushort fmNumber,
??? pReq, // <-- should this be ref Buffer[] ?
uint reserved,
??? pResp, // <-- should this be ref Buffer[] ?
ref uint pReceivedLen,
ref uint pFmStatus);
In an equivalent Java client, I found that the parameter is not just one Buffer but an array of Buffers. In C# it would look like this:
var pReq = new Buffer[]
{
new Buffer { Data = new byte[] { 1, 0 }, Length = (uint)2 },
new Buffer {Data = requestStream.ToArray(), Length = (uint)requestStream.ToArray().Length },
//according to the header file, the last item must be {NULL, 0}
new Buffer { Data = null, Length = 0 }
};
var pResp = new Buffer[]
{
new Buffer { Data = new byte[0x1000], Length = 0x1000 },
//according to the header file, the last item must be {NULL, 0}
new Buffer { Data = null, Length = 0x0 }
};
This seems strange to me because the extern C method does have a pointer to a Buffer struct (Buffer*) and not a pointer to a Buffer array (Buffer[]*).
How do I need to define the Struct in C# and the parameter types of the extern method?
Any help appreciated, Thanks.
Firstly, your struct has the fields in the wrong order. And the byte array needs to be declared as IntPtr, with manual marshalling:
struct Buffer
{
public IntPtr Data;
public uint Length;
}
The p/invoke should be:
[DllImport("MyNativeDll.dll", CallingConvention=CallingConvention.Cdecl)]
static extern RES SendReceive(
uint deviceIndex,
[In] Buffer[] pReq,
[In, Out] Buffer[] pResp,
out uint pReceivedLen,
out uint pStatus
);
The byte array needs to be IntPtr so that the struct is blittable. And that's needed so that the array parameters can be declared as Buffer[].
It's going to be a bit of a pain doing the marshalling of the byte arrays. You'll want to use GCHandle to pin the managed byte arrays, and call AddrOfPinnedObject() to get the address of the pinned array for each struct in your arrays of structs. It will be worth your while writing some helper functions to make that task less painful.
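A minimal sketch of such a helper, assuming the Buffer struct above (the pinned handles must stay alive until after the native call returns, so they are handed back to the caller):
using System.Runtime.InteropServices;
static class BufferHelper
{
    // Pins each byte[] and builds the corresponding Buffer structs.
    // The caller keeps the returned GCHandles alive until SendReceive
    // has returned, then frees them with Unpin.
    public static Buffer[] Pin(byte[][] arrays, out GCHandle[] handles)
    {
        var buffers = new Buffer[arrays.Length + 1]; // last entry stays {NULL, 0}
        handles = new GCHandle[arrays.Length];
        for (int i = 0; i < arrays.Length; i++)
        {
            handles[i] = GCHandle.Alloc(arrays[i], GCHandleType.Pinned);
            buffers[i].Data = handles[i].AddrOfPinnedObject();
            buffers[i].Length = (uint)arrays[i].Length;
        }
        return buffers;
    }
    public static void Unpin(GCHandle[] handles)
    {
        foreach (var h in handles) h.Free();
    }
}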
Your method signature in C# should be something like:
[DllImport("MyNativeDll.dll")]
public static extern RES SendReceive (uint32 deviceIndex, ref Buffer pReq, ref Buffer pResp, ref uint pReceivedLen, ref uint pStatus);
See this project; it might help you in the future to generate native calls from .NET:
http://clrinterop.codeplex.com/releases/view/14120
Based on the C++ header but without testing, have a look at the following code:
using System;
using System.Runtime.InteropServices;
using System.Text;
namespace WindowsFormsApplication1
{
public class Class1
{
public struct Buffer
{
[MarshalAs(UnmanagedType.LPStr)]
public StringBuilder pData;
public uint length;
}
[DllImport("kernel32.dll", EntryPoint = "LoadLibrary")]
static extern int LoadLibrary(string lpLibFileName);
[DllImport("kernel32.dll", EntryPoint = "GetProcAddress")]
static extern IntPtr GetProcAddress(int hModule, string lpProcName);
[DllImport("kernel32.dll", EntryPoint = "FreeLibrary")]
static extern bool FreeLibrary(int hModule);
[UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
internal delegate IntPtr SendReceive(
uint deviceIndex,
ref Buffer pReq,
ref Buffer pResp,
uint pReceivedLen,
uint pStatus);
public void ExecuteExternalDllFunction()
{
int dll = 0;
try
{
dll = LoadLibrary(#"somemodule.dll");
IntPtr address = GetProcAddress(dll, "SendReceive");
uint deviceIndex = 0;
Buffer pReq = new Buffer() { length = 0, pData = new StringBuilder() };
Buffer pResp = new Buffer() { length = 0, pData = new StringBuilder() };
uint pReceivedLen = 0;
uint pStatus = 0;
if (address != IntPtr.Zero)
{
SendReceive sendReceive = (SendReceive)Marshal.GetDelegateForFunctionPointer(address, typeof(SendReceive));
IntPtr ret = sendReceive(deviceIndex, ref pReq, ref pResp, pReceivedLen, pStatus);
}
}
catch (Exception Ex)
{
//handle exception...
}
finally
{
if (dll > 0)
{
FreeLibrary(dll);
}
}
}
}
}
I'm trying to implement a custom collation in SQLite for Windows Runtime.
The create_collation method is implemented as follows:
SQLITE_API int sqlite3_create_collation(
sqlite3*,
const char *zName,
int eTextRep,
void *pArg,
int(*xCompare)(void*,int,const void*,int,const void*)
);
So far I have the following C# signature:
[DllImport("sqlite3", EntryPoint = "sqlite3_create_collation", CallingConvention = CallingConvention.Cdecl)]
public static extern int CreateCollation(IntPtr db, [MarshalAs(UnmanagedType.LPStr)] string name, int textRep, object state, Compare callback);
public delegate int Compare(object pCompareArg, int size1, IntPtr Key1, int size2, IntPtr Key2);
This is the implementation:
int i = CreateCollation(db, "unicode_nocase", SQLITE_UTF8, null, CompareMethod);
/* ... */
public static int CompareMethod(object o, int i1, IntPtr s1, int i2, IntPtr s2)
{
return string.Compare(Marshal.PtrToStringUni(s1), Marshal.PtrToStringUni(s2));
}
The application compiles without errors. The call to create_collation returns zero (SQLITE_OK), but if I use the collation in a statement the following error message is returned:
no such collation sequence: unicode_nocase
source reference: https://github.com/doo/SQLite3-WinRT/tree/master/SQLite3Component
Can somebody please help me?
Thank you!
After some time looking around inside Mono.Android.SQLite, which also uses the C implementation of SQLite, I found the solution:
The problem was that sqlite3_create_collation has a void* parameter, which I had incorrectly declared as object in C#; it should be IntPtr.
I have posted my current implementation below. I partially reverse-engineered the solution from the Mono implementation, which calls sqlite3_create_collation twice for every collation to be registered: once with eTextRep set to SQLITE_UTF16LE and a second time with SQLITE_UTF8. I can only imagine that this helps the SQLite core pick a fast implementation for the different formats in which string values are stored; however, the two formats require different decoding when they are converted to C# strings.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
private delegate int CompareCallback(IntPtr pvUser, int len1, IntPtr pv1, int len2, IntPtr pv2);
[DllImport("sqlite3", CallingConvention = CallingConvention.Cdecl)]
private static extern int sqlite3_create_collation(IntPtr db, byte[] strName, int nType, IntPtr pvUser, CompareCallback func);
private const int SQLITE_UTF8 = 1;
private const int SQLITE_UTF16LE = 2;
private const int SQLITE_UTF16BE = 3;
private const int SQLITE_UTF16 = 4; /* Use native byte order */
private const int SQLITE_ANY = 5; /* sqlite3_create_function only */
private const int SQLITE_UTF16_ALIGNED = 8; /* sqlite3_create_collation only */
public void Register(IntPtr db)
{
if (db == IntPtr.Zero)
throw new ArgumentNullException("db");
//create null-terminated UTF8 byte array
string name = Name;
var nameLength = System.Text.Encoding.UTF8.GetByteCount(name);
var nameBytes = new byte[nameLength + 1];
System.Text.Encoding.UTF8.GetBytes(name, 0, name.Length, nameBytes, 0);
//register UTF16 comparison
int result = sqlite3_create_collation(db, nameBytes, SQLITE_UTF16LE, IntPtr.Zero, CompareUTF16);
if (result != 0)
{
string msg = SQLite3.GetErrmsg(db);
throw SQLiteException.New((SQLite3.Result)result, msg);
}
//register UTF8 comparison
result = sqlite3_create_collation(db, nameBytes, SQLITE_UTF8, IntPtr.Zero, CompareUTF8);
if (result != 0)
{
string msg = SQLite3.GetErrmsg(db);
throw SQLiteException.New((SQLite3.Result)result, msg);
}
}
private string GetUTF8String(IntPtr ptr, int len)
{
if (len == 0 || ptr == IntPtr.Zero)
return string.Empty;
if (len == -1)
{
do
{
len++;
}
while (Marshal.ReadByte(ptr, len) != 0);
}
byte[] array = new byte[len];
Marshal.Copy(ptr, array, 0, len);
return Encoding.UTF8.GetString(array, 0, len);
}
private string GetUTF16String(IntPtr ptr, int len)
{
if (len == 0 || ptr == IntPtr.Zero)
return string.Empty;
if (len == -1)
{
return Marshal.PtrToStringUni(ptr);
}
return Marshal.PtrToStringUni(ptr, len / 2);
}
internal int CompareUTF8(IntPtr ptr, int len1, IntPtr ptr1, int len2, IntPtr ptr2)
{
return Compare(GetUTF8String(ptr1, len1), GetUTF8String(ptr2, len2));
}
internal int CompareUTF16(IntPtr ptr, int len1, IntPtr ptr1, int len2, IntPtr ptr2)
{
return Compare(GetUTF16String(ptr1, len1), GetUTF16String(ptr2, len2));
}
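One caveat worth adding (my own note, not from the original answer): sqlite3_create_collation stores the callback and invokes it on later queries, so the delegates created from CompareUTF16 and CompareUTF8 must be kept alive for as long as the collation is registered, otherwise the garbage collector may reclaim them and the next comparison will crash. A minimal sketch, assuming the class above (the constructor name is hypothetical):
// Keep strong references to the callback delegates so the GC cannot collect
// them while native SQLite still holds the corresponding function pointers.
private readonly CompareCallback _compareUtf16Callback;
private readonly CompareCallback _compareUtf8Callback;
public MyCollation()   // hypothetical constructor
{
    _compareUtf16Callback = new CompareCallback(CompareUTF16);
    _compareUtf8Callback = new CompareCallback(CompareUTF8);
}
// ...then pass _compareUtf16Callback / _compareUtf8Callback to
// sqlite3_create_collation inside Register(IntPtr db).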
I am trying to PInvoke this function (GetPackageId) from kernel32.dll:
http://msdn.microsoft.com/en-us/library/windows/desktop/hh446607(v=vs.85).aspx
I defined the structs and imports as follows:
[StructLayout(LayoutKind.Sequential)]
public struct PACKAGE_ID
{
uint reserved;
uint processorArchitecture;
PACKAGE_VERSION version;
String name;
String publisher;
String resourceId;
String publisherId;
}
[StructLayout(LayoutKind.Explicit)]
public struct PACKAGE_VERSION
{
[FieldOffset(0)] public UInt64 Version;
[FieldOffset(0)] public ushort Revision;
[FieldOffset(2)] public ushort Build;
[FieldOffset(4)] public ushort Minor;
[FieldOffset(6)] public ushort Major;
}
[DllImport("kernel32.dll", EntryPoint = "GetPackageId", SetLastError = true)]
static extern int GetPackageId(IntPtr hProcess,out uint bufferLength,out PACKAGE_ID pBuffer);
And calling it like this:
PACKAGE_ID buffer = new PACKAGE_ID();
result = GetPackageId(hProcess, out bufferLength, out buffer);
However I get a return value of 122 (ERROR_INSUFFICIENT_BUFFER). I am rather new to PInvoke and am not quite sure how to proceed from here. Do I need to initialize the strings before calling the function?
You are going to need to change the p/invoke:
[DllImport("kernel32.dll", SetLastError=true)]
static extern int GetPackageId(
IntPtr hProcess,
ref int bufferLength,
IntPtr pBuffer
);
You call it once passing 0 for the length:
int len = 0;
int retval = GetPackageId(hProcess, ref len, IntPtr.Zero);
Then you need to check that retval equals ERROR_INSUFFICIENT_BUFFER. If it does not then you have an error.
if (retval != ERROR_INSUFFICIENT_BUFFER)
throw new Win32Exception();
Otherwise you can continue.
IntPtr buffer = Marshal.AllocHGlobal(len);
retval = GetPackageId(hProcess, ref len, buffer);
Now you can check retval against ERROR_SUCCESS.
if (retval != ERROR_SUCCESS)
throw new Win32Exception();
And finally we can convert the buffer to a PACKAGE_ID.
PACKAGE_ID packageID = (PACKAGE_ID)Marshal.PtrToStructure(buffer,
typeof(PACKAGE_ID));
Put it all together and it looks like this:
int len = 0;
int retval = GetPackageId(hProcess, ref len, IntPtr.Zero);
if (retval != ERROR_INSUFFICIENT_BUFFER)
throw new Win32Exception();
IntPtr buffer = Marshal.AllocHGlobal((int)len);
try
{
retval = GetPackageId(hProcess, ref len, buffer);
if (retval != ERROR_SUCCESS)
throw new Win32Exception();
PACKAGE_ID packageID = (PACKAGE_ID)Marshal.PtrToStructure(buffer,
typeof(PACKAGE_ID));
}
finally
{
Marshal.FreeHGlobal(buffer);
}
From the comments it appears that we also need to make changes to the way the PACKAGE_ID struct is marshalled.
I suggest the following:
[StructLayout(LayoutKind.Sequential)]
public struct PACKAGE_ID
{
uint reserved;
uint processorArchitecture;
PACKAGE_VERSION version;
IntPtr name;
IntPtr publisher;
IntPtr resourceId;
IntPtr publisherId;
}
followed by calls to Marshal.PtrToStringUni to convert the IntPtr string fields into C# strings. Naturally this conversion needs to happen before the call to FreeHGlobal.
My guess is that the API actually allocates the string buffers in the space beyond the end of PACKAGE_ID, which is why you have to ask how much memory to allocate. I don't have Windows 8 at hand to test this hypothesis.
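A minimal sketch of that string conversion, assuming the IntPtr-based struct above (with the fields made accessible for the example) and done while the unmanaged buffer is still allocated:
// Sketch: convert the null-terminated UTF-16 string pointers into C# strings
// before FreeHGlobal releases the buffer they point into.
string name      = packageID.name      != IntPtr.Zero ? Marshal.PtrToStringUni(packageID.name)      : null;
string publisher = packageID.publisher != IntPtr.Zero ? Marshal.PtrToStringUni(packageID.publisher) : null;
string resourceId = packageID.resourceId != IntPtr.Zero ? Marshal.PtrToStringUni(packageID.resourceId) : null;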
From the docs for GetPackageId it seems you should send the size of the buffer as an argument when calling, i.e. bufferLength should be initialized with the size of the passed buffer.
On return, bufferLength will tell you the size of the returned buffer.
Or did I misread the docs?
I have this piece of code that has not been modified, but all of a sudden it has stopped working... I could swear that it used to work, but I can't guarantee it. It throws an exception:
Attempted to read or write protected memory. This is often an indication that other memory is corrupt.
static void Main(string[] args)
{
ErrorMsg(123);
}
[DllImport("kernel32.dll", EntryPoint = "FormatMessageW", CharSet = CharSet.Auto)]
static extern int FormatMessage(int dwFlags, IntPtr lpSource, long dwMessageId, int dwLanguageId, out IntPtr MsgBuffer, int nSize, IntPtr Arguments);
[DllImport("kernel32.dll", CharSet = CharSet.Auto)]
public static extern int GetThreadLocale();
/// <summary>
/// Gets a locale-specific Windows error message for the
/// error code specified.
/// </summary>
/// <param name="errorcode">The errorcode.</param>
public static string ErrorMsg(long errorcode)
{
try
{
if (errorcode == 0)
return "No Error";
IntPtr pMessageBuffer;
int dwBufferLength;
string sMsg;
int dwFormatFlags;
//FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS
dwFormatFlags = 0x00000100 | 0x00000200 | 0x00001000;
dwBufferLength = FormatMessage(dwFormatFlags, IntPtr.Zero, errorcode, GetThreadLocale(), out pMessageBuffer, 0, IntPtr.Zero);
if (dwBufferLength == 0)
return "An Unknown Error Has occured.";
sMsg = Marshal.PtrToStringUni(pMessageBuffer);
Marshal.FreeHGlobal(pMessageBuffer);
return sMsg;
}
catch (Exception ex)
{
return "An Unknown Error Has occured.";
}
}
What am I doing wrong here? I can't seem to find anything. Thanks!
Your code worked fine when I tested it on my machine. By the way, is there any reason you wouldn't prefer the following method, which is a little shorter and achieves the same goal:
static void Main()
{
var ex = new Win32Exception(123);
Console.WriteLine(ex.Message);
}
Of course, under the covers Win32Exception P/Invokes into FormatMessage, but at least it's the .NET Framework that has to worry about it, not us.
UPDATE:
Here's how the Win32Exception.GetErrorMessage method is implemented in .NET:
private static string GetErrorMessage(int error)
{
string result = "";
StringBuilder stringBuilder = new StringBuilder(256);
int num = SafeNativeMethods.FormatMessage(12800, NativeMethods.NullHandleRef, error, 0, stringBuilder, stringBuilder.Capacity + 1, IntPtr.Zero);
if (num != 0)
{
int i;
for (i = stringBuilder.Length; i > 0; i--)
{
char c = stringBuilder[i - 1];
if (c > ' ' && c != '.')
{
break;
}
}
result = stringBuilder.ToString(0, i);
}
else
{
result = "Unknown error (0x" + Convert.ToString(error, 16) + ")";
}
return result;
}
where FormatMessage is declared like this:
[DllImport("kernel32.dll", BestFitMapping = true, CharSet = CharSet.Auto, SetLastError = true)]
public static extern int FormatMessage(int dwFlags, HandleRef lpSource, int dwMessageId, int dwLanguageId, StringBuilder lpBuffer, int nSize, IntPtr arguments);
Try sMsg = Marshal.PtrToStringUni(pMessageBuffer, dwBufferLength);
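Two other things worth checking (my own observations, not confirmed in the original thread): the import declares dwMessageId as long, which is 64 bits in C# while the native DWORD is 32 bits, so in a 32-bit process the remaining arguments get shifted on the stack; and a buffer obtained with FORMAT_MESSAGE_ALLOCATE_BUFFER should be released with LocalFree rather than Marshal.FreeHGlobal. A sketch under those assumptions:
using System;
using System.Runtime.InteropServices;
static class FormatMessageSketch
{
    [DllImport("kernel32.dll", EntryPoint = "FormatMessageW", CharSet = CharSet.Unicode, SetLastError = true)]
    static extern int FormatMessage(int dwFlags, IntPtr lpSource, uint dwMessageId,
        int dwLanguageId, out IntPtr msgBuffer, int nSize, IntPtr arguments);
    [DllImport("kernel32.dll")]
    static extern IntPtr LocalFree(IntPtr hMem);
    public static string ErrorMsg(uint errorCode)
    {
        // FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS
        const int flags = 0x00000100 | 0x00001000 | 0x00000200;
        IntPtr buffer;
        int length = FormatMessage(flags, IntPtr.Zero, errorCode, 0, out buffer, 0, IntPtr.Zero);
        if (length == 0)
            return "An unknown error has occurred.";
        string msg = Marshal.PtrToStringUni(buffer, length);
        LocalFree(buffer);   // allocated by FormatMessage, so release with LocalFree
        return msg;
    }
}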