How to call a C++ method in C# using Invoke - c#

Am I doing it right? I have two projects side by side: the first is code written in C++, and the second (a console project made in ASP.NET Core 3.1) is my attempt to call the method that is in the C++ code.
I need to call the C++ method "Decript" from the C# project. How do I do that?
C++ code
#include <stdlib.h>
#include <string.h>
#include <windows.h>
#include "bascript.hpp"

extern "C"
int FAR PASCAL _export
Decript( const LPSTR name, const LPSTR passwordWithCript,
         LPSTR passwordWithoutCript, unsigned int sizeSpaceRetorn ) {
    LPSTR result = lpDecript( name, passwordWithCript );

    if ( sizeSpaceRetorn < strlen(result) )
        return 0;

    strcpy( passwordWithoutCript, result );
    delete result;

    return 1;
}
C#
class Program
{
    [DllImport(@"C:\MS\VS\TesteDLLCentura\TesteDLLCentura\bin\Debug\netcoreapp3.1\Sises.DLL", CharSet = CharSet.Auto, EntryPoint = "Decript")]
    private static extern string Decript(string name, string passwordWithCript, string passwordWithoutCript, uint sizeSpaceRetorn);

    static void Main(string[] args)
    {
        string retorno = Decript("<user>", "<cript_password>", "", 0);
        Console.WriteLine(retorno);
        Console.ReadLine();
    }
}

You can return a pointer from the native world (C/C++, etc.) as long as you use a .NET-compatible memory allocator. On Windows, that is the COM allocator.
So here are three ways to return a string: ANSI, Unicode and BSTR (Unicode). Note: you should avoid using ANSI on Windows.
C++ side:
extern "C" __declspec(dllexport) void* DecryptA(const char* name, const char* password)
{
char str[] = "hello ansi world";
int size = (lstrlenA(str) + 1) * sizeof(char); // terminating zero
// use .NET compatible allocator
void* buffer = CoTaskMemAlloc(size);
CopyMemory(buffer, str, size);
return buffer;
}
extern "C" __declspec(dllexport) void* DecryptW(const wchar_t* name, const wchar_t* password)
{
wchar_t str[] = L"hello unicode world";
int size = (lstrlenW(str) + 1) * sizeof(wchar_t); // terminating zero
// use .NET compatible allocator
void* buffer = CoTaskMemAlloc(size);
CopyMemory(buffer, str, size);
return buffer;
}
extern "C" __declspec(dllexport) BSTR DecryptBSTR(const wchar_t* name, const wchar_t* password)
{
wchar_t str[] = L"hello BSTR world";
// use .NET compatible allocator and COM coolness
return SysAllocString(str);
}
C# side:
[DllImport("mydll", CharSet = CharSet.Ansi)]
private static extern string DecryptA(string name, string password);
[DllImport("mydll", CharSet = CharSet.Unicode)]
private static extern string DecryptW(string name, string password);
[DllImport("mydll", CharSet = CharSet.Unicode)]
[return: MarshalAs(UnmanagedType.BStr)]
private static extern string DecryptBSTR(string name, string password);
...
static void Main()
{
Console.WriteLine(DecryptA("name", "password"));
Console.WriteLine(DecryptW("name", "password"));
Console.WriteLine(DecryptBSTR("name", "password"));
}
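When the return type is declared as string like this, the P/Invoke marshaler copies the characters into a managed string and then frees the native buffer with CoTaskMemFree, which is why the COM allocator matters. If you prefer explicit control, a minimal sketch of the equivalent manual pattern for the DecryptW export above (the DecryptWRaw/DecryptWManaged names are my own, not part of the original answer):

// using System.Runtime.InteropServices;
[DllImport("mydll", CharSet = CharSet.Unicode, EntryPoint = "DecryptW")]
private static extern IntPtr DecryptWRaw(string name, string password);

static string DecryptWManaged(string name, string password)
{
    IntPtr p = DecryptWRaw(name, password);
    try
    {
        return Marshal.PtrToStringUni(p); // copy the wide string into managed memory
    }
    finally
    {
        Marshal.FreeCoTaskMem(p); // the buffer came from CoTaskMemAlloc
    }
}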

Your C++ function does not return a string or equivalent. It returns an int result (success or failure), and the actual result goes in the passwordWithoutCript buffer.
So you need to create the buffer and pass it in.
Because you are using LPSTR on the C++ side, you need CharSet.Ansi:
[DllImport(#"C:\MS\VS\TesteDLLCentura\TesteDLLCentura\bin\Debug\netcoreapp3.1\Sises.DLL", CharSet = CharSet.Ansi, EntryPoint = "Decript")]
private static extern int Decript(string name, string passwordWithCript, StringBuilder passwordWithoutCript, uint sizeSpaceRetorn);
static void Main(string[] args)
{
var sb = new StringBuilder(1000); // or whatever size
if(Decript("<user>", "<cript_password>", sb, sb.Length) == 1)
Console.WriteLine(retorno);
Console.ReadLine();
}

Related

How to use the catboost C API in dotnet?

I am trying to use catboost C API in C#. Below is the working code in C:
#include <stdio.h>
#include "c_api.h"
int main(int argc, char** argv){
float floatFeatures[3] = {96.215, 1.595655e+09, 3000};
char* catFeatures[0];
double result[1];
ModelCalcerHandle* modelHandle = ModelCalcerCreate();
// LoadFullModelFromFile is time consuming
if (!LoadFullModelFromFile(modelHandle, "../../test_catboost_model.cbm")) {
printf("LoadFullModelFromFile error message: %s\n", GetErrorString());
}
// CalcModelPredictionSingle is fast
if (!CalcModelPredictionSingle(modelHandle,
&floatFeatures, 3,
&catFeatures, 0,
&result, 1
)) {
printf("CalcModelPrediction error message: %s\n", GetErrorString());
}
ModelCalcerDelete(modelHandle);
printf("model score is %.20f", result[0]);
return 0;
}
And below is my attempt to do the same thing in C# (.NET Core on Linux), but it does not work: when I run "dotnet run" there is no output and no error message.
class Program
{
    static void Main(string[] args)
    {
        var floatFeatures = new float[] { 96.215f, 1.595655e+09f, 3000 };
        var catFeatures = new string[0];
        var results = new double[1];

        var modelHandle = ModelCalcerCreate();

        if (!LoadFullModelFromFile(modelHandle, "{absolute path to the same model}/test_catboost_model.cbm"))
        {
            Console.WriteLine($"Load model error: {GetErrorString()}");
        }

        if (!CalcModelPredictionSingle(modelHandle, floatFeatures, 3, catFeatures, 0, out results, 1))
        {
            Console.WriteLine($"Predict error : {GetErrorString()}");
        }

        Console.WriteLine($"Model score is {results[0]}");
    }

    [DllImport("catboostmodel", EntryPoint = "ModelCalcerCreate")]
    private static extern IntPtr ModelCalcerCreate();

    [DllImport("catboostmodel", EntryPoint = "GetErrorString")]
    private static extern string GetErrorString();

    [DllImport("catboostmodel", EntryPoint = "LoadFullModelFromFile")]
    private static extern bool LoadFullModelFromFile(IntPtr modelHandle, string fileName);

    [DllImport("catboostmodel", EntryPoint = "CalcModelPredictionSingle")]
    private static extern bool CalcModelPredictionSingle(
        IntPtr modelHandle,
        float[] floatFeatures, ulong floatFeaturesSize,
        string[] catFeatures, ulong catFeaturesSize,
        out double[] result, ulong resultSize
    );
}
The relevant C header file is shown below; the entire file is available on GitHub.
#if defined(_WIN32) && !defined(CATBOOST_API_STATIC_LIB)
    #ifdef _WINDLL
        #define CATBOOST_API __declspec(dllexport)
    #else
        #define CATBOOST_API __declspec(dllimport)
    #endif
#else
    #define CATBOOST_API
#endif

typedef void ModelCalcerHandle;

CATBOOST_API ModelCalcerHandle* ModelCalcerCreate();
CATBOOST_API const char* GetErrorString();
CATBOOST_API bool LoadFullModelFromFile(
    ModelCalcerHandle* modelHandle,
    const char* filename);
CATBOOST_API bool CalcModelPredictionSingle(
    ModelCalcerHandle* modelHandle,
    const float* floatFeatures, size_t floatFeaturesSize,
    const char** catFeatures, size_t catFeaturesSize,
    double* result, size_t resultSize);
Any suggestions are appreciated. Thank you!
It turns out I should not use the "out" keyword before "double[] result" in CalcModelPredictionSingle's signature; removing it fixed the problem.
Below works.
[DllImport("catboostmodel")]
private static extern bool CalcModelPredictionSingle(
IntPtr modelHandle,
float[] floatFeatures, ulong floatFeaturesSize,
string[] catFeatures, ulong catFeaturesSize,
double[] result, ulong resultSize
);
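For context: with "out", the marshaler passes the address of the array reference (an extra level of indirection, effectively a double**), while the C API expects a caller-allocated double* buffer that it fills in place; passing the array directly gives exactly that. A minimal usage sketch of the corrected signature, mirroring the question's Main and reusing its other DllImport declarations (the model path is a placeholder):

var floatFeatures = new float[] { 96.215f, 1.595655e+09f, 3000f };
var catFeatures = new string[0];
var results = new double[1]; // caller-allocated buffer the native side fills in place

var modelHandle = ModelCalcerCreate();
if (!LoadFullModelFromFile(modelHandle, "test_catboost_model.cbm"))
    Console.WriteLine($"Load model error: {GetErrorString()}");

if (!CalcModelPredictionSingle(modelHandle,
                               floatFeatures, (ulong)floatFeatures.Length,
                               catFeatures, (ulong)catFeatures.Length,
                               results, (ulong)results.Length))
    Console.WriteLine($"Predict error: {GetErrorString()}");

Console.WriteLine($"Model score is {results[0]}");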

Why IntPtr cannot be used in a subsequent call

My program:
class Program {
    [DllImport("libiconvD.dll", CallingConvention = CallingConvention.Cdecl)]
    public static extern IntPtr libiconv_open([MarshalAs(UnmanagedType.LPStr)] string tocode,
                                              [MarshalAs(UnmanagedType.LPStr)] string fromcode);

    [DllImport("libiconvD.dll", CallingConvention = CallingConvention.Cdecl)]
    static extern ulong libiconv(IntPtr icd,
                                 ref StringBuilder inbuf, ref ulong inbytesleft,
                                 out StringBuilder outbuf, out ulong outbytesleft);

    [DllImport("libiconvD.dll", CallingConvention = CallingConvention.Cdecl)]
    static extern int libiconv_close(IntPtr icd);

    static void Main(string[] args) {
        var inbuf = new StringBuilder("Rule(s): Global Tag – Refer to Print Rules – General Requirements");
        ulong inbytes = (ulong)inbuf.Length;
        ulong outbytes = inbytes;
        StringBuilder outbuf = new StringBuilder((int)outbytes);

        IntPtr icd = libiconv_open("utf8", "windows-1252");
        var rcode1 = libiconv(icd, ref inbuf, ref inbytes, out outbuf, out outbytes);
        Debug.WriteLine(rcode1);

        var rcode2 = libiconv_close(icd);
        Debug.WriteLine(rcode2);
    }//Main()
}//Program CLASS
The first call, libiconv_open(), works and returns a pointer in icd.
When the second call, libiconv(), runs, it gets an access violation on the icd pointer.
Here is the C code being called:
size_t iconv (iconv_t icd,
              ICONV_CONST char* * inbuf, size_t *inbytesleft,
              char* * outbuf, size_t *outbytesleft)
{
    conv_t cd = (conv_t) icd;
    if (inbuf == NULL || *inbuf == NULL)
        return cd->lfuncs.loop_reset(icd,outbuf,outbytesleft);
    else
        return cd->lfuncs.loop_convert(icd,
                                       (const char* *)inbuf,inbytesleft,
                                       outbuf,outbytesleft);
}
It seems it can't access the function defined in the structure that the pointer points to. Is there something special that has to be done to a returned pointer to make it usable in subsequent calls?
Thanks
It turns out that using the libiconv library is unnecessary in C#. Just use the Encoding class.
static void Main(string[] args) {
    UTF8Encoding utf8 = new UTF8Encoding();
    Encoding w1252 = Encoding.GetEncoding(1252);

    string inbuf = "Rule(s): Global Tag – Refer to Print Rules – General Requirements";
    byte[] bytearray = utf8.GetBytes(inbuf);
    byte[] outbytes = Encoding.Convert(utf8, w1252, bytearray);

    Debug.WriteLine("*************************");
    Debug.WriteLine(String.Format(" Input: {0}", inbuf));
    Debug.WriteLine(String.Format(" Output: {0}", utf8.GetString(outbytes)));
    Debug.WriteLine("*************************");
}//Main()
*************************
Input: Rule(s): Global Tag – Refer to Print Rules – General Requirements
Output: Rule(s): Global Tag – Refer to Print Rules – General Requirements
*************************
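Note that the original libiconv_open("utf8", "windows-1252") call converts from Windows-1252 to UTF-8, which is the reverse of the Encoding.Convert direction shown above. A minimal sketch of that direction with the same Encoding class (my addition; it assumes .NET Core / .NET 5+ with the System.Text.Encoding.CodePages package referenced, while on .NET Framework GetEncoding(1252) works without it):

// using System.Text;
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); // needed on .NET Core / .NET 5+ only

Encoding w1252 = Encoding.GetEncoding(1252);
byte[] cp1252Bytes = w1252.GetBytes("Rule(s): Global Tag – Refer to Print Rules – General Requirements");

// Convert the Windows-1252 bytes to UTF-8 bytes, then decode them for display.
byte[] utf8Bytes = Encoding.Convert(w1252, Encoding.UTF8, cp1252Bytes);
Debug.WriteLine(Encoding.UTF8.GetString(utf8Bytes));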

Why does passing a string from C# to a C++ dll get NULL?

I'm trying to connect a C# executable to a C++ dll. One of the methods of the dll receives a const char* and an int* (the first one specifying an input value, and the second one, an address to return a value):
extern "C" __declspec(dllexport)
int setVal(long handle, const char* ptrVal, int* ptrRet);
The first thing this function does is to check whether ptrVal is null, and returns -1 if so.
On the other hand, the C# code invokes the dll as follows:
[DllImport(dllName,
           EntryPoint = "setVal",
           ExactSpelling = true,
           CallingConvention = CallingConvention.Cdecl,
           CharSet = CharSet.Ansi)]
public static extern int setVal(long handle,
                                [MarshalAs(UnmanagedType.LPStr)] string str,
                                ref int ptrRes);
In the main function, I have
long handle = 0;
int result = 0;
int res = 0;
string str = "Hello World!";
result = setVal(handle, str, ref res);
When calling this function, I always receive a null pointer at the C side, which makes result equal to -1. I have tried different approaches when declaring the wrapper function, without success:
public static extern int setVal(long handle,
                                [MarshalAs(UnmanagedType.LPStr)] [In] string str,
                                [Out] int ptrRes);

public static unsafe extern int setVal(long handle,
                                       [MarshalAs(UnmanagedType.LPStr)] string str,
                                       ref int ptrRes);

public static extern int setVal(long handle,
                                StringBuilder sb,
                                ref int ptrRes); // also the unsafe version

public static extern int setVal(long handle,
                                byte[] value,
                                ref int ptrRes); // also the unsafe version
I'm using Visual Studio 2017, and .NET framework 4.6.1.
Why am I always receiving NULL as the second argument (const char*) of the dll function?
I did a bit of googling, so this is untested code, but I can see it's something you haven't tried.
Declare the external function like so:
[DllImport(dllName, EntryPoint = "setVal", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
public static extern int setVal(int handle, StringBuilder sb, ref int ptrRes);
and use it like so
int handle = 0;
int result = 0;
int res = 0;
StringBuilder sb = new StringBuilder("Hello World");
result = setVal(handle, sb, ref res);
On Windows, long in C/C++ is 32 bits on both 32-bit and 64-bit targets (Windows is LLP64), so the C long handle maps to int in C#, not to C#'s 64-bit long. You can check it by doing a sizeof(long) in C/C++.
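In a 32-bit cdecl build, the extra four bytes of a C# 64-bit long shift every following stack argument, so the callee reads the high half of the handle (zero) as the const char*, which matches the NULL being observed. Assuming that is the only issue, a sketch of the original declaration with just the handle type corrected (untested, using the same dllName constant as the question):

[DllImport(dllName,
           EntryPoint = "setVal",
           ExactSpelling = true,
           CallingConvention = CallingConvention.Cdecl,
           CharSet = CharSet.Ansi)]
public static extern int setVal(int handle,   // C 'long' is 32-bit on Windows (LLP64)
                                [MarshalAs(UnmanagedType.LPStr)] string str,
                                ref int ptrRes);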

c++ to csharp string passing with dllimport , lost accents

I am trying to send a message from C++ to C#, but some of my accented characters are lost along the way (not all of them?). PS: writing from Italian.
Here is what I do:
c++:
#ifdef DLL_EXPORTS
#define DLL_API __declspec(dllexport)
#else
#define DLL_API __declspec(dllimport)
#endif

extern "C" {
    DLL_API void __cdecl getResults(char* entry, wchar_t* result);
}

[...]

void getResults(char* entry, wchar_t* result)
{
    std::string str(entry);
    std::string Stringresult = "héà";
    std::wstring wsTmp(Stringresult.begin(), Stringresult.end());
    const wchar_t* constChar = wsTmp.c_str();
    swprintf(result, Stringresult.length(), constChar);
}
c#:
[DllImport("libface.dll", CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
public static extern void getResults([MarshalAs(UnmanagedType.LPStr)] string entry, StringBuilder res);

static void Main()
{
    StringBuilder result = new StringBuilder(2000);
    string entry = Console.ReadLine();
    getResults(entry, result);
    Console.WriteLine(result);
}
Solved the problem thanks to this link (which states that the problem is much more complicated than some people may think...):
http://blog.kutulu.org/2012/04/marshaling-utf-8-harder-than-it-ought.html
c++ code:
extern "C" {
DLL_API char* __cdecl getResults(char* entry);
}
char* getResults(char* entry)
{
std::string Stringresult= "hàé";
char *cstr = new char[Stringresult.length() + 1];
strcpy(cstr, Stringresult.c_str());
return cstr;
}
c# code:
[DllImport("libface.dll" , EntryPoint = "getResults")]
private static extern IntPtr getResults([MarshalAs(UnmanagedType.LPStr)] string entry);
static void Main()
{
var data = new List<byte>();
var ptr = getResults("p");
var off = 0;
while (true)
{
var ch = Marshal.ReadByte(ptr, off++);
if (ch == 0)
{
break;
}
data.Add(ch);
}
string sptr = Encoding.UTF8.GetString(data.ToArray());
Console.WriteLine(sptr);
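On .NET Core / .NET 5+, the byte-by-byte loop can be replaced with Marshal.PtrToStringUTF8, which reads a zero-terminated UTF-8 buffer directly. A minimal sketch assuming the same getResults export (and note that, as in the original, the buffer allocated with new[] on the C++ side is never freed here):

// Same import as above.
[DllImport("libface.dll", EntryPoint = "getResults")]
private static extern IntPtr getResults([MarshalAs(UnmanagedType.LPStr)] string entry);

static void Main()
{
    IntPtr ptr = getResults("p");
    // Decodes the zero-terminated buffer as UTF-8 (not available on classic .NET Framework).
    string sptr = Marshal.PtrToStringUTF8(ptr);
    Console.WriteLine(sptr);
}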

Marshal wchar_t** from C++ to C# as an out parameter?

I have this function in a dll in C and I cannot change it:
extern "C" SIBIO_MULTILANGUAGE_API_C DWORD getLabel(const char* const i_formName,
const char* const i_fieldName,
wchar_t** i_output);
I know that this call internally allocates the memory for the wchar_t* using the function CoTaskMemAlloc.
In C# I wrapped this function in this way:
[DllImport("sibio_multilanguage_c.dll", EntryPoint = "getLabel", CallingConvention = CallingConvention.Cdecl)]
private static extern UInt32 _getLabel([In] string i_formName, [In] string i_fieldName,
[MarshalAs(UnmanagedType.LPWStr)] out string i_output);
static public string getLabel(string i_formName, string i_fieldName)
{
string str = null;
UInt32 err = _getLabel(i_formName, i_fieldName, out str);
if (0 != err)
{
throw new System.IO.FileNotFoundException();
}
return str;
}
I'm able to read the content of the wchar_t* correctly, but reading it this way I never free the memory allocated in the C function.
How can I read the wchar_t* and also be able to free it? Any help is greatly appreciated!
Thanks to @Dai's and @IanAbbot's comments I've come up with a solution that works perfectly:
[DllImport("sibio_multilanguage_c.dll", EntryPoint = "getLabel", CallingConvention = CallingConvention.Cdecl)]
private static extern UInt32 _getLabel([In] string i_formName, [In] string i_fieldName,
out IntPtr i_output);
static public string getLabel(string i_formName, string i_fieldName)
{
IntPtr i_result;
string str = null;
UInt32 err = _getLabel(i_formName, i_fieldName, out i_result);
if (0 != err)
{
throw new System.IO.FileNotFoundException();
}
str = Marshal.PtrToStringAuto(i_result);
Marshal.FreeCoTaskMem(i_result);
return str;
}
