Let's say that on the C++ side my function takes a variable of type jstring named myString. I can convert it to an ANSI string as follows, but how do I convert it to a wide string (wchar_t* or std::wstring) instead?
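A minimal sketch of the ANSI conversion in question, using the standard GetStringUTFChars call (the variable name ansiString is assumed, and strictly speaking the bytes are JNI's modified UTF-8 rather than ANSI):

// Borrow a modified-UTF-8 view of the string from the JVM.
const char* ansiString = env->GetStringUTFChars(myString, 0);
// ... use ansiString ...
// Every GetStringUTFChars must be paired with a release.
env->ReleaseStringUTFChars(myString, ansiString);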
If this helps someone... I've used this function for an Android project:
std::wstring Java_To_WStr(JNIEnv *env, jstring string)
{
    std::wstring value;

    const jchar *raw = env->GetStringChars(string, 0);
    jsize len = env->GetStringLength(string);

    // Append the UTF-16 code units one by one, widening each jchar to wchar_t.
    const jchar *temp = raw;
    while (len > 0)
    {
        value += *(temp++);
        len--;
    }

    env->ReleaseStringChars(string, raw);
    return value;
}
An improved solution could be (Thanks for the feedback):
std::wstring Java_To_WStr(JNIEnv *env, jstring string)
{
    std::wstring value;

    const jchar *raw = env->GetStringChars(string, 0);
    jsize len = env->GetStringLength(string);

    value.assign(raw, raw + len);

    env->ReleaseStringChars(string, raw);
    return value;
}
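For completeness, calling the helper from a hypothetical JNI entry point (Java_TestClass_show is an assumed method name; wprintf needs <cwchar>). Note that on platforms where wchar_t is 32 bits, this widening copies UTF-16 surrogate pairs through without combining them:

extern "C" JNIEXPORT void JNICALL Java_TestClass_show(JNIEnv *env, jobject, jstring text)
{
    // Convert the incoming Java string and print it as a wide string.
    std::wstring ws = Java_To_WStr(env, text);
    wprintf(L"%ls\n", ws.c_str());
}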
A portable and robust solution is to use iconv, with the understanding that you have to know what encoding your system's wchar_t uses (UTF-16 on Windows, UTF-32 on many Unix systems, for example).
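A sketch of the iconv route, assuming glibc's iconv (which accepts "WCHAR_T" as an encoding name; on other platforms spell out "UTF-32LE" or "UTF-16LE" to match your wchar_t) and UTF-8 input such as the bytes from GetStringUTFChars:

#include <iconv.h>
#include <stdexcept>
#include <string>

std::wstring Utf8_To_WStr(const char *utf8, size_t len)
{
    std::wstring value;
    if (len == 0)
        return value;

    iconv_t cd = iconv_open("WCHAR_T", "UTF-8"); // "WCHAR_T" is a glibc extension
    if (cd == (iconv_t)-1)
        throw std::runtime_error("iconv_open failed");

    // A UTF-8 string never decodes to more code units than it has bytes.
    value.resize(len);
    char *in = const_cast<char *>(utf8);
    char *out = reinterpret_cast<char *>(&value[0]);
    size_t inLeft = len;
    size_t outLeft = value.size() * sizeof(wchar_t);

    size_t res = iconv(cd, &in, &inLeft, &out, &outLeft);
    iconv_close(cd);
    if (res == (size_t)-1)
        throw std::runtime_error("invalid UTF-8 sequence");

    // Shrink to the number of wide characters actually produced.
    value.resize((value.size() * sizeof(wchar_t) - outLeft) / sizeof(wchar_t));
    return value;
}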
If you want to minimise your dependency on third-party code, you can also hand-roll your own UTF-8 converter. This is easy if converting to UTF-32, somewhat harder with UTF-16 because you have to handle surrogate pairs too. :-P Also, you must be careful to reject non-shortest forms, or it can open up security bugs in some cases.
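If you go that route, here is a sketch of a UTF-8 to UTF-32 decoder that rejects non-shortest forms (the function name and bool-based error reporting are just illustrative):

#include <cstdint>
#include <string>

bool Utf8_To_Utf32(const char *s, size_t len, std::u32string &out)
{
    // Smallest code point that legitimately needs 1, 2, 3 or 4 bytes.
    static const uint32_t kMin[4] = { 0, 0x80, 0x800, 0x10000 };
    size_t i = 0;
    while (i < len) {
        unsigned char b = s[i];
        int extra;
        uint32_t cp;
        if (b < 0x80)      { extra = 0; cp = b; }
        else if (b < 0xC0) return false; // stray continuation byte
        else if (b < 0xE0) { extra = 1; cp = b & 0x1F; }
        else if (b < 0xF0) { extra = 2; cp = b & 0x0F; }
        else if (b < 0xF8) { extra = 3; cp = b & 0x07; }
        else               return false; // 0xF8..0xFF never appear in UTF-8
        if (i + extra >= len) return false; // truncated sequence
        for (int j = 1; j <= extra; ++j) {
            unsigned char c = s[i + j];
            if ((c & 0xC0) != 0x80) return false; // bad continuation byte
            cp = (cp << 6) | (c & 0x3F);
        }
        if (cp < kMin[extra]) return false; // overlong (non-shortest) form
        if (cp > 0x10FFFF || (cp >= 0xD800 && cp <= 0xDFFF))
            return false; // out of range, or a surrogate code point
        out += static_cast<char32_t>(cp);
        i += extra + 1;
    }
    return true;
}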
Rather simple: jchar is a 16-bit UTF-16 code unit, so on platforms where wchar_t is also 16 bits (such as Windows) you can use the JVM's buffer directly. But do not forget to release the memory with ReleaseStringChars:
JNIEXPORT jboolean JNICALL Java_TestClass_test(JNIEnv *env, jobject, jstring string)
{
    // The cast is only sound where wchar_t is 16-bit, like jchar.
    const wchar_t *utf16 = (const wchar_t *)env->GetStringChars(string, NULL);
    ...
    env->ReleaseStringChars(string, (const jchar *)utf16);
}
My approach is to go jstring -> char* -> wchar_t*, with these helpers:
char* js2c(JNIEnv* env, jstring jstr)
{
    char* rtn = NULL;
    // Ask the Java side for the bytes via String.getBytes("utf-8").
    jclass clsstring = env->FindClass("java/lang/String");
    jstring strencode = env->NewStringUTF("utf-8");
    jmethodID mid = env->GetMethodID(clsstring, "getBytes", "(Ljava/lang/String;)[B");
    jbyteArray barr = (jbyteArray)env->CallObjectMethod(jstr, mid, strencode);
    jsize alen = env->GetArrayLength(barr);
    jbyte* ba = env->GetByteArrayElements(barr, NULL); // the parameter is a jboolean*, so NULL, not JNI_FALSE
    // Always allocate, even for the empty string, so callers never see NULL.
    rtn = (char*)malloc(alen + 1);
    memcpy(rtn, ba, alen);
    rtn[alen] = 0;
    env->ReleaseByteArrayElements(barr, ba, 0);
    return rtn; // caller must free()
}
jstring c2js(JNIEnv* env, const char* str) {
    jstring rtn = 0;
    int slen = strlen(str);
    unsigned short* buffer = 0;
    if (slen == 0)
        rtn = env->NewStringUTF(str);
    else {
        // Windows-only: size the UTF-16 buffer, then convert. CP_ACP means the
        // input is interpreted as the system ANSI code page, not UTF-8.
        int length = MultiByteToWideChar(CP_ACP, 0, (LPCSTR)str, slen, NULL, 0);
        buffer = (unsigned short*)malloc(length * sizeof(unsigned short));
        if (MultiByteToWideChar(CP_ACP, 0, (LPCSTR)str, slen, (LPWSTR)buffer, length) > 0)
            rtn = env->NewString((jchar*)buffer, length);
        free(buffer);
    }
    return rtn;
}
jstring w2js(JNIEnv *env, const wchar_t *src)
{
    size_t len = wcslen(src) + 1;
    size_t converted = 0;
    // Narrow to the locale's multibyte encoding; _TRUNCATE guards the buffer.
    char *dest = (char*)malloc(len * sizeof(char));
    wcstombs_s(&converted, dest, len, src, _TRUNCATE);
    jstring dst = c2js(env, dest);
    free(dest); // c2js copied the data, so the temporary can go
    return dst;
}
wchar_t *js2w(JNIEnv *env, jstring src) {
    char *dest = js2c(env, src);
    size_t len = strlen(dest) + 1;
    size_t converted = 0;
    wchar_t *dst = (wchar_t*)malloc(len * sizeof(wchar_t));
    mbstowcs_s(&converted, dst, len, dest, _TRUNCATE);
    free(dest); // js2c allocated this with malloc
    return dst; // caller must free()
}
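A round-trip usage sketch with those helpers (the entry point Java_TestClass_echo is hypothetical; note that both js2c and js2w hand back malloc'd buffers the caller must free):

extern "C" JNIEXPORT jstring JNICALL Java_TestClass_echo(JNIEnv *env, jobject, jstring input)
{
    wchar_t *wide = js2w(env, input); // jstring -> wchar_t*
    jstring result = w2js(env, wide); // wchar_t* -> jstring
    free(wide);
    return result;
}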
Here is how I converted jstring to LPWSTR.
const char* nativeString = env->GetStringUTFChars(javaString, 0);
size_t size = strlen(nativeString) + 1;
LPWSTR lpwstr = new wchar_t[size];
size_t outSize;
// Note: mbstowcs_s decodes per the current locale, not as UTF-8.
mbstowcs_s(&outSize, lpwstr, size, nativeString, size - 1);
env->ReleaseStringUTFChars(javaString, nativeString); // don't leak the JVM buffer
And who frees that wide-character buffer? I would recommend STL!
std::wstring JavaToWSZ(JNIEnv* env, jstring string)
{
    std::wstring value;
    if (string == NULL) {
        return value; // empty string
    }
    const jchar* raw = env->GetStringChars(string, NULL);
    if (raw != NULL) {
        jsize len = env->GetStringLength(string);
        value.assign(raw, raw + len); // jchar is not wchar_t, so use the iterator overload
        env->ReleaseStringChars(string, raw);
    }
    return value;
}