I have a string:
LogoDataStr = "ABC0000"
I want to convert it to ASCII bytes, and the result should be:
LogoDataBy[0] = 0x41;
Just throwing:
Encoding.ASCII.GetBytes("ABC0000").Dump();
into LINQPad gives this output (in decimal):
Byte[] (7 items)
65
66
67
48
48
48
48
So I'm not sure how you're getting 0x00...
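If you want the dump in hex instead of decimal, one option (in LINQPad or a plain console app) is BitConverter.ToString:

using System;
using System.Text;

byte[] bytes = Encoding.ASCII.GetBytes("ABC0000");
// Prints "41-42-43-30-30-30-30": 0x41 = 'A', 0x30 = '0', no 0x00 anywhere
Console.WriteLine(BitConverter.ToString(bytes));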
This code

using System.Text;

class Program
{
    static void Main(string[] args)
    {
        byte[] LogoDataBy = Encoding.ASCII.GetBytes("ABC0000");
    }
}

produces the expected result.
Double-check your code and the value of the string before you read the ASCII bytes.
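One quick way to do that, a minimal sketch reusing the variable names from the question (run it inside Main or as top-level statements):

using System;
using System.Text;

string LogoDataStr = "ABC0000"; // substitute the string you are actually converting
foreach (char c in LogoDataStr)
{
    // Show each character with its code point, e.g. 'A' = 0x41
    Console.WriteLine($"'{c}' = 0x{(int)c:X2}");
}
byte[] LogoDataBy = Encoding.ASCII.GetBytes(LogoDataStr);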
using System.Collections.Generic;

class CustomAscii
{
    private static Dictionary<char, byte> dictionary;

    static CustomAscii()
    {
        byte numcounter = 0x30;   // '0'
        byte charcounter = 0x41;  // 'A'
        byte ucharcounter = 0x61; // 'a'
        string numbers = "0123456789";
        string chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
        string uchars = "abcdefghijklmnopqrstuvwxyz";

        // Map every supported character to its ASCII code
        dictionary = new Dictionary<char, byte>();
        foreach (char c in numbers)
        {
            dictionary.Add(c, numcounter++);
        }
        foreach (char c in chars)
        {
            dictionary.Add(c, charcounter++);
        }
        foreach (char c in uchars)
        {
            dictionary.Add(c, ucharcounter++);
        }
    }
    public static byte[] getCustomBytes(string t)
    {
        int iter = 0;
        byte[] b = new byte[t.Length];
        foreach (char c in t)
        {
            // iter++ is required here; without it every byte would overwrite b[0]
            b[iter++] = dictionary[c];
            //DEBUG: Console.WriteLine(b[iter - 1].ToString());
        }
        return b;
    }
}
This is how I would do it, but only if Encoding.ASCII.GetBytes() were actually returning wrong values.
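For completeness, a quick usage sketch (the Demo wrapper is just illustrative scaffolding, not part of the class above):

using System;

class Demo
{
    static void Main()
    {
        // Expect 41-42-43-30-30-30-30, identical to Encoding.ASCII.GetBytes("ABC0000")
        byte[] result = CustomAscii.getCustomBytes("ABC0000");
        Console.WriteLine(BitConverter.ToString(result));
    }
}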