I am facing a rather peculiar problem. I am working on a compiler for an architecture that doesn't support bitwise operations. However, it handles signed 16-bit integer arithmetic.
For example, a 16-bit AND can be implemented as:
/* Bitwise AND of two non-negative 16-bit values using only
 * comparison, addition, subtraction, and division.
 * Walks the bits from most significant to least significant:
 * `mask` takes the value of each bit position (0x8000 down to 1);
 * a bit is set in an operand iff the operand is >= mask once all
 * higher bits have been stripped off. */
int and(int a, int b) {
    int acc = 0;
    for (int mask = 0x8000; mask > 0; mask /= 2) {
        int a_has_bit = (a >= mask);
        int b_has_bit = (b >= mask);
        if (a_has_bit && b_has_bit)
            acc += mask;          /* bit present in both operands */
        if (a_has_bit)
            a -= mask;            /* strip the bit so lower tests work */
        if (b_has_bit)
            b -= mask;
    }
    return acc;
}
A floating-point (`double`) solution for a 2-bit AND, without loops or table lookups:
int and(int a, int b) {
double x=a*b/12;
return (int) (4*(sign(ceil(tan(50*x)))/6+x));
}
A 32-bit integer solution for a 2-bit AND:
/* 2-bit AND (operands in 0..3) as a single arithmetic expression.
 * The magic constant 684720128 is chosen so that, after 32-bit
 * wraparound, the dividend taken modulo (b+1) equals a AND b for
 * every pair of operands in 0..3.
 *
 * Fix vs. the posted version: `684720128*a*a` overflows signed int
 * for a >= 2, which is undefined behavior in C. Wraparound is only
 * well defined for unsigned types, so the intermediate is computed
 * in uint32_t; for all a,b in 0..3 the wrapped dividend is
 * non-negative, so the unsigned remainder equals the intended one. */
int and(int a, int b) {
    uint32_t ua = (uint32_t)a;
    uint32_t x = (684720128u * ua * ua - (uint32_t)b) * ua;  /* wraps mod 2^32, well defined */
    return (int)(x % (uint32_t)(b + 1));
}
A 16-bit integer solution for a 2-bit AND:
/* 2-bit AND (operands in 0..3) using only 16-bit-safe integer
 * arithmetic. (121*a) % 16 maps a to an encoded value (0->0, 1->9,
 * 2->2, 3->11) whose residues modulo 2, 3 and 4 equal a AND 1,
 * a AND 2 and a AND 3 respectively, so the final % (b+1) picks
 * out the correct result. */
int and(int a, int b) {
    int encoded = (121 * a) % 16;   /* 0, 9, 2, 11 for a = 0..3 */
    return encoded % (b + 1);
}
A 16-bit integer solution for a 3-bit AND:
/* 3-bit AND as a single arithmetic expression (labeled "3-bit", so
 * presumably operands are in 0..7 -- verify against the original
 * derivation before relying on it).
 *
 * NOTE(review): depends on a helper `sign()` that is not shown in
 * this snippet; presumably a signum returning -1/0/+1 -- confirm its
 * definition and return type (an int-returning sign combined with
 * integer division would silently truncate).
 *
 * NOTE(review): `%` and `*` have equal precedence in C and associate
 * left-to-right, so this parses as (sign(a) * X) % Y with
 * X = (((-23-a)*(40+b)) % 2) + 40 + b (repeated inside Y). The
 * constants -23, 40 and 10624 are magic values from the original
 * construction; the divisor sub-expression (a%2 - 2 - a) is negative
 * for a >= 0, and C's `%` takes the sign of the dividend -- too
 * order-sensitive to restyle safely without the derivation. */
int and(int a, int b) {
return sign(a) * ((((-23-a) * (40+b)) % 2)+40+b) % ((10624 * ((((-23-a) * (40+b))%2)+40+b)) % (a%2 - 2 -a) - a%2 + 2 +a);
}