I have mapped Entity Framework entities. Each table in SQL Server 2008 contains a Timestamp column, which is mapped as a byte array. The length of the array is always 8. Now I need to compare two of these timestamp values. What is the best way to do that: convert them to long, or compare them as byte arrays?
We do it by comparing them as byte arrays. Works fine for us.
MS SQL Server's timestamp data type is semantically equivalent to binary(8) (if non-nullable) or varbinary(8) (if nullable). Ergo, compare them as arrays of bytes.
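For example, a minimal sketch of that byte-array comparison using LINQ's SequenceEqual (the class name and sample values here are just illustrative):

using System;
using System.Linq;

class ByteArrayCompareExample
{
    static void Main()
    {
        // Two 8-byte rowversion values, equal in this case.
        byte[] v1 = { 0, 0, 0, 0, 0, 0, 0x10, 0x2A };
        byte[] v2 = { 0, 0, 0, 0, 0, 0, 0x10, 0x2A };

        // Element-by-element comparison of the two arrays.
        bool equal = v1.SequenceEqual(v2);
        Console.WriteLine(equal); // True
    }
}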
Not to mention that there's overhead involved in converting to long. You could write unsafe code to take the addresses of the byte arrays, cast them to long pointers and dereference them into longs, but doing that safely means pinning the arrays in memory and a raft of ugly code for something essentially simple (and it's probably no faster than using BitConverter).
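For reference, the BitConverter route mentioned above would look roughly like this (a sketch only; the helper name is made up, and note the endianness caveat in the comments):

using System;

static class BitConverterCompareExample
{
    // Equality test by converting both 8-byte arrays to Int64 first.
    // BitConverter uses the machine's byte order (little-endian on x86/x64),
    // which is fine for an equality check, but the resulting longs would not
    // sort in rowversion order; reverse the arrays first if you need ordering.
    public static bool TimestampsEqual(byte[] x, byte[] y)
    {
        return BitConverter.ToInt64(x, 0) == BitConverter.ToInt64(y, 0);
    }
}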
If performance is really that critical, the fastest way is to do the comparison using the standard C library's memcmp() function via P/Invoke:
using System;
using System.Runtime.InteropServices;

namespace TestDrive
{
    class Program
    {
        static void Main()
        {
            byte[] a = { 1, 2, 3, 4, 5, 6, 7, 8 };
            byte[] b = { 1, 2, 3, 4, 5, 0, 7, 8 };
            byte[] c = { 1, 2, 3, 4, 5, 6, 7, 8 };

            bool isMatch;
            isMatch = TimestampCompare(a, b); // returns false
            isMatch = TimestampCompare(a, c); // returns true
        }

        // memcmp from the C runtime (msvcrt.dll, so Windows-only).
        // Returns 0 when the two buffers are byte-for-byte identical.
        [DllImport("msvcrt.dll", CallingConvention = CallingConvention.Cdecl)]
        static extern int memcmp(byte[] x, byte[] y, UIntPtr count);

        static bool TimestampCompare(byte[] x, byte[] y)
        {
            const int LEN = 8;
            UIntPtr cnt = new UIntPtr((uint)LEN);

            // Same array instance (or both null): trivially equal.
            if (x == y) return true;

            if (x == null || x.Length != LEN || y == null || y.Length != LEN)
            {
                throw new ArgumentException();
            }

            return memcmp(x, y, cnt) == 0;
        }
    }
}