Could you take a look at my "hex string to decimal number conversion in VC" implementation and tell me whether there are any problems with it?
ULONG ConvertHexToDecW(WCHAR* szHex, BYTE nHexLen)
{
    WCHAR c = 0;
    BYTE nMagnitude = 0;
    /* Place values for up to 8 hex digits, the most a 32-bit ULONG can hold. */
    ULONG MagnitudeArray[] = {1, 16, 256, 4096, 65536, 1048576, 16777216, 268435456};
    ULONG nDec = 0;
    /* Walk from the last (least significant) digit towards the front of the string. */
    while ( nHexLen > 0 && nMagnitude < 8 )
    {
        c = szHex[nHexLen-1];
        if ( c >= L'0' && c <= L'9' )
        {
            nDec += (c - L'0') * MagnitudeArray[nMagnitude];
        }
        else if ( c >= L'A' && c <= L'F' )
        {
            nDec += (10 + c - L'A') * MagnitudeArray[nMagnitude];
        }
        else if ( c >= L'a' && c <= L'f' )
        {
            nDec += (10 + c - L'a') * MagnitudeArray[nMagnitude];
        }
        /* Note: a non-hex character contributes nothing but still consumes a digit position. */
        --nHexLen;
        ++nMagnitude;
    }
    return nDec;
}
ULONG ConvertHexToDecA(CHAR* szHex, BYTE nHexLen)
{
    CHAR c = 0;
    BYTE nMagnitude = 0;
    /* Place values for up to 8 hex digits, the most a 32-bit ULONG can hold. */
    ULONG MagnitudeArray[] = {1, 16, 256, 4096, 65536, 1048576, 16777216, 268435456};
    ULONG nDec = 0;
    /* Walk from the last (least significant) digit towards the front of the string. */
    while ( nHexLen > 0 && nMagnitude < 8 )
    {
        c = szHex[nHexLen-1];
        if ( c >= '0' && c <= '9' )
        {
            nDec += (c - '0') * MagnitudeArray[nMagnitude];
        }
        else if ( c >= 'A' && c <= 'F' )
        {
            nDec += (10 + c - 'A') * MagnitudeArray[nMagnitude];
        }
        else if ( c >= 'a' && c <= 'f' )
        {
            nDec += (10 + c - 'a') * MagnitudeArray[nMagnitude];
        }
        /* Note: a non-hex character contributes nothing but still consumes a digit position. */
        --nHexLen;
        ++nMagnitude;
    }
    return nDec;
}
#ifdef UNICODE
#define ConvertHexToDec ConvertHexToDecW
#else
#define ConvertHexToDec ConvertHexToDecA
#endif
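
For reference, a minimal test sketch of how the W version could be exercised, assuming a Windows/VC build where windows.h supplies WCHAR/BYTE/ULONG; the test value and the comparison against the CRT's wcstoul are illustrative additions, not part of the code above:

#include <windows.h>
#include <stdio.h>
#include <stdlib.h>
#include <wchar.h>

ULONG ConvertHexToDecW(WCHAR* szHex, BYTE nHexLen);    /* defined above */

int main(void)
{
    WCHAR szTest[] = L"1A3F";                          /* hypothetical test input */
    BYTE nLen = (BYTE)wcslen(szTest);

    ULONG nMine = ConvertHexToDecW(szTest, nLen);
    ULONG nCrt  = wcstoul(szTest, NULL, 16);           /* CRT reference conversion */

    wprintf(L"mine=%lu, wcstoul=%lu\n", nMine, nCrt);  /* both should print 6719 */
    return 0;
}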