For some reason the first XOR and XNOR pair does not reproduce the original values, but running the pair a second time does. The problem appears to be related to the sign bit.
Here are the two functions:
/// XOR-combines `length` bytes of `data` in place with a repeating `key`.
///
/// @param data   buffer to transform in place (must hold `length` bytes)
/// @param length number of bytes to process
/// @param key    repeating key; an empty key leaves the buffer unchanged
///
/// XOR is an involution: calling this twice with the same key restores the
/// original bytes. Note that following XOR with XNOR does NOT restore the
/// data — XNOR(XOR(d)) == ~(d ^ k ^ k) == ~d, the bitwise complement, which
/// is exactly the sign-flipped value seen in the question's output.
void XOR(unsigned char *data, unsigned int length, const string& key)
{
    if (key.empty())
        return; // nothing to mix in; also avoids indexing an empty key

    string::size_type k = 0; // unsigned index: no signed/unsigned comparison
    for (unsigned int v = 0; v < length; v++)
    {
        // Cast to unsigned char so a negative (sign-extended) char cannot
        // surprise anyone in the promoted-int XOR; the stored result is
        // truncated to a byte either way.
        data[v] ^= static_cast<unsigned char>(key[k]);

        // Wrap the key index — clearer than `k = (++k < n ? k : 0)`, which
        // modifies k twice in one expression.
        if (++k == key.length())
            k = 0;
    }
}
/// XNOR-combines `length` bytes of `data` in place with a repeating `key`:
/// each byte becomes ~(data[v] ^ key[k]).
///
/// @param data   buffer to transform in place (must hold `length` bytes)
/// @param length number of bytes to process
/// @param key    repeating key; an empty key complements every byte
///
/// XNOR is its own inverse: ~(~(d ^ k) ^ k) == d, so calling XNOR twice
/// restores the data. It does NOT invert a previous XOR — that pairing
/// yields ~d, which is why the observed intermediate values are the
/// bitwise complements (e.g. 1224 -> -1225) of the originals.
void XNOR(unsigned char *data, unsigned int length, const string& key)
{
    string::size_type k = 0; // unsigned index: no signed/unsigned comparison
    for (unsigned int v = 0; v < length; v++)
    {
        // Empty key: treat the key byte as 0, so the byte is just
        // complemented — matches indexing semantics without UB risk.
        const unsigned char kb =
            key.empty() ? 0 : static_cast<unsigned char>(key[k]);

        // ~ is computed on the promoted int; assigning back to the
        // unsigned char truncates to the low byte, as intended.
        data[v] = static_cast<unsigned char>(~(data[v] ^ kb));

        // Wrap the key index — clearer than `k = (++k < n ? k : 0)`, which
        // modifies k twice in one expression.
        if (!key.empty() && ++k == key.length())
            k = 0;
    }
}
And here are some tests:
DEBUGLOG("1. %d", g_settings.Size);
XOR((unsigned char *)&g_settings, sizeof(Settings), KEY);
DEBUGLOG("2. %d", g_settings.Size);
XNOR((unsigned char *)&g_settings, sizeof(Settings), KEY);
DEBUGLOG("3. %d", g_settings.Size);
XOR((unsigned char *)&g_settings, sizeof(Settings), KEY);
DEBUGLOG("4. %d", g_settings.Size);
XNOR((unsigned char *)&g_settings, sizeof(Settings), KEY);
DEBUGLOG("5. %d", g_settings.Size);
The output I'm getting is:
1. 1224
2. 977354226
3. -1225
4. -977354227
5. 1224