B. Wood
I have written a simple program that has a structure with two members.
There are two strange things going on.
1. When one of the structure members is tested in a condition, the
value of the second member seems to change.
2. The order of the condition tests seems to have an effect on whether
the value of that member changes.
The program follows:
-------------------------------------------------------------------------------
#include <assert.h>
#include <stdio.h>
#include <stdlib.h>

typedef struct LongStruct
{
    unsigned long lo;
    unsigned long hi;
}
LongStruct;

int
main(int argc, char * argv[])
{
    LongStruct s;

    s.lo = 0x00000001;
    s.hi = 0x00000000;
    s.lo += 0xFFFFFFFFUL;
    if (s.lo < 0xFFFFFFFFUL)
    {
        s.hi++;
    }

    printf("\ns.lo tested before s.hi\n");

    printf("\n#1 before s.lo test s.lo = %08lX s.hi = %08lX\n",
           s.lo, s.hi);
    if (s.lo == 0x00000000)
    {
        printf("\n#2 s.lo tested == 0 s.lo = %08lX s.hi = %08lX\n",
               s.lo, s.hi);
    }
    else
    {
        printf("\n#2 s.lo tested != 0 s.lo = %08lX s.hi = %08lX\n",
               s.lo, s.hi);
    }

    printf("\n#1 before s.hi test s.lo = %08lX s.hi = %08lX\n",
           s.lo, s.hi);
    if (s.hi == 0x00000001)
    {
        printf("\n#3 s.hi tested == 1 s.lo = %08lX s.hi = %08lX\n\n",
               s.lo, s.hi);
    }
    else
    {
        printf("\n#3 s.hi tested != 1 s.lo = %08lX s.hi = %08lX\n\n",
               s.lo, s.hi);
    }

    s.lo = 0x00000001;
    s.hi = 0x00000000;
    s.lo += 0xFFFFFFFFUL;
    if (s.lo < 0xFFFFFFFFUL)
    {
        s.hi++;
    }

    printf("\ns.hi tested before s.lo\n");

    printf("\n#1 before s.hi test s.lo = %08lX s.hi = %08lX\n",
           s.lo, s.hi);
    if (s.hi == 0x00000001)
    {
        printf("\n#2 s.h1 tested == 1 s.lo = %08lX s.hi = %08lX\n",
               s.lo, s.hi);
    }
    else
    {
        printf("\n#2 s.hi tested != 1 s.lo = %08lX s.hi = %08lX\n",
               s.lo, s.hi);
    }

    printf("\n#1 before s.lo test s.lo = %08lX s.hi = %08lX\n",
           s.lo, s.hi);
    if (s.lo == 0x00000000)
    {
        printf("\n#3 s.lo tested == 0 s.lo = %08lX s.hi = %08lX\n\n",
               s.lo, s.hi);
    }
    else
    {
        printf("\n#3 s.lo tested != 0 s.lo = %08lX s.hi = %08lX\n\n",
               s.lo, s.hi);
    }

    return (0);
}
--------------------------------------------------------------------------------
The output of the program is as follows:
s.lo tested before s.hi
#1 before s.lo test s.lo = 00000000 s.hi = 00000001
#2 s.lo tested == 0 s.lo = 00000000 s.hi = 00000000
#1 before s.hi test s.lo = 00000000 s.hi = 00000001
#3 s.hi tested == 1 s.lo = 00000000 s.hi = 00000001
s.hi tested before s.lo
#1 before s.hi test s.lo = 00000000 s.hi = 00000001
#2 s.h1 tested == 1 s.lo = 00000000 s.hi = 00000001
#1 before s.lo test s.lo = 00000000 s.hi = 00000001
#3 s.lo tested == 0 s.lo = 00000000 s.hi = 00000001
--------------------------------------------------------------------------------
This program was compiled on a Linux machine (kernel 2.4.7-10) with gcc version 2.96.
The same program builds and runs fine on a Windows machine running
Windows 2K and on a Solaris machine running SunOS 5.8.
The size of unsigned long, unsigned int and int is the same on all
three platforms (a quick way to check this is sketched below).
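For reference, a check along these lines should print 4 for each type on all
three machines if the sizes really do match. This is just a quick sketch and
is not part of the program above.
-------------------------------------------------------------------------------
#include <stdio.h>

int
main(void)
{
    /* Print the size of each type.  The cast to unsigned keeps the
       format specifier portable on pre-C99 compilers such as gcc 2.96. */
    printf("sizeof(unsigned long) = %u\n", (unsigned) sizeof(unsigned long));
    printf("sizeof(unsigned int)  = %u\n", (unsigned) sizeof(unsigned int));
    printf("sizeof(int)           = %u\n", (unsigned) sizeof(int));
    return (0);
}
-------------------------------------------------------------------------------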
Does anyone have any ideas on what could be happening?
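In case it helps anyone reproduce this, here is the carry-propagation idiom
the program relies on, stripped of the printf scaffolding. This is only a
sketch: it assumes unsigned long is 32 bits wide, and the add32 helper name
is mine, not something from the program above.
-------------------------------------------------------------------------------
#include <stdio.h>

typedef struct LongStruct
{
    unsigned long lo;   /* low 32 bits of a 64-bit value  */
    unsigned long hi;   /* high 32 bits of a 64-bit value */
}
LongStruct;

/* Add addend to the 64-bit value in *s, propagating any carry from the
   low word into the high word.  Unsigned overflow is well defined in C:
   assuming a 32-bit unsigned long, the sum wraps modulo 2^32, and the
   result is smaller than the addend exactly when a carry was produced. */
static void
add32(LongStruct * s, unsigned long addend)
{
    s->lo += addend;
    if (s->lo < addend)
    {
        s->hi++;
    }
}

int
main(void)
{
    LongStruct s;

    s.lo = 0x00000001UL;
    s.hi = 0x00000000UL;
    add32(&s, 0xFFFFFFFFUL);    /* 1 + 0xFFFFFFFF = 0x1 00000000 */

    /* Expected: s.hi = 00000001 s.lo = 00000000 */
    printf("s.hi = %08lX s.lo = %08lX\n", s.hi, s.lo);

    return (0);
}
-------------------------------------------------------------------------------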