I wrote some code for setting bits within 32-bit integers, and I defined all the masks I need. When I want to set a bit I just OR my variable with the mask, and that works fine. But when I want to unset it I'm not sure I'm doing the right thing: right now I use the XOR operator (^). Is this a good idea, or is there a simpler way?
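To show what I mean, here is a minimal standalone sketch (not my real code, just the two operators on a hardcoded mask):
Code:
#include <stdio.h>

int main(void)
{
    unsigned long flags = 0;

    flags = flags | 0x00000080UL; /* OR sets bit 7: flags is now 0x80 */
    flags = flags ^ 0x00000080UL; /* XOR flips bit 7 off again: flags is now 0x00 */
    flags = flags ^ 0x00000080UL; /* but XOR on a clear bit turns it back on: flags is 0x80 */

    printf("%#lx\n", flags);      /* prints 0x80 */
    return 0;
}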
bits.h:
Code:
#ifndef BITS_H_INCLUDED
#define BITS_H_INCLUDED
/* bits 0 to 7 */
#define BIT_0 0x00000001 // (0000 0000 0000 0000 0000 0000 0000 0001)
#define BIT_1 0x00000002 // (0000 0000 0000 0000 0000 0000 0000 0010)
#define BIT_2 0x00000004 // (0000 0000 0000 0000 0000 0000 0000 0100)
#define BIT_3 0x00000008 // (0000 0000 0000 0000 0000 0000 0000 1000)
#define BIT_4 0x00000010 // (0000 0000 0000 0000 0000 0000 0001 0000)
#define BIT_5 0x00000020 // (0000 0000 0000 0000 0000 0000 0010 0000)
#define BIT_6 0x00000040 // (0000 0000 0000 0000 0000 0000 0100 0000)
#define BIT_7 0x00000080 // (0000 0000 0000 0000 0000 0000 1000 0000)
/* byte */
/* bits 8 to 15 */
#define BIT_8 0x00000100 // (0000 0000 0000 0000 0000 0001 0000 0000)
#define BIT_9 0x00000200 // (0000 0000 0000 0000 0000 0010 0000 0000)
#define BIT_10 0x00000400 // (0000 0000 0000 0000 0000 0100 0000 0000)
#define BIT_11 0x00000800 // (0000 0000 0000 0000 0000 1000 0000 0000)
#define BIT_12 0x00001000 // (0000 0000 0000 0000 0001 0000 0000 0000)
#define BIT_13 0x00002000 // (0000 0000 0000 0000 0010 0000 0000 0000)
#define BIT_14 0x00004000 // (0000 0000 0000 0000 0100 0000 0000 0000)
#define BIT_15 0x00008000 // (0000 0000 0000 0000 1000 0000 0000 0000)
/* word */
/* bits 16 to 31 */
#define BIT_16 0x00010000 // (0000 0000 0000 0001 0000 0000 0000 0000)
#define BIT_17 0x00020000 // (0000 0000 0000 0010 0000 0000 0000 0000)
#define BIT_18 0x00040000 // (0000 0000 0000 0100 0000 0000 0000 0000)
#define BIT_19 0x00080000 // (0000 0000 0000 1000 0000 0000 0000 0000)
#define BIT_20 0x00100000 // (0000 0000 0001 0000 0000 0000 0000 0000)
#define BIT_21 0x00200000 // (0000 0000 0010 0000 0000 0000 0000 0000)
#define BIT_22 0x00400000 // (0000 0000 0100 0000 0000 0000 0000 0000)
#define BIT_23 0x00800000 // (0000 0000 1000 0000 0000 0000 0000 0000)
#define BIT_24 0x01000000 // (0000 0001 0000 0000 0000 0000 0000 0000)
#define BIT_25 0x02000000 // (0000 0010 0000 0000 0000 0000 0000 0000)
#define BIT_26 0x04000000 // (0000 0100 0000 0000 0000 0000 0000 0000)
#define BIT_27 0x08000000 // (0000 1000 0000 0000 0000 0000 0000 0000)
#define BIT_28 0x10000000 // (0001 0000 0000 0000 0000 0000 0000 0000)
#define BIT_29 0x20000000 // (0010 0000 0000 0000 0000 0000 0000 0000)
#define BIT_30 0x40000000 // (0100 0000 0000 0000 0000 0000 0000 0000)
#define BIT_31 0x80000000 // (1000 0000 0000 0000 0000 0000 0000 0000)
/* double word */
/* set a bit */
#define SET_BIT(a,b) ((a) |= (b))
/* clear a bit */
#define CLEAR_BIT(a,b) ((a) ^= (b))
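/* note: XOR only clears the bit when it is currently set; on a clear bit it sets it */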
/* test if bit is set */
#define BIT_SET(a,b) ((a) & (b))
/* define mask type */
typedef unsigned long mask_t;
/* lookup table */
extern mask_t bitmask_lookup_tbl[32];
#endif
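The table itself is defined in a separate .c file; a minimal sketch of it (assuming it simply maps index n to the mask for bit n, as the name suggests) looks like this:
Code:
#include "bits.h"

/* bitmask_lookup_tbl[n] is the mask for bit n, i.e. 1UL << n */
mask_t bitmask_lookup_tbl[32] = {
    BIT_0,  BIT_1,  BIT_2,  BIT_3,  BIT_4,  BIT_5,  BIT_6,  BIT_7,
    BIT_8,  BIT_9,  BIT_10, BIT_11, BIT_12, BIT_13, BIT_14, BIT_15,
    BIT_16, BIT_17, BIT_18, BIT_19, BIT_20, BIT_21, BIT_22, BIT_23,
    BIT_24, BIT_25, BIT_26, BIT_27, BIT_28, BIT_29, BIT_30, BIT_31
};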
Code:
unsigned long flags = 0;

SET_BIT(flags, BIT_7);
SET_BIT(flags, BIT_8);

if (BIT_SET(flags, BIT_7))
{
    CLEAR_BIT(flags, BIT_8);
}
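For completeness, this is one way to use the lookup table when the bit number is only known at run time (again assuming the table maps index n to the mask for bit n):
Code:
unsigned long flags = 0;
int n = 12; /* bit number computed at run time, value here is just an example */

SET_BIT(flags, bitmask_lookup_tbl[n]);

if (BIT_SET(flags, bitmask_lookup_tbl[n]))
{
    CLEAR_BIT(flags, bitmask_lookup_tbl[n]);
}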
edit:
When I try:
Code:
if(!BIT_SET(flags, BIT_8));