Use uint16_t instead of magic ifdefs, leaving support for the bit type in there in case we support it some day

This commit is contained in:
mrdudz 2020-07-21 23:59:05 +02:00
parent 6d518a61a5
commit 4a9c5ff63b
8 changed files with 41 additions and 121 deletions

View File

@@ -6,33 +6,18 @@
#include <stdio.h>
#include <limits.h>
#include <stdint.h>
/* #define SUPPORT_BIT_TYPES */
unsigned char success=0;
unsigned char failures=0;
unsigned char dummy=0;
#ifdef SIZEOF_INT_16BIT
#if defined(__LINUX__) || defined(LINUX)
unsigned short aint0 = 0;
unsigned short aint1 = 0;
unsigned short aint2 = 0;
unsigned short aint3 = 0;
#else
unsigned int aint0 = 0;
unsigned int aint1 = 0;
unsigned int aint2 = 0;
unsigned int aint3 = 0;
#endif
#else
unsigned int aint0 = 0;
unsigned int aint1 = 0;
unsigned int aint2 = 0;
unsigned int aint3 = 0;
#endif
uint16_t aint0 = 0;
uint16_t aint1 = 0;
uint16_t aint2 = 0;
uint16_t aint3 = 0;
unsigned char achar0 = 0;
unsigned char achar1 = 0;

View File

@@ -6,11 +6,14 @@
#include <stdio.h>
#include <limits.h>
#include <stdint.h>
/*
compare4.c
*/
/*#define SUPPORT_BIT_TYPES */
/*#define COMPARE_OUT_OF_RANGE 1*/
unsigned char success = 0;
@@ -20,22 +23,9 @@ unsigned char dummy = 0;
#ifdef SUPPORT_BIT_TYPES
bit bit0 = 0;
#endif
#ifdef SIZEOF_INT_16BIT
#if defined(__LINUX__) || defined(LINUX)
short int0 = 0;
short int1 = 0;
#else
int int0 = 0;
int int1 = 0;
#endif
#else
int int0 = 0;
int int1 = 0;
#endif
int16_t int0 = 0;
int16_t int1 = 0;
signed char char0 = 0;
signed char char1 = 0;

View File

@@ -6,6 +6,9 @@
#include <stdio.h>
#include <limits.h>
#include <stdint.h>
/* #define SUPPORT_BIT_TYPES */
unsigned char success=0;
unsigned char failures=0;
@@ -14,22 +17,10 @@ unsigned char dummy=0;
#ifdef SUPPORT_BIT_TYPES
bit bit0 = 0;
#endif
#ifdef SIZEOF_INT_16BIT
#if defined(__LINUX__) || defined(LINUX)
unsigned short aint0 = 0;
unsigned short aint1 = 0;
#else
unsigned int aint0 = 0;
unsigned int aint1 = 0;
uint16_t aint0 = 0;
uint16_t aint1 = 0;
#endif
#else
unsigned int aint0 = 0;
unsigned int aint1 = 0;
#endif
unsigned char achar0 = 0;
unsigned char achar1 = 0;
unsigned char achar2 = 0;

View File

@@ -6,6 +6,9 @@
#include <stdio.h>
#include <limits.h>
#include <stdint.h>
/* #define SUPPORT_BIT_TYPES */
unsigned char success=0;
unsigned char failures=0;
@@ -14,22 +17,10 @@ unsigned char dummy=0;
#ifdef SUPPORT_BIT_TYPES
bit bit0 = 0;
#endif
#ifdef SIZEOF_INT_16BIT
#if defined(__LINUX__) || defined(LINUX)
unsigned short aint0 = 0;
unsigned short aint1 = 0;
#else
unsigned int aint0 = 0;
unsigned int aint1 = 0;
uint16_t aint0 = 0;
uint16_t aint1 = 0;
#endif
#else
unsigned int aint0 = 0;
unsigned int aint1 = 0;
#endif
unsigned char uchar0 = 0;
unsigned char uchar1 = 0;
unsigned char uchar2 = 0;

View File

@@ -6,6 +6,9 @@
#include <stdio.h>
#include <limits.h>
#include <stdint.h>
/* #define SUPPORT_BIT_TYPES */
unsigned char success=0;
unsigned char failures=0;
@@ -14,22 +17,10 @@ unsigned char dummy=0;
#ifdef SUPPORT_BIT_TYPES
bit bit0 = 0;
#endif
#ifdef SIZEOF_INT_16BIT
#if defined(__LINUX__) || defined(LINUX)
signed short aint0 = 0;
signed short aint1 = 0;
#else
signed int aint0 = 0;
signed int aint1 = 0;
int16_t aint0 = 0;
int16_t aint1 = 0;
#endif
#else
signed int aint0 = 0;
signed int aint1 = 0;
#endif
signed char achar0 = 0;
signed char achar1 = 0;
signed char achar2 = 0;

View File

@@ -6,6 +6,9 @@
#include <stdio.h>
#include <limits.h>
#include <stdint.h>
/* #define SUPPORT_BIT_TYPES */
unsigned char success=0;
unsigned char failures=0;
@@ -14,22 +17,10 @@ unsigned char dummy=0;
#ifdef SUPPORT_BIT_TYPES
bit bit0 = 0;
#endif
#ifdef SIZEOF_INT_16BIT
#if defined(__LINUX__) || defined(LINUX)
signed short aint0 = 0;
signed short aint1 = 0;
#else
signed int aint0 = 0;
signed int aint1 = 0;
int16_t aint0 = 0;
int16_t aint1 = 0;
#endif
#else
signed int aint0 = 0;
signed int aint1 = 0;
#endif
signed char achar0 = 0;
signed char achar1 = 0;
signed char achar2 = 0;

View File

@@ -6,27 +6,14 @@
#include <stdio.h>
#include <limits.h>
#include <stdint.h>
unsigned char success=0;
unsigned char failures=0;
unsigned char dummy=0;
#ifdef SIZEOF_INT_16BIT
#if defined(__LINUX__) || defined(LINUX)
signed short aint0 = 0;
signed short aint1 = 0;
#else
signed int aint0 = 0;
signed int aint1 = 0;
#endif
#else
signed int aint0 = 0;
signed int aint1 = 0;
#endif
int16_t aint0 = 0;
int16_t aint1 = 0;
/*
signed char achar0 = 0;

View File

@@ -6,6 +6,10 @@
#include <stdio.h>
#include <limits.h>
#include <stdint.h>
/* #define SUPPORT_BIT_TYPES */
/* #define SUPPORT_BIT_ARITHMETIC */
unsigned char success=0;
unsigned char failures=0;
@@ -28,19 +32,9 @@ bit bit11 = 0;
#endif
#ifdef SIZEOF_INT_16BIT
#if defined(__LINUX__) || defined(LINUX)
unsigned short aint0 = 0;
unsigned short aint1 = 0;
#else
unsigned int aint0 = 0;
unsigned int aint1 = 0;
#endif
uint16_t aint0 = 0;
uint16_t aint1 = 0;
#else
unsigned int aint0 = 0;
unsigned int aint1 = 0;
#endif
unsigned char achar0 = 0;
unsigned char achar1 = 0;
unsigned char achar2 = 0;