;;; test foreign type conversions

#>
void convert_c(int i, double d, float f) {
  printf("i: %d\n", i);
  printf("d: %.20g\n", d);
  printf("f: %.20f\n", f);
}
short convert_int_to_short(int i) { return (short)i; }
unsigned short convert_int_to_ushort(int i) { return (unsigned short)i; }
int add_long_and_ulong(long i, unsigned long j) { return i+j; }
int add_short_and_ushort(short i, unsigned short j) { return i+j; }
int add_char_and_uchar(char i, unsigned char j) { return i+j; }
<#

;; Scheme wrapper for convert_c; note that "integer" maps to a signed C int.
(define convert
  (foreign-lambda* void ((integer i) (double d) (float f))
    "convert_c(i, d, f);"))

#|
#;2> (convert 1 2 3)
i: 1
d: 2
f: 3.000000
#;3> (convert 1.0 2.0 3.0)
i: 1
d: 2
f: 3.000000
#;4> (convert 1.1 2.1 3.1)
i: 1
d: 2.1
f: 3.100000
#;5> (convert #xffffffff 2.123456789123456789 3.123456789123456789)
Error: bad argument type - not an integer: 4294967295.0
#;3> (convert 1.1 2.123456789123456789 3.123456789123456789)
i: 1
d: 2.1234567891234568116
f: 3.12345671653747558594

;; Notice "integer" is a -signed- integer (even when represented in Scheme
;; as a flonum).  Although #xffffffff fits in 32 bits, it is out of the
;; range of a signed integer and is not automatically converted to signed.
;; Bug or feature?  convert_c has the same behavior.

#;10> (convert 2147483647.0 3 3)
i: 2147483647
#;10> (convert 2147483648.0 3 3)
Error: bad argument type - not an integer: 2147483648.0
#;10> (convert -214783647.0 3 3)
i: -214783647

#;11> (convert_u 4294967295.0)
i: 4294967295
#;10> (convert_u -1073741824)
i: 3221225472
#;11> (convert_u -1073741825)
Error: bad argument type - not an unsigned integer: -1073741825.0
|#
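
;; `convert_u' is called in the session above, but its definition did not
;; make it into this file.  A minimal sketch of how it was presumably
;; bound, using the `unsigned-integer' foreign type; the printf format is
;; an assumption, chosen to match the "i: ..." output in the transcript.

(define convert_u
  (foreign-lambda* void ((unsigned-integer i))
    "printf(\"i: %u\\n\", i);"))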
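
;; The helper functions in the #> ... <# block are not exercised in the
;; transcript; with a plain #> ... <# declaration they also need Scheme
;; wrappers before they can be called.  A sketch for two of them, using
;; foreign-lambda with the matching foreign types (the Scheme names are
;; illustrative, not from the original session):

(define convert-int-to-short
  (foreign-lambda short "convert_int_to_short" int))

(define add-short-and-ushort
  (foreign-lambda int "add_short_and_ushort" short unsigned-short))

;; e.g. (add-short-and-ushort -1 65535) should return 65534, since both
;; operands are promoted to int before the addition on the C side.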