I realize this question could be processor dependent, but hopefully someone can point me in the right direction. For the life of me, I cannot figure out how to convert an unsigned long long int representing nanoseconds to a double representing seconds in C (I'm using a 32-bit big-endian PowerPC 405 for this particular test, and a GNU C99 compiler).
I've tried:
unsigned long long int nanoseconds = 1234567890LLU;
double nanoseconds_d = nanoseconds*1e-9;
Also:
nanoseconds_d = ((double) nanoseconds)*1e-9;
In both cases, I just get 0. What am I doing wrong here?
EDITED TO ADD FULL EXAMPLE
#include <stdio.h>
#include <stdlib.h>
int
main( int argc, char *argv[] )
{
    unsigned long long int nanoseconds = 1234567890LLU;
    double nanoseconds_d = nanoseconds * 1e-9;

    printf("%g\n", nanoseconds_d);
    return 0;
}
MAKEFILE
SRCS = simple.c
INCLUDE := -I$(PWD)
CFLAGS := -O0 -g3 -Wall -fmessage-length=0 -mhard-float -fsigned-char -D_REENTRANT
LIBS := -lc
OBJS = $(SRCS:.c=.o)
PROG = $(SRCS:.c=).out
all: $(PROG)
$(PROG): $(OBJS)
@echo "Linking object files with output."
$(CC) -o $(PROG) $(OBJS) $(LIBS)
@echo "Linking complete."
$(OBJS): $(SRCS)
@echo "Starting compilation."
$(CC) $(CFLAGS) $(INCLUDE) -c $<
@echo "Compilation complete."
clean::
@$(RM) *.o *.out
Works here when using %g to print:
#include <stdlib.h>
#include <stdio.h>
int main(void)
{
    unsigned long long int nanoseconds = 1234567890LLU;
    double nanoseconds_d = nanoseconds * 1e-9;
    printf("%g\n", nanoseconds_d);

    nanoseconds_d = ((double) nanoseconds) * 1e-9;
    printf("%g\n", nanoseconds_d);

    return 0;
}
outputs
1.23457
1.23457
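If the same source still prints 0 on the PowerPC target, it may help to rule out the conversion expression itself. The sketch below is a diagnostic of my own (not part of the original post): it cross-checks the floating-point result against integer-only arithmetic, so a correct integer line with a wrong double line points at the floating-point path or the printing on that toolchain rather than at the cast or the multiplication.

#include <stdio.h>

int main(void)
{
    unsigned long long int nanoseconds = 1234567890ULL;

    /* Conversion under test: integer nanoseconds to double seconds. */
    double seconds = (double) nanoseconds * 1e-9;

    /* Integer-only cross-check that never touches floating point:
       whole seconds plus the leftover nanoseconds. */
    unsigned long long whole = nanoseconds / 1000000000ULL;
    unsigned long long frac  = nanoseconds % 1000000000ULL;

    printf("double : %f\n", seconds);              /* expect 1.234568    */
    printf("integer: %llu.%09llu\n", whole, frac); /* expect 1.234567890 */
    return 0;
}

If the integer line comes out right but the double line is still 0, I'd look at the floating-point configuration of the build (the -mhard-float flag in the Makefile and how the target handles floating point would be the first things I'd double-check) rather than at the C expression.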