
EDIT: Changing my main function after the end = clock(); call to:

    end = clock();
    unsigned long total_time = (end - start);
    u_llong total_iterations = (max - min);

    if ((u_llong)(total_time / CLOCKS_PER_SEC) > 0)
    {
        total_time = total_time / CLOCKS_PER_SEC;
        double ips = (double)total_iterations / total_time; // cast avoids integer truncation
        printf("Iterations per second: %f\n", ips);
        printf("Total time taken for %I64u iterations: %lu seconds.\n", total_iterations, total_time);
    }
    else
    {
        // sub-second run: total_time is still in clock ticks
        // (milliseconds on Windows, where CLOCKS_PER_SEC is 1000)
        double ipms = (double)total_iterations / total_time; // floating-point division, so total_time == 0 cannot be fatal
        printf("Iterations per millisecond: %f\n", ipms);
        printf("Total time taken for %I64u iterations: %lu ms.\n", total_iterations, total_time);
    }
    printf("---------------------\n");
    fflush(stdout);
    return 0;
}

has done the trick for me.

Here is the question again:

I have a problem. When I compile this code using gcc main.c -Wall -std=c99 -lcrypto, I don't get any errors. However, when I run the code in the console, it should output something like this:

$ a.exe
Starting 18446744073709551615 iterations now.
Starting at 11144000000
Done. Solution: caesar at 11144802215
Total time taken for 802215 iterations: 16 seconds
Iterations per second: 50138.000000
$

However that is not the case. Instead all I get is:

$ a.exe
Starting 18446744073709551615 iterations now.
Starting at 11144000000
$

The program just stops executing and terminates before it finishes. Now the bizarre thing: when I put a printf("some random string"); inside my for loop, it works perfectly. I commented out the aforementioned printf(); so that you can see what I mean. I really don't understand why that is, where it comes from, or how to fix it. Why does my code only work when I use printf? I do not want to keep it, because I would like to get this code to execute as fast as I can.

#include <stdio.h>
#include <time.h>
#include <openssl/sha.h>
#include <string.h>

typedef unsigned long long u_llong;

#ifndef PASSWORD
#define PASSWORD "52915A4731522B93613F74A52D26F6A62AC8C5BD" //caesar
#endif
const char *base62Table = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
const int base = 62;

void hexString(char *dest, unsigned char *bytes, int bytesLength)
{
    char lookup[16] = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'};
    for (int i = 0; i < bytesLength; i++)
    {
        dest[2 * i] = lookup[(bytes[i] >> 4) & 0xF];
        dest[2 * i + 1] = lookup[bytes[i] & 0xF];
    }
    dest[2 * bytesLength] = 0;
}

void genHash(char *str, unsigned char *dest)
{
    SHA1((unsigned char *)str, strlen(str), dest);
}

int check(char *str)
{
    unsigned char hash[SHA_DIGEST_LENGTH] = {0};
    char hex[41] = {0};
    genHash(str, hash);
    hexString((char *)hex, (unsigned char *)hash, sizeof(hash));
    hex[40] = '\0';
    return strcmp(PASSWORD, hex) == 0;
}

void to_base(u_llong num, char *out)
{
    const int base = 62;
    if (num == 0)
    {
        out[0] = '0';
        out[1] = '\0';
        return;
    }
    int i = 0;
    while (num != 0)
    {
        out[i++] = base62Table[(num % base)];
        num /= base;
    }
    out[i] = '\0';
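    // strrev() is a Windows/MinGW extension, not standard C; it reverses
    // the string in place and returns its argument, so the assignment
    // below is redundant.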
    out = strrev(out);
}

void run(u_llong *min, u_llong *max, char *str)
{
    //Right solution should come at i = 11144802215
    for (u_llong i = *min; i < *max; i++)
    {
        to_base(i, str);
        //printf("I only work when using this\n");
        if (check(str))
        {
            printf("Done. Solution: %s at %I64u\n", str, i);
            *max = i;
            break;
        }
    }
}

int main(void)
{
    u_llong max = 0xFFFFFFFFFFFFFFFF, min = 11144000000; //start shortly before the right solution comes up
    clock_t start, end;
    printf("Starting %I64u iterations now.\nStarting at %I64u\n", max, min);
    fflush(stdout);
    const int length = 20;
    char str[length + 1];
    //Start executing the actual task
    start = clock();
    run(&min, &max, str);
    end = clock();
    //finished executing the task
    unsigned long total = (end - start) / CLOCKS_PER_SEC;
    u_llong total_iterations = (max - min);
    double ips = (max - min) / total;
    printf("Total time taken for %I64u iterations: %lu seconds \n", total_iterations, total);
    printf("Iterations per second: %f\n", ips);
    fflush(stdout);
    return 0;
}

---EDIT---

I am compiling and running on a Windows 10 OS using Mingw64. The %llu format string does not work in my GCC (it is reported as "unknown"), so I use %I64u, which works.
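
For reference, a more portable spelling would be something like this minimal sketch, assuming <inttypes.h> is available (on MinGW builds that use the Microsoft runtime's printf, PRIu64 typically expands to the I64u form):

    #include <stdio.h>
    #include <inttypes.h>   /* PRIu64 expands to the right conversion per platform */

    int main(void)
    {
        uint64_t max = 18446744073709551615ULL; /* same value as my max above */
        printf("Starting %" PRIu64 " iterations now.\n", max);
        return 0;
    }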

When I try to run it using gdb (after adding the -g flag to the compiler) I get:

$ gdb a.exe
GNU gdb (GDB) 7.8
Copyright (C) 2014 Free Software Foundation, Inc.
License GPLv3+: GNU GPL version 3 or later http://gnu.org/licenses/gpl.html
This is free software: you are free to change and redistribute it.
There is NO WARRANTY, to the extent permitted by law. Type "show copying"
and "show warranty" for details.
This GDB was configured as "i686-w64-mingw32".
Type "show configuration" for configuration details.
For help, type "help".
Type "apropos word" to search for commands related to "word"...
Reading symbols from a.exe...done.
$

When I then use the bt command ("bt -- Print backtrace of all stack frames"), it simply says:

No stack.

---EDIT---

So I added a Sleep(1); call (included from windows.h) and now it does not crash on startup, for some reason. Can it really be running "too" fast? I am so confused.

Someone pointed out that end == start. This is not the case, as the same crash happens even when I wrap it in:

//...
end = clock();
if (end == start)
{
    printf("End equals start");
}
else
{
    unsigned long total = (end - start) / CLOCKS_PER_SEC;
    double ips = (max - min) / total;
    printf("Iterations per second: %f\n", ips);
    u_llong total_iterations = (max - min);
    printf("Total time taken for %I64u iterations: %lu seconds \n", total_iterations, total);
}
//...
  • Single step through the code with your debugger...
    – Lundin
    Commented Sep 16, 2020 at 11:37
  • "%I64u" is not a valid format specifier. If you want to use and print a 64 bit unsigned integer, use int64_t from <stdint.h> and use "%"PRIu64 as a format specifier. The correct format specifier for unsigned long long is "%llu" which is at least 64 bit but can be more. Commented Sep 16, 2020 at 11:38
  • Really, @12431234123412341234123? Because my compiler does not throw any fits against it; for me it's working. But I will change it then.
    – Kuro-dev
    Commented Sep 16, 2020 at 11:41
  • Heisenbugs often are due to memory corruption. You pass around a lot of output buffers without passing along their corresponding sizes, which is an error-prone practice and makes the code harder to reason about. You also have a lot of hard-coded sizes instead of expressing them in terms of other quantities. (For example, 41 should be SHA_DIGEST_LENGTH * 2 + 1.)
    – jamesdlin
    Commented Sep 16, 2020 at 11:42
  • I see, I will change that and see if the bug still occurs.
    – Kuro-dev
    Commented Sep 16, 2020 at 11:45

1 Answer

unsigned long total = (end - start) / CLOCKS_PER_SEC;
double ips = (max - min) / total;

When you add an additional printf, the code happens to run long enough that total is not zero.

Without the printf, your code is fast enough that it executes in less than a second, so (end - start) < CLOCKS_PER_SEC. Because of that, (end - start) / CLOCKS_PER_SEC is zero, total is equal to zero, and when dividing by zero your program gets terminated by a SIGFPE signal (arithmetic exception).

This is a perfect case to brush up on your debugger skills; such a case is easily detectable with a debugger.
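
For illustration, here is a minimal standalone sketch of that failure mode (the 1000000 is just a stand-in iteration count):

    #include <stdio.h>
    #include <time.h>

    int main(void)
    {
        clock_t start = clock();
        clock_t end = clock(); /* virtually no time has passed */

        /* truncates to 0 whenever the run took less than one second */
        unsigned long total = (end - start) / CLOCKS_PER_SEC;

        /* integer division by zero: the process is terminated before
           the printf below is ever reached, as in the question */
        unsigned long ips = 1000000 / total;

        printf("Iterations per second: %lu\n", ips);
        return 0;
    }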

  • While this has solved the run issue, the question still remains: my code takes (considerably) longer than 1 second to execute. How can total be 0?
    – Kuro-dev
    Commented Sep 16, 2020 at 13:11
  • "My code takes (considerably) longer than 1 second to execute": most probably the difference in clock() output is just below 1 second, which happens to result in total being equal to 0. Note that clock() does not measure real time, but the "CPU clocks" used by your process. The whole program, including termination, takes 0.7 seconds on my PC.
    – KamilCuk
    Commented Sep 16, 2020 at 13:21
  • Read the edit I provided at the bottom of my question. Even when I wrap it in an if-statement, it crashes on startup, instantly at that. It's so weird.
    – Kuro-dev
    Commented Sep 16, 2020 at 13:22
  • I ran the code on Linux, and apart from some printf problems with %I64u, everything works correctly. The calculation starts at 11144000000 and ends at 11144802215, so I think taking less than 1 second to process 802215 integers is reasonable.
    – randomeval
    Commented Sep 16, 2020 at 13:25
  • @Kuro-dev Check for total == 0, not for end == start. You could check for end - start < CLOCKS_PER_SEC. The output is buffered and may not be flushed on process termination; add fflush(0) to flush the output.
    – KamilCuk
    Commented Sep 16, 2020 at 13:25
