Add data throughput calculation

Abdelrahman Said 2023-09-03 00:26:30 +01:00
parent 19c02b4e99
commit e461de30c0
6 changed files with 45 additions and 29 deletions
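In essence, each profiler sample now carries a byte count, and profile_end() turns it into a throughput figure: bytes are converted to megabytes, the sample's elapsed CPU ticks are divided by the measured CPU frequency to get seconds, and the result is printed in GB/s. A minimal standalone sketch of that arithmetic, with standard C types standing in for the project's u64/f64 typedefs (the helper name and example numbers are hypothetical):

#include <stdint.h>
#include <stdio.h>

/* Hypothetical helper mirroring the calculation added to profile_end():
 * bytes -> MB, elapsed CPU ticks -> seconds, then MB/s -> GB/s. */
static double throughput_gb_per_sec(uint64_t byte_count, uint64_t elapsed_ticks,
                                    uint64_t cpu_freq) {
  const double byte_to_mb = 1.0 / (1024.0 * 1024.0);
  const double mb_to_gb = 1.0 / 1024.0;
  double data_read_mb = (double)byte_count * byte_to_mb;
  double seconds = (double)elapsed_ticks / (double)cpu_freq;
  return data_read_mb * mb_to_gb / seconds;
}

int main(void) {
  /* Example numbers (hypothetical): 80 MB processed in 0.05 s of CPU time
   * with a 3 GHz timer frequency -> 1.5625 GB/s. */
  uint64_t bytes = 80u * 1024u * 1024u;
  uint64_t ticks = 150000000u;
  uint64_t freq = 3000000000u;
  printf("%.4f GB/s\n", throughput_gb_per_sec(bytes, ticks, freq));
  return 0;
}

The same conversion constants (1/(1024*1024) and 1/1024) appear as byte_to_mb and mb_to_gb in the profile_end() change below.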

View File

@@ -6,13 +6,11 @@ enum profiler_ids {
   PROFILER_ID_JSON_PARSE,
   PROFILER_ID_READ_JSON_FILE,
   PROFILER_ID_PARSER_SETUP,
-  PROFILER_ID_LEX_GET_TOKEN,
-  PROFILER_ID_PARSE_TOKEN,
+  PROFILER_ID_PARSER_PARSE_TOKENS,
   PROFILER_ID_PARSER_TEAR_DOWN,
   PROFILER_ID_LOAD_JSON_PAIRS,
   PROFILER_ID_READ_BINARY,
   PROFILER_ID_HAVERSINE_SUM,
-  PROFILER_ID_HAVERSINE_DISTANCE,
   PROFILER_ID_HAVERSINE_AVG,
   PROFILER_ID_TEAR_DOWN,
   PROFILER_ID_FREE_JSON,

View File

@@ -9,14 +9,15 @@
 #ifdef FULL_PROFILING
 #define SAMPLE_START(ID, TITLE) sample_start(ID, TITLE)
-#define SAMPLE_END(ID) sample_end(ID)
+#define SAMPLE_END(ID, BYTES) sample_end(ID, BYTES)
+#define SAMPLE_END_DEFAULT(ID) sample_end(ID, 0)
 #ifdef __cplusplus
 extern "C" {
 #endif
 void sample_start(u64 id, const char *title);
-void sample_end(u64 id);
+void sample_end(u64 id, u64 byte_count);
 #ifdef __cplusplus
 }
@@ -24,7 +25,8 @@ void sample_end(u64 id);
 #else
 #define SAMPLE_START(ID, TITLE)
-#define SAMPLE_END(ID)
+#define SAMPLE_END(ID, BYTES)
+#define SAMPLE_END_DEFAULT(ID)
 #endif // FULL_PROFILING
 #if defined(BASIC_PROFILING) || defined(FULL_PROFILING)
@@ -44,6 +46,7 @@ struct sample {
   u64 exclusive_time;
   u64 children_time;
   u64 hit_count;
+  u64 byte_count;
   profiler_sample_t *parent;
 };
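For orientation, a minimal, self-contained sketch of how the revised macros are meant to be called under FULL_PROFILING: SAMPLE_END takes the number of bytes the sample processed, while SAMPLE_END_DEFAULT expands to a zero byte count. The macro shapes are copied from the header above; the stub sample_end() and the numeric IDs are illustrative only, since the real implementation accumulates the byte count on the sample (see profiler.c below).

#include <stdint.h>
#include <stdio.h>

typedef uint64_t u64; /* stand-in for the project's u64 typedef */

/* Illustrative stub: the real sample_end() in profiler.c adds byte_count
 * to the sample so profile_end() can print a throughput figure. */
void sample_end(u64 id, u64 byte_count) {
  printf("sample %llu ended, %llu bytes\n",
         (unsigned long long)id, (unsigned long long)byte_count);
}

/* Macro shapes as declared in the header diff above. */
#define SAMPLE_END(ID, BYTES) sample_end(ID, BYTES)
#define SAMPLE_END_DEFAULT(ID) sample_end(ID, 0)

int main(void) {
  SAMPLE_END(1, 4096);   /* sample that moved data: pass its byte count */
  SAMPLE_END_DEFAULT(2); /* sample with no meaningful data volume */
  return 0;
}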

View File

@@ -195,7 +195,7 @@ void free_json(jentity_t **root) {
   *root = NULL;
-  SAMPLE_END(PROFILER_ID_FREE_JSON);
+  SAMPLE_END_DEFAULT(PROFILER_ID_FREE_JSON);
 }
 jcoll_t *get_collection_from_entity(const jentity_t *entity) {

View File

@@ -44,7 +44,7 @@ jentity_t *load_json(const char *filepath) {
   fclose(fp);
-  SAMPLE_END(PROFILER_ID_READ_JSON_FILE);
+  SAMPLE_END(PROFILER_ID_READ_JSON_FILE, length);
   SAMPLE_START(PROFILER_ID_PARSER_SETUP, "JSON PARSER SETUP");
@@ -63,23 +63,19 @@ jentity_t *load_json(const char *filepath) {
     return NULL;
   }
-  SAMPLE_END(PROFILER_ID_PARSER_SETUP);
+  SAMPLE_END_DEFAULT(PROFILER_ID_PARSER_SETUP);
+  SAMPLE_START(PROFILER_ID_PARSER_PARSE_TOKENS, "PARSE TOKENS");
-  SAMPLE_START(PROFILER_ID_LEX_GET_TOKEN, "GET NEXT TOKEN");
   lex_result_t result = get_next_token(lexer, json);
-  SAMPLE_END(PROFILER_ID_LEX_GET_TOKEN);
   if (result.error.errno) {
     printf("%s\n", result.error.msg);
   } else {
     while (result.token.type != TK_NO_TOKEN) {
-      SAMPLE_START(PROFILER_ID_PARSE_TOKEN, "PARSE TOKEN");
       parse_token(parser, result.token);
-      SAMPLE_END(PROFILER_ID_PARSE_TOKEN);
-      SAMPLE_START(PROFILER_ID_LEX_GET_TOKEN, "GET NEXT TOKEN");
       result = get_next_token(lexer, NULL);
-      SAMPLE_END(PROFILER_ID_LEX_GET_TOKEN);
       if (result.error.errno) {
         printf("%s\n", result.error.msg);
@@ -88,6 +84,8 @@ jentity_t *load_json(const char *filepath) {
     }
   }
+  SAMPLE_END_DEFAULT(PROFILER_ID_PARSER_PARSE_TOKENS);
   jentity_t *root = parser->root;
   SAMPLE_START(PROFILER_ID_PARSER_TEAR_DOWN, "PARSER TEAR DOWN");
@@ -96,7 +94,7 @@ jentity_t *load_json(const char *filepath) {
   lexer_free(&lexer);
   free(json);
-  SAMPLE_END(PROFILER_ID_PARSER_TEAR_DOWN);
+  SAMPLE_END_DEFAULT(PROFILER_ID_PARSER_TEAR_DOWN);
   return root;
 }

View File

@@ -19,7 +19,7 @@ int main(int argc, char *argv[]) {
   SAMPLE_START(PROFILER_ID_CLI_PARSE, "CLI PARSING");
   ProcessorArgs args = parse_args(argc, argv);
-  SAMPLE_END(PROFILER_ID_CLI_PARSE);
+  SAMPLE_END_DEFAULT(PROFILER_ID_CLI_PARSE);
   SAMPLE_START(PROFILER_ID_JSON_PARSE, "JSON PARSING");
@@ -27,7 +27,7 @@ int main(int argc, char *argv[]) {
   assert(root->type == JENTITY_SINGLE && root->value.type == JVAL_COLLECTION);
-  SAMPLE_END(PROFILER_ID_JSON_PARSE);
+  SAMPLE_END_DEFAULT(PROFILER_ID_JSON_PARSE);
   SAMPLE_START(PROFILER_ID_LOAD_JSON_PAIRS, "LOAD JSON PAIRS");
@@ -62,7 +62,7 @@ int main(int argc, char *argv[]) {
     point_pairs[index++] = p;
   }
-  SAMPLE_END(PROFILER_ID_LOAD_JSON_PAIRS);
+  SAMPLE_END_DEFAULT(PROFILER_ID_LOAD_JSON_PAIRS);
   SAMPLE_START(PROFILER_ID_READ_BINARY, "BINARY READ");
@@ -76,7 +76,7 @@ int main(int argc, char *argv[]) {
     fseek(fp, sizeof(u64), SEEK_SET);
   }
-  SAMPLE_END(PROFILER_ID_READ_BINARY);
+  SAMPLE_END_DEFAULT(PROFILER_ID_READ_BINARY);
   SAMPLE_START(PROFILER_ID_HAVERSINE_SUM, "HAVERSINE SUM");
@@ -84,9 +84,7 @@ int main(int argc, char *argv[]) {
   f64 distance = 0.0;
   f64 saved_distance = 0.0;
   for (u64 i = 0; i < pair_count; ++i) {
-    SAMPLE_START(PROFILER_ID_HAVERSINE_DISTANCE, "HAVERSINE DISTANCE");
     distance = haversine_of_degrees(point_pairs[i], EARTH_RADIUS_KM);
-    SAMPLE_END(PROFILER_ID_HAVERSINE_DISTANCE);
     if (fp) {
       fread(&saved_distance, sizeof(f64), 1, fp);
@@ -100,11 +98,11 @@ int main(int argc, char *argv[]) {
     sum += distance;
   }
-  SAMPLE_END(PROFILER_ID_HAVERSINE_SUM);
+  SAMPLE_END(PROFILER_ID_HAVERSINE_SUM, sizeof(f64) * pair_count);
   SAMPLE_START(PROFILER_ID_HAVERSINE_AVG, "HAVERSINE AVERAGE");
   printf("\nAVERAGE DISTANCE: %f\n", sum / pair_count);
-  SAMPLE_END(PROFILER_ID_HAVERSINE_AVG);
+  SAMPLE_END_DEFAULT(PROFILER_ID_HAVERSINE_AVG);
   SAMPLE_START(PROFILER_ID_TEAR_DOWN, "TEAR DOWN");
@@ -116,7 +114,7 @@ int main(int argc, char *argv[]) {
   free_json(&root);
-  SAMPLE_END(PROFILER_ID_TEAR_DOWN);
+  SAMPLE_END_DEFAULT(PROFILER_ID_TEAR_DOWN);
   PROFILE_END;
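As a worked example of the byte count passed for the haversine sum above (the pair count is hypothetical): with 10,000,000 pairs, sizeof(f64) * pair_count = 8 * 10,000,000 = 80,000,000 bytes, which the profiler would report as roughly 76.29 MB; if the summing loop took 0.5 seconds of CPU time, the printed throughput would be about 76.29 / 1024 / 0.5 ≈ 0.149 GB/s.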

View File

@@ -62,7 +62,7 @@ u64 get_cpu_freq(u64 milliseconds) {
 }
 void profile_start(u64 count) {
-  profiler.cpu_freq = get_cpu_freq(500);
+  profiler.cpu_freq = get_cpu_freq(1000);
   profiler.start = read_cpu_timer();
   profiler.max_title_length = 0;
   profiler.size = count;
@@ -89,16 +89,21 @@ void profile_end() {
   // clang-format on
   if (profiler.cpu_freq) {
-    printf("Total: %*.*f seconds (CPU frequency: %llu hz/sec)\n\n",
-           time_char_count, time_precision, (f64)total / profiler.cpu_freq,
-           (unsigned long long)profiler.cpu_freq);
+    printf("Total: %*.*f seconds, %zu (CPU frequency: %llu hz/sec)\n\n",
+           time_char_count, time_precision, (f64)total / profiler.cpu_freq,
+           total, (unsigned long long)profiler.cpu_freq);
   }
 #ifdef FULL_PROFILING
+  f64 byte_to_mb = 1.0 / (1024.0 * 1024.0);
+  f64 mb_to_gb = 1.0 / 1024.0;
   u16 duration_char_count = 22;
   u16 hits_char_count = 10;
   u16 percentage_precision = 8;
   u16 percentage_char_count = 12;
+  u16 throughput_precision = 24;
+  u16 throughput_char_count = 32;
   profiler_sample_t *sample = NULL;
@@ -122,6 +127,18 @@ void profile_end() {
              100.0);
     }
+    if (sample->byte_count > 0) {
+      f64 data_read = (f64)(sample->byte_count) * byte_to_mb;
+      f64 sample_time_in_seconds =
+          (f64)(sample->exclusive_time + sample->children_time) /
+          profiler.cpu_freq;
+      printf(", Byte count: %*.*f MB, Throughput: %*.*f GB/s",
+             throughput_char_count, throughput_precision, data_read,
+             throughput_char_count, throughput_precision,
+             data_read * mb_to_gb / sample_time_in_seconds);
+    }
     printf(")\n");
   }
 #endif // FULL_PROFILING
@@ -143,6 +160,7 @@ void sample_start(u64 id, const char *title) {
   sample->exclusive_time = 0;
   sample->children_time = 0;
   sample->hit_count = 0;
+  sample->byte_count = 0;
   sample->parent = NULL;
   u64 length = strlen(sample->title);
@@ -175,7 +193,7 @@ void sample_start(u64 id, const char *title) {
   profiler.active = sample;
 }
-void sample_end(u64 id) {
+void sample_end(u64 id, u64 byte_count) {
   if (id >= MAX_PROFILE_SAMPLES) {
     return;
   }
@@ -185,6 +203,7 @@ void sample_end(u64 id) {
   u64 duration = read_cpu_timer() - sample->start;
   sample->exclusive_time += duration;
+  sample->byte_count += byte_count;
   u64 now = read_cpu_timer();