Use numerical IDs for the profiler

Abdelrahman Said 2023-07-09 22:15:53 +01:00
parent e17ca4d3d2
commit 43ec97378c
2 changed files with 40 additions and 15 deletions


@@ -0,0 +1,22 @@
+#ifndef PROFILER_IDS_H
+#define PROFILER_IDS_H
+enum profiler_ids {
+  PROFILER_ID_CLI_PARSE,
+  PROFILER_ID_JSON_PARSE,
+  PROFILER_ID_READ_JSON_FILE,
+  PROFILER_ID_PARSER_SETUP,
+  PROFILER_ID_LEX_GET_TOKEN,
+  PROFILER_ID_PARSE_TOKEN,
+  PROFILER_ID_PARSER_TEAR_DOWN,
+  PROFILER_ID_LOAD_JSON_PAIRS,
+  PROFILER_ID_READ_BINARY,
+  PROFILER_ID_HAVERSINE_SUM,
+  PROFILER_ID_HAVERSINE_DISTANCE,
+  PROFILER_ID_HAVERSINE_AVG,
+  PROFILER_ID_TEAR_DOWN,
+  COUNT_PROFILER_IDS,
+};
+#endif // !PROFILER_IDS_H
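
Note: profiler/timer.h is not part of this diff, so the definitions behind PROFILE_START, SAMPLE_START, SAMPLE_END, and PROFILE_END are not shown here. A minimal sketch of how such macros could use these numerical IDs — each enum value indexing a slot in a fixed table of per-sample counters, with PROFILE_START(count) recording how many slots to report — might look like the following. The struct and variable names and the __rdtsc()-based timing are assumptions, not the repository's actual implementation.

/* Hypothetical sketch only; profile_sample, g_samples, and the __rdtsc()
 * timing are assumptions and not taken from the repository's timer.h. */
#include <stdint.h>
#include <stdio.h>
#include <x86intrin.h>

typedef struct {
  const char *label; // human-readable name passed to SAMPLE_START
  uint64_t start;    // TSC value captured by the most recent SAMPLE_START
  uint64_t elapsed;  // cycles accumulated across all hits
  uint64_t hits;     // how many times SAMPLE_END closed this sample
} profile_sample;

enum { MAX_PROFILE_SAMPLES = 64 }; // assumed cap; must be >= COUNT_PROFILER_IDS

static profile_sample g_samples[MAX_PROFILE_SAMPLES];
static uint64_t g_sample_count;  // set by PROFILE_START(count)
static uint64_t g_profile_start; // TSC value at PROFILE_START

#define PROFILE_START(count) \
  do { g_sample_count = (count); g_profile_start = __rdtsc(); } while (0)

#define SAMPLE_START(id, name) \
  do { g_samples[id].label = (name); g_samples[id].start = __rdtsc(); } while (0)

#define SAMPLE_END(id) \
  do { \
    g_samples[id].elapsed += __rdtsc() - g_samples[id].start; \
    g_samples[id].hits += 1; \
  } while (0)

#define PROFILE_END \
  do { \
    uint64_t total = __rdtsc() - g_profile_start; \
    for (uint64_t i = 0; i < g_sample_count; ++i) { \
      if (!g_samples[i].hits) continue; \
      printf("%-28s %14llu cycles (%6.2f%%) %8llu hits\n", g_samples[i].label, \
             (unsigned long long)g_samples[i].elapsed, \
             100.0 * (double)g_samples[i].elapsed / (double)total, \
             (unsigned long long)g_samples[i].hits); \
    } \
  } while (0)

In a sketch like this, keying samples by a dense enum means the new per-iteration PROFILER_ID_HAVERSINE_DISTANCE sample in the sum loop simply accumulates into one slot on every pass, and PROFILE_END can walk the first COUNT_PROFILER_IDS slots to print the report.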


@@ -1,6 +1,7 @@
 #include "haversine.h"
 #include "point_types.h"
 #include "processor/proc_argparser.h"
+#include "profiler/ids.h"
 #include "profiler/timer.h"
 #include "json/dstring.h"
 #include "json/json_entities.h"
@@ -14,21 +15,21 @@
 #include <string.h>
 int main(int argc, char *argv[]) {
-  PROFILE_START;
+  PROFILE_START(COUNT_PROFILER_IDS);
-  SAMPLE_START(cli_parse, "CLI PARSING");
+  SAMPLE_START(PROFILER_ID_CLI_PARSE, "CLI PARSING");
   ProcessorArgs args = parse_args(argc, argv);
-  SAMPLE_END(cli_parse);
+  SAMPLE_END(PROFILER_ID_CLI_PARSE);
-  SAMPLE_START(json_parse, "JSON PARSING");
+  SAMPLE_START(PROFILER_ID_JSON_PARSE, "JSON PARSING");
   jentity_t *root = load_json(args.filepath);
   assert(root->type == JENTITY_SINGLE && root->value.type == JVAL_COLLECTION);
-  SAMPLE_END(json_parse);
+  SAMPLE_END(PROFILER_ID_JSON_PARSE);
-  SAMPLE_START(load_pairs_json, "LOAD JSON PAIRS");
+  SAMPLE_START(PROFILER_ID_LOAD_JSON_PAIRS, "LOAD JSON PAIRS");
   jentity_t *pairs = root->value.collection->begin;
@@ -61,9 +62,9 @@ int main(int argc, char *argv[]) {
     point_pairs[index++] = p;
   }
-  SAMPLE_END(load_pairs_json);
+  SAMPLE_END(PROFILER_ID_LOAD_JSON_PAIRS);
-  SAMPLE_START(binary_file_read, "BINARY READ");
+  SAMPLE_START(PROFILER_ID_READ_BINARY, "BINARY READ");
   const char *filename = "count_and_distances";
@@ -75,15 +76,17 @@ int main(int argc, char *argv[]) {
     fseek(fp, sizeof(u64), SEEK_SET);
   }
-  SAMPLE_END(binary_file_read);
+  SAMPLE_END(PROFILER_ID_READ_BINARY);
-  SAMPLE_START(haversine_sum, "HAVERSINE SUM");
+  SAMPLE_START(PROFILER_ID_HAVERSINE_SUM, "HAVERSINE SUM");
   f64 sum = 0.0;
   f64 distance = 0.0;
   f64 saved_distance = 0.0;
   for (u64 i = 0; i < pair_count; ++i) {
+    SAMPLE_START(PROFILER_ID_HAVERSINE_DISTANCE, "HAVERSINE DISTANCE");
     distance = haversine_of_degrees(point_pairs[i], EARTH_RADIUS_KM);
+    SAMPLE_END(PROFILER_ID_HAVERSINE_DISTANCE);
     if (fp) {
       fread(&saved_distance, sizeof(f64), 1, fp);
@@ -97,13 +100,13 @@ int main(int argc, char *argv[]) {
     sum += distance;
   }
-  SAMPLE_END(haversine_sum);
+  SAMPLE_END(PROFILER_ID_HAVERSINE_SUM);
-  SAMPLE_START(haversine_average, "HAVERSINE AVERAGE");
+  SAMPLE_START(PROFILER_ID_HAVERSINE_AVG, "HAVERSINE AVERAGE");
   printf("\nAVERAGE DISTANCE: %f\n", sum / pair_count);
-  SAMPLE_END(haversine_average);
+  SAMPLE_END(PROFILER_ID_HAVERSINE_AVG);
-  SAMPLE_START(tear_down, "TEAR DOWN");
+  SAMPLE_START(PROFILER_ID_TEAR_DOWN, "TEAR DOWN");
   if (fp) {
     fclose(fp);
@@ -111,7 +114,7 @@ int main(int argc, char *argv[]) {
   free(point_pairs);
-  SAMPLE_END(tear_down);
+  SAMPLE_END(PROFILER_ID_TEAR_DOWN);
   PROFILE_END;
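
For reference only (the function itself is untouched by this commit): haversine_of_degrees, which the new PROFILER_ID_HAVERSINE_DISTANCE sample wraps, computes a great-circle distance for one coordinate pair. A standalone sketch of the standard haversine formula follows; the point_pair field names (x0/y0/x1/y1 as longitude/latitude in degrees) are an assumption, since point_types.h and haversine.h are not shown in this diff.

/* Hypothetical sketch of the standard haversine formula; the field names
 * x0/y0/x1/y1 (longitude/latitude in degrees) are assumed, not from this repo. */
#include <math.h>

typedef struct {
  double x0, y0; // first point:  longitude, latitude (degrees)
  double x1, y1; // second point: longitude, latitude (degrees)
} point_pair;

static double deg_to_rad(double degrees) {
  return degrees * 0.017453292519943295; // pi / 180
}

static double haversine_of_degrees(point_pair p, double radius) {
  double lat1 = deg_to_rad(p.y0);
  double lat2 = deg_to_rad(p.y1);
  double d_lat = deg_to_rad(p.y1 - p.y0);
  double d_lon = deg_to_rad(p.x1 - p.x0);

  double a = sin(d_lat / 2.0) * sin(d_lat / 2.0) +
             cos(lat1) * cos(lat2) * sin(d_lon / 2.0) * sin(d_lon / 2.0);
  return 2.0 * radius * asin(sqrt(a)); // called with EARTH_RADIUS_KM in main above
}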