Add data throughput calculation

2023-09-03 00:26:30 +01:00
parent 19c02b4e99
commit e461de30c0
6 changed files with 45 additions and 29 deletions
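The change passes the number of bytes handled during a sample to SAMPLE_END, so the profiler can report data throughput alongside elapsed time; samples with no associated data now end with SAMPLE_END_DEFAULT instead. Below is a minimal sketch of how a throughput figure could be derived from a byte count and a timed interval; the sample_t layout and helper names are assumptions for illustration, not the project's actual profiler code.

#include <stdint.h>
#include <time.h>

/* Hypothetical sample record; the real profiler's internals are not part of this diff. */
typedef struct {
    const char     *name;   /* label passed to SAMPLE_START */
    struct timespec start;  /* taken when the sample begins */
    struct timespec end;    /* taken when the sample ends   */
    uint64_t        bytes;  /* byte count passed to SAMPLE_END; 0 for SAMPLE_END_DEFAULT */
} sample_t;

/* Elapsed time of a sample in seconds. */
static double sample_seconds(const sample_t *s) {
    return (double)(s->end.tv_sec - s->start.tv_sec)
         + (double)(s->end.tv_nsec - s->start.tv_nsec) / 1e9;
}

/* Throughput in MB/s; only meaningful when a byte count was supplied. */
static double sample_throughput_mbps(const sample_t *s) {
    double secs = sample_seconds(s);
    if (secs <= 0.0 || s->bytes == 0)
        return 0.0;
    return ((double)s->bytes / (1024.0 * 1024.0)) / secs;
}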


@@ -195,7 +195,7 @@ void free_json(jentity_t **root) {
*root = NULL;
SAMPLE_END(PROFILER_ID_FREE_JSON);
SAMPLE_END_DEFAULT(PROFILER_ID_FREE_JSON);
}
jcoll_t *get_collection_from_entity(const jentity_t *entity) {


@@ -44,7 +44,7 @@ jentity_t *load_json(const char *filepath) {
fclose(fp);
SAMPLE_END(PROFILER_ID_READ_JSON_FILE);
SAMPLE_END(PROFILER_ID_READ_JSON_FILE, length);
SAMPLE_START(PROFILER_ID_PARSER_SETUP, "JSON PARSER SETUP");
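Here the byte count handed to SAMPLE_END is presumably the size of the JSON file that was just read, which is what turns the read-file sample into a throughput measurement. A sketch of where such a length could come from, assuming it is obtained via ftell; the helper name read_file is hypothetical and the rest of load_json is only partially visible in this diff.

#include <stdio.h>
#include <stdlib.h>

/* Sketch: read a whole file and report its size, the kind of value that would
 * be passed as SAMPLE_END(PROFILER_ID_READ_JSON_FILE, length) above. */
static char *read_file(const char *filepath, long *length_out) {
    FILE *fp = fopen(filepath, "rb");
    if (!fp) return NULL;

    fseek(fp, 0, SEEK_END);
    long length = ftell(fp);            /* file size in bytes */
    fseek(fp, 0, SEEK_SET);

    char *json = malloc((size_t)length + 1);
    if (json) {
        size_t n = fread(json, 1, (size_t)length, fp);
        json[n] = '\0';                 /* terminate whatever was read */
    }
    fclose(fp);

    *length_out = length;               /* this value feeds the sample's byte count */
    return json;
}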
@@ -63,23 +63,19 @@ jentity_t *load_json(const char *filepath) {
return NULL;
}
SAMPLE_END(PROFILER_ID_PARSER_SETUP);
SAMPLE_END_DEFAULT(PROFILER_ID_PARSER_SETUP);
SAMPLE_START(PROFILER_ID_PARSER_PARSE_TOKENS, "PARSE TOKENS");
SAMPLE_START(PROFILER_ID_LEX_GET_TOKEN, "GET NEXT TOKEN");
lex_result_t result = get_next_token(lexer, json);
SAMPLE_END(PROFILER_ID_LEX_GET_TOKEN);
if (result.error.errno) {
printf("%s\n", result.error.msg);
} else {
while (result.token.type != TK_NO_TOKEN) {
SAMPLE_START(PROFILER_ID_PARSE_TOKEN, "PARSE TOKEN");
parse_token(parser, result.token);
SAMPLE_END(PROFILER_ID_PARSE_TOKEN);
SAMPLE_START(PROFILER_ID_LEX_GET_TOKEN, "GET NEXT TOKEN");
result = get_next_token(lexer, NULL);
SAMPLE_END(PROFILER_ID_LEX_GET_TOKEN);
if (result.error.errno) {
printf("%s\n", result.error.msg);
@@ -88,6 +84,8 @@ jentity_t *load_json(const char *filepath) {
}
}
SAMPLE_END_DEFAULT(PROFILER_ID_PARSER_PARSE_TOKENS);
jentity_t *root = parser->root;
SAMPLE_START(PROFILER_ID_PARSER_TEAR_DOWN, "PARSER TEAR DOWN");
@@ -96,7 +94,7 @@ jentity_t *load_json(const char *filepath) {
lexer_free(&lexer);
free(json);
SAMPLE_END(PROFILER_ID_PARSER_TEAR_DOWN);
SAMPLE_END_DEFAULT(PROFILER_ID_PARSER_TEAR_DOWN);
return root;
}
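Since C macros have no default arguments, the no-data case gets its own SAMPLE_END_DEFAULT variant rather than an optional parameter. One way the two could expand, assuming a hypothetical profiler_sample_end back-end call; the project's real profiler header is not shown in this commit.

#include <stdint.h>

void profiler_sample_end(int id, uint64_t bytes);  /* hypothetical back-end call */

/* Ends a sample and records how many bytes it processed; throughput is derived from this. */
#define SAMPLE_END(id, bytes)   profiler_sample_end((id), (uint64_t)(bytes))

/* Ends a sample with no associated data; no throughput is reported for it. */
#define SAMPLE_END_DEFAULT(id)  profiler_sample_end((id), 0u)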