2025-12-21 08:44:04 +01:00
|
|
|
#include "amduat/enc/asl1_core.h"
|
|
|
|
|
#include "amduat/enc/tgk1_edge.h"
|
|
|
|
|
#include "amduat/hash/asl1.h"
|
|
|
|
|
#include "amduat/tgk/tgk_store_mem.h"
|
|
|
|
|
|
|
|
|
|
#include <stdbool.h>
|
|
|
|
|
#include <stdint.h>
|
|
|
|
|
#include <stdio.h>
|
|
|
|
|
#include <stdlib.h>
|
|
|
|
|
#include <string.h>
|
|
|
|
|
|
|
|
|
|
/* Payload bytes that are NOT a valid encoded TGK edge; seeded into the store
 * as the untyped and mis-tagged artifacts used by the negative tests. */
static const uint8_t k_non_edge_bytes[] = {0xde, 0xad, 0xbe, 0xef};
|
|
|
|
|
|
|
|
|
|
/* Shared fixture for the resolve/scan tests.  Every buffer the store config
 * or the references point at lives inside this struct, so one init_env() /
 * free_env() pair manages the entire environment's lifetime. */
typedef struct {
  amduat_tgk_store_t store;  /* store facade bound to |mem| via mem ops */
  amduat_tgk_store_mem_t mem; /* in-memory backing store */
  /* Seeded content: 3 valid edges, 1 edge with an unknown type, 1 untyped
   * non-edge blob, 1 non-edge blob mis-tagged as an edge. */
  amduat_tgk_store_mem_artifact_t artifacts[6];
  /* Heap-owned encoded bytes for the four edge artifacts (freed by
   * free_edge_bytes()). */
  amduat_octets_t edge_bytes[4];
  amduat_reference_t ref_edge1;   /* valid edge, type 0x10 (a -> b) */
  amduat_reference_t ref_edge2;   /* valid edge, type 0x10 (a -> c) */
  amduat_reference_t ref_edge3;   /* valid edge, type 0x20 (b -> a) */
  amduat_reference_t ref_edge_bad; /* edge encoded with unlisted type 0x99 */
  amduat_reference_t ref_non_edge; /* untyped artifact, not an edge */
  amduat_reference_t ref_invalid_tagged; /* edge-tagged but undecodable bytes */
  amduat_reference_t ref_missing; /* reference with no artifact in the store */
  amduat_reference_t node_a;      /* graph endpoints (no backing artifacts) */
  amduat_reference_t node_b;
  amduat_reference_t node_c;
  amduat_reference_t payload1;    /* per-edge payload references */
  amduat_reference_t payload2;
  amduat_reference_t payload3;
  amduat_reference_t payload_bad;
  /* Backing storage for the digests of the references above; each ref's
   * octets point into the matching 32-byte array. */
  uint8_t digest_edge1[32];
  uint8_t digest_edge2[32];
  uint8_t digest_edge3[32];
  uint8_t digest_edge_bad[32];
  uint8_t digest_non_edge[32];
  uint8_t digest_invalid_tagged[32];
  uint8_t digest_missing[32];
  uint8_t digest_node_a[32];
  uint8_t digest_node_b[32];
  uint8_t digest_node_c[32];
  uint8_t digest_payload1[32];
  uint8_t digest_payload2[32];
  uint8_t digest_payload3[32];
  uint8_t digest_payload_bad[32];
  /* Backing tables referenced by |config| (config stores pointers only). */
  amduat_tgk_identity_domain_t domains[1];
  uint32_t edge_tags[1];
  amduat_tgk_edge_type_id_t edge_types[2];
  amduat_asl_encoding_profile_id_t encodings[1];
  amduat_tgk_store_config_t config;
} test_env_t;
|
|
|
|
|
|
|
|
|
|
/* Write the single byte |value| into all 32 positions of |out|. */
static void fill_digest(uint8_t *out, uint8_t value) {
  size_t i;

  for (i = 0; i < 32; ++i) {
    out[i] = value;
  }
}
|
|
|
|
|
|
|
|
|
|
static amduat_reference_t make_ref(uint8_t value, uint8_t *storage) {
|
|
|
|
|
fill_digest(storage, value);
|
|
|
|
|
return amduat_reference(AMDUAT_HASH_ASL1_ID_SHA256,
|
|
|
|
|
amduat_octets(storage, 32));
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-21 19:46:56 +01:00
|
|
|
/* Release a heap-allocated digest owned by |ref| and leave the reference in
 * an empty, safely reusable state.  NULL is tolerated as a no-op. */
static void free_ref(amduat_reference_t *ref) {
  if (ref != NULL) {
    free((void *)ref->digest.data);
    ref->digest.data = NULL;
    ref->digest.len = 0;
  }
}
|
|
|
|
|
|
2025-12-21 08:44:04 +01:00
|
|
|
/* Free the four heap-owned encoded-edge buffers in |env| and reset each
 * octets slot so a later free is harmless. */
static void free_edge_bytes(test_env_t *env) {
  size_t idx = 0;

  while (idx < 4) {
    free((void *)env->edge_bytes[idx].data);
    env->edge_bytes[idx].data = NULL;
    env->edge_bytes[idx].len = 0;
    ++idx;
  }
}
|
|
|
|
|
|
|
|
|
|
/* Build the shared test environment: construct references, encode four edges,
 * seed six artifacts into an in-memory store, and bind the store facade.
 * Returns false (with any encoded buffers freed) on encode or init failure.
 * On success the caller owns |env| and must call free_env(). */
static bool init_env(test_env_t *env) {
  amduat_tgk_edge_body_t edge1;
  amduat_tgk_edge_body_t edge2;
  amduat_tgk_edge_body_t edge3;
  amduat_tgk_edge_body_t edge_bad;
  /* Endpoint arrays only need to live until the edges are encoded below. */
  amduat_reference_t edge1_from[1];
  amduat_reference_t edge1_to[1];
  amduat_reference_t edge2_from[1];
  amduat_reference_t edge2_to[1];
  amduat_reference_t edge3_from[1];
  amduat_reference_t edge3_to[1];
  amduat_reference_t edge_bad_from[1];
  amduat_reference_t edge_bad_to[1];

  memset(env, 0, sizeof(*env));

  /* Store profile: one SHA-256 identity domain, one edge tag, the two edge
   * types (0x10, 0x20) the valid edges use, and one encoding profile. */
  env->domains[0].encoding_profile = AMDUAT_ENC_ASL1_CORE_V1;
  env->domains[0].hash_id = AMDUAT_HASH_ASL1_ID_SHA256;
  env->edge_tags[0] = TYPE_TAG_TGK1_EDGE_V1;
  env->edge_types[0] = 0x10;
  env->edge_types[1] = 0x20;
  env->encodings[0] = TGK1_EDGE_ENC_V1;

  /* Config holds pointers into |env|'s tables above. */
  env->config.id_space.domains = env->domains;
  env->config.id_space.domains_len = 1;
  env->config.artifact_scope.description = amduat_octets(NULL, 0);
  env->config.tgk_profiles.edge_tags = env->edge_tags;
  env->config.tgk_profiles.edge_tags_len = 1;
  env->config.tgk_profiles.edge_types = env->edge_types;
  env->config.tgk_profiles.edge_types_len = 2;
  env->config.tgk_profiles.encodings = env->encodings;
  env->config.tgk_profiles.encodings_len = 1;

  /* Distinct constant-byte digests keep every reference unique. */
  env->ref_edge1 = make_ref(0x20, env->digest_edge1);
  env->ref_edge2 = make_ref(0x10, env->digest_edge2);
  env->ref_edge3 = make_ref(0x30, env->digest_edge3);
  env->ref_edge_bad = make_ref(0x40, env->digest_edge_bad);
  env->ref_non_edge = make_ref(0x50, env->digest_non_edge);
  env->ref_invalid_tagged = make_ref(0x55, env->digest_invalid_tagged);
  /* ref_missing is never seeded as an artifact — used by the lookup-miss test. */
  env->ref_missing = make_ref(0x60, env->digest_missing);

  env->node_a = make_ref(0xa1, env->digest_node_a);
  env->node_b = make_ref(0xb1, env->digest_node_b);
  env->node_c = make_ref(0xc1, env->digest_node_c);

  env->payload1 = make_ref(0xe1, env->digest_payload1);
  env->payload2 = make_ref(0xe2, env->digest_payload2);
  env->payload3 = make_ref(0xe3, env->digest_payload3);
  env->payload_bad = make_ref(0xe4, env->digest_payload_bad);

  /* edge1: type 0x10, node_a -> node_b. */
  memset(&edge1, 0, sizeof(edge1));
  edge1.type = 0x10;
  edge1_from[0] = env->node_a;
  edge1.from = edge1_from;
  edge1.from_len = 1;
  edge1_to[0] = env->node_b;
  edge1.to = edge1_to;
  edge1.to_len = 1;
  edge1.payload = env->payload1;

  /* edge2: type 0x10, node_a -> node_c. */
  memset(&edge2, 0, sizeof(edge2));
  edge2.type = 0x10;
  edge2_from[0] = env->node_a;
  edge2.from = edge2_from;
  edge2.from_len = 1;
  edge2_to[0] = env->node_c;
  edge2.to = edge2_to;
  edge2.to_len = 1;
  edge2.payload = env->payload2;

  /* edge3: type 0x20, node_b -> node_a. */
  memset(&edge3, 0, sizeof(edge3));
  edge3.type = 0x20;
  edge3_from[0] = env->node_b;
  edge3.from = edge3_from;
  edge3.from_len = 1;
  edge3_to[0] = env->node_a;
  edge3.to = edge3_to;
  edge3.to_len = 1;
  edge3.payload = env->payload3;

  /* edge_bad: type 0x99 is NOT in the configured edge_types table, so
   * resolving it must fail with a not-an-edge error. */
  memset(&edge_bad, 0, sizeof(edge_bad));
  edge_bad.type = 0x99;
  edge_bad_from[0] = env->node_a;
  edge_bad.from = edge_bad_from;
  edge_bad.from_len = 1;
  edge_bad_to[0] = env->node_b;
  edge_bad.to = edge_bad_to;
  edge_bad.to_len = 1;
  edge_bad.payload = env->payload_bad;

  /* Encode all four edges; each success allocates env->edge_bytes[i].
   * free_edge_bytes() tolerates the partially-filled state on failure. */
  if (!amduat_enc_tgk1_edge_encode_v1(&edge1, &env->edge_bytes[0]) ||
      !amduat_enc_tgk1_edge_encode_v1(&edge2, &env->edge_bytes[1]) ||
      !amduat_enc_tgk1_edge_encode_v1(&edge3, &env->edge_bytes[2]) ||
      !amduat_enc_tgk1_edge_encode_v1(&edge_bad, &env->edge_bytes[3])) {
    free_edge_bytes(env);
    return false;
  }

  /* Artifacts 0-3: the encoded edges, tagged as edges. */
  env->artifacts[0].ref = env->ref_edge1;
  env->artifacts[0].artifact =
      amduat_artifact_with_type(env->edge_bytes[0],
                                amduat_type_tag(TYPE_TAG_TGK1_EDGE_V1));
  env->artifacts[1].ref = env->ref_edge2;
  env->artifacts[1].artifact =
      amduat_artifact_with_type(env->edge_bytes[1],
                                amduat_type_tag(TYPE_TAG_TGK1_EDGE_V1));
  env->artifacts[2].ref = env->ref_edge3;
  env->artifacts[2].artifact =
      amduat_artifact_with_type(env->edge_bytes[2],
                                amduat_type_tag(TYPE_TAG_TGK1_EDGE_V1));
  env->artifacts[3].ref = env->ref_edge_bad;
  env->artifacts[3].artifact =
      amduat_artifact_with_type(env->edge_bytes[3],
                                amduat_type_tag(TYPE_TAG_TGK1_EDGE_V1));
  /* Artifact 4: raw bytes with no type tag — not an edge. */
  env->artifacts[4].ref = env->ref_non_edge;
  env->artifacts[4].artifact =
      amduat_artifact(amduat_octets(k_non_edge_bytes, sizeof(k_non_edge_bytes)));
  /* Artifact 5: edge-tagged but the bytes do not decode as an edge. */
  env->artifacts[5].ref = env->ref_invalid_tagged;
  env->artifacts[5].artifact =
      amduat_artifact_with_type(amduat_octets(k_non_edge_bytes,
                                              sizeof(k_non_edge_bytes)),
                                amduat_type_tag(TYPE_TAG_TGK1_EDGE_V1));

  if (!amduat_tgk_store_mem_init(&env->mem, env->config, env->artifacts, 6)) {
    free_edge_bytes(env);
    return false;
  }

  /* Bind the generic store facade to the in-memory backend. */
  amduat_tgk_store_init(&env->store, env->config, amduat_tgk_store_mem_ops(),
                        &env->mem);
  return true;
}
|
|
|
|
|
|
|
|
|
|
/* Tear down an environment created by init_env(): release the in-memory
 * store first, then the heap-owned encoded edge buffers. */
static void free_env(test_env_t *env) {
  amduat_tgk_store_mem_free(&env->mem);
  free_edge_bytes(env);
}
|
|
|
|
|
|
|
|
|
|
static int test_resolve_edge_ok(const test_env_t *env) {
|
|
|
|
|
amduat_tgk_edge_body_t body;
|
|
|
|
|
amduat_tgk_graph_error_t err;
|
|
|
|
|
|
|
|
|
|
err = amduat_tgk_store_resolve_edge((amduat_tgk_store_t *)&env->store,
|
|
|
|
|
env->ref_edge1, &body);
|
|
|
|
|
if (err != 0) {
|
|
|
|
|
fprintf(stderr, "resolve_edge ok failed: %d\n", err);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
if (body.type != 0x10 || body.from_len != 1 || body.to_len != 1) {
|
|
|
|
|
fprintf(stderr, "resolve_edge body mismatch\n");
|
|
|
|
|
amduat_tgk_edge_body_free(&body);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
if (!amduat_reference_eq(body.from[0], env->node_a) ||
|
|
|
|
|
!amduat_reference_eq(body.to[0], env->node_b)) {
|
|
|
|
|
fprintf(stderr, "resolve_edge endpoints mismatch\n");
|
|
|
|
|
amduat_tgk_edge_body_free(&body);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_edge_body_free(&body);
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static int test_resolve_edge_not_edge(const test_env_t *env) {
|
|
|
|
|
amduat_tgk_edge_body_t body;
|
|
|
|
|
amduat_tgk_graph_error_t err;
|
|
|
|
|
|
|
|
|
|
err = amduat_tgk_store_resolve_edge((amduat_tgk_store_t *)&env->store,
|
|
|
|
|
env->ref_edge_bad, &body);
|
|
|
|
|
if (err != GS_ERR_NOT_EDGE) {
|
|
|
|
|
fprintf(stderr, "resolve_edge not_edge mismatch: %d\n", err);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-21 19:46:56 +01:00
|
|
|
static int test_resolve_edge_invalid_bytes(const test_env_t *env) {
|
|
|
|
|
amduat_tgk_edge_body_t body;
|
|
|
|
|
amduat_tgk_graph_error_t err;
|
|
|
|
|
|
|
|
|
|
err = amduat_tgk_store_resolve_edge((amduat_tgk_store_t *)&env->store,
|
|
|
|
|
env->ref_invalid_tagged, &body);
|
|
|
|
|
if (err != GS_ERR_NOT_EDGE) {
|
|
|
|
|
fprintf(stderr, "resolve_edge invalid bytes mismatch: %d\n", err);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-21 19:23:41 +01:00
|
|
|
static int test_resolve_edge_missing(const test_env_t *env) {
|
|
|
|
|
amduat_tgk_edge_body_t body;
|
|
|
|
|
amduat_tgk_graph_error_t err;
|
|
|
|
|
|
|
|
|
|
err = amduat_tgk_store_resolve_edge((amduat_tgk_store_t *)&env->store,
|
|
|
|
|
env->ref_missing, &body);
|
|
|
|
|
if (err != GS_ERR_ARTIFACT_ERROR) {
|
|
|
|
|
fprintf(stderr, "resolve_edge missing mismatch: %d\n", err);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static bool make_ref_for_hash(amduat_hash_id_t hash_id,
|
|
|
|
|
uint8_t value,
|
|
|
|
|
amduat_reference_t *out_ref) {
|
|
|
|
|
const amduat_hash_asl1_desc_t *desc;
|
|
|
|
|
uint8_t *digest;
|
|
|
|
|
|
|
|
|
|
if (out_ref == NULL) {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
desc = amduat_hash_asl1_desc_lookup(hash_id);
|
|
|
|
|
if (desc == NULL || desc->digest_len == 0) {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
digest = (uint8_t *)malloc(desc->digest_len);
|
|
|
|
|
if (digest == NULL) {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
memset(digest, value, desc->digest_len);
|
|
|
|
|
*out_ref = amduat_reference(hash_id, amduat_octets(digest, desc->digest_len));
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-21 19:46:56 +01:00
|
|
|
static int test_init_rejects_unsupported_encoding(void) {
|
|
|
|
|
amduat_tgk_store_mem_t mem;
|
|
|
|
|
amduat_tgk_store_config_t config;
|
|
|
|
|
amduat_asl_encoding_profile_id_t encodings[1];
|
|
|
|
|
|
|
|
|
|
memset(&mem, 0, sizeof(mem));
|
|
|
|
|
memset(&config, 0, sizeof(config));
|
|
|
|
|
encodings[0] = 0x9999u;
|
|
|
|
|
config.tgk_profiles.encodings = encodings;
|
|
|
|
|
config.tgk_profiles.encodings_len = 1;
|
|
|
|
|
|
|
|
|
|
if (amduat_tgk_store_mem_init(&mem, config, NULL, 0)) {
|
|
|
|
|
fprintf(stderr, "init accepted unsupported encoding\n");
|
|
|
|
|
amduat_tgk_store_mem_free(&mem);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-21 20:24:34 +01:00
|
|
|
static int test_init_rejects_duplicate_hash_id(void) {
|
|
|
|
|
amduat_tgk_store_mem_t mem;
|
|
|
|
|
amduat_tgk_store_config_t config;
|
|
|
|
|
amduat_tgk_identity_domain_t domains[2];
|
|
|
|
|
|
|
|
|
|
memset(&mem, 0, sizeof(mem));
|
|
|
|
|
memset(&config, 0, sizeof(config));
|
|
|
|
|
domains[0].encoding_profile = AMDUAT_ENC_ASL1_CORE_V1;
|
|
|
|
|
domains[0].hash_id = AMDUAT_HASH_ASL1_ID_SHA256;
|
|
|
|
|
domains[1].encoding_profile = AMDUAT_ENC_ASL1_CORE_V1;
|
|
|
|
|
domains[1].hash_id = AMDUAT_HASH_ASL1_ID_SHA256;
|
|
|
|
|
|
|
|
|
|
config.id_space.domains = domains;
|
|
|
|
|
config.id_space.domains_len = 2;
|
|
|
|
|
|
|
|
|
|
if (amduat_tgk_store_mem_init(&mem, config, NULL, 0)) {
|
|
|
|
|
fprintf(stderr, "init accepted duplicate hash_id domains\n");
|
|
|
|
|
amduat_tgk_store_mem_free(&mem);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-22 11:42:41 +01:00
|
|
|
static int test_init_rejects_invalid_profile_config(void) {
|
|
|
|
|
amduat_tgk_store_mem_t mem;
|
|
|
|
|
amduat_tgk_store_config_t config;
|
|
|
|
|
amduat_asl_encoding_profile_id_t encodings[1];
|
|
|
|
|
uint32_t edge_tags[2];
|
|
|
|
|
amduat_tgk_edge_type_id_t edge_types[2];
|
|
|
|
|
|
|
|
|
|
encodings[0] = TGK1_EDGE_ENC_V1;
|
|
|
|
|
edge_tags[0] = TYPE_TAG_TGK1_EDGE_V1;
|
|
|
|
|
edge_tags[1] = TYPE_TAG_TGK1_EDGE_V1;
|
|
|
|
|
edge_types[0] = 0x10;
|
|
|
|
|
edge_types[1] = 0x10;
|
|
|
|
|
|
|
|
|
|
memset(&mem, 0, sizeof(mem));
|
|
|
|
|
memset(&config, 0, sizeof(config));
|
|
|
|
|
config.tgk_profiles.encodings = encodings;
|
|
|
|
|
config.tgk_profiles.encodings_len = 1;
|
|
|
|
|
config.tgk_profiles.edge_tags_len = 1;
|
|
|
|
|
config.tgk_profiles.edge_tags = NULL;
|
|
|
|
|
|
|
|
|
|
if (amduat_tgk_store_mem_init(&mem, config, NULL, 0)) {
|
|
|
|
|
fprintf(stderr, "init accepted null edge_tags\n");
|
|
|
|
|
amduat_tgk_store_mem_free(&mem);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
memset(&mem, 0, sizeof(mem));
|
|
|
|
|
memset(&config, 0, sizeof(config));
|
|
|
|
|
config.tgk_profiles.encodings = encodings;
|
|
|
|
|
config.tgk_profiles.encodings_len = 1;
|
|
|
|
|
config.tgk_profiles.edge_types_len = 1;
|
|
|
|
|
config.tgk_profiles.edge_types = NULL;
|
|
|
|
|
|
|
|
|
|
if (amduat_tgk_store_mem_init(&mem, config, NULL, 0)) {
|
|
|
|
|
fprintf(stderr, "init accepted null edge_types\n");
|
|
|
|
|
amduat_tgk_store_mem_free(&mem);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
memset(&mem, 0, sizeof(mem));
|
|
|
|
|
memset(&config, 0, sizeof(config));
|
|
|
|
|
config.tgk_profiles.encodings = encodings;
|
|
|
|
|
config.tgk_profiles.encodings_len = 1;
|
|
|
|
|
|
|
|
|
|
if (amduat_tgk_store_mem_init(&mem, config, NULL, 0)) {
|
|
|
|
|
fprintf(stderr, "init accepted encoding without edge tags\n");
|
|
|
|
|
amduat_tgk_store_mem_free(&mem);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
memset(&mem, 0, sizeof(mem));
|
|
|
|
|
memset(&config, 0, sizeof(config));
|
|
|
|
|
config.tgk_profiles.edge_tags = edge_tags;
|
|
|
|
|
config.tgk_profiles.edge_tags_len = 1;
|
|
|
|
|
|
|
|
|
|
if (amduat_tgk_store_mem_init(&mem, config, NULL, 0)) {
|
|
|
|
|
fprintf(stderr, "init accepted edge tag without encoding\n");
|
|
|
|
|
amduat_tgk_store_mem_free(&mem);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
memset(&mem, 0, sizeof(mem));
|
|
|
|
|
memset(&config, 0, sizeof(config));
|
|
|
|
|
config.tgk_profiles.edge_tags = edge_tags;
|
|
|
|
|
config.tgk_profiles.edge_tags_len = 2;
|
|
|
|
|
|
|
|
|
|
if (amduat_tgk_store_mem_init(&mem, config, NULL, 0)) {
|
|
|
|
|
fprintf(stderr, "init accepted duplicate edge tags\n");
|
|
|
|
|
amduat_tgk_store_mem_free(&mem);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
memset(&mem, 0, sizeof(mem));
|
|
|
|
|
memset(&config, 0, sizeof(config));
|
|
|
|
|
config.tgk_profiles.edge_types = edge_types;
|
|
|
|
|
config.tgk_profiles.edge_types_len = 2;
|
|
|
|
|
|
|
|
|
|
if (amduat_tgk_store_mem_init(&mem, config, NULL, 0)) {
|
|
|
|
|
fprintf(stderr, "init accepted duplicate edge types\n");
|
|
|
|
|
amduat_tgk_store_mem_free(&mem);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-21 20:24:34 +01:00
|
|
|
/* Seeding two artifact entries that share the same reference AND byte-
 * identical content must be accepted by init, and the duplicate must be
 * deduplicated: the edge resolves normally and a full scan reports it
 * exactly once.  Returns 0 on pass, 1 on failure. */
static int test_duplicate_edge_ref_same_artifact(void) {
  amduat_tgk_store_mem_t mem;
  amduat_tgk_store_t store;
  amduat_tgk_store_config_t config;
  amduat_tgk_identity_domain_t domains[1];
  uint32_t edge_tags[1];
  amduat_tgk_edge_type_id_t edge_types[1];
  amduat_asl_encoding_profile_id_t encodings[1];
  amduat_tgk_store_mem_artifact_t artifacts[2];
  amduat_tgk_edge_body_t edge;
  amduat_reference_t from_refs[1];
  amduat_reference_t to_refs[1];
  amduat_reference_t ref_edge;
  amduat_reference_t node_a;
  amduat_reference_t node_b;
  amduat_reference_t payload;
  amduat_octets_t edge_bytes; /* heap-owned after successful encode */
  amduat_tgk_edge_body_t body;
  amduat_tgk_graph_scan_result_t scan;
  uint8_t digest_edge[32];
  uint8_t digest_a[32];
  uint8_t digest_b[32];
  uint8_t digest_payload[32];

  memset(&mem, 0, sizeof(mem));
  memset(&config, 0, sizeof(config));
  memset(&edge, 0, sizeof(edge));
  edge_bytes = amduat_octets(NULL, 0);

  /* Minimal valid profile: one domain, one edge tag/type, one encoding. */
  domains[0].encoding_profile = AMDUAT_ENC_ASL1_CORE_V1;
  domains[0].hash_id = AMDUAT_HASH_ASL1_ID_SHA256;
  edge_tags[0] = TYPE_TAG_TGK1_EDGE_V1;
  edge_types[0] = 0x10;
  encodings[0] = TGK1_EDGE_ENC_V1;

  config.id_space.domains = domains;
  config.id_space.domains_len = 1;
  config.tgk_profiles.edge_tags = edge_tags;
  config.tgk_profiles.edge_tags_len = 1;
  config.tgk_profiles.edge_types = edge_types;
  config.tgk_profiles.edge_types_len = 1;
  config.tgk_profiles.encodings = encodings;
  config.tgk_profiles.encodings_len = 1;

  ref_edge = make_ref(0x71, digest_edge);
  node_a = make_ref(0xa1, digest_a);
  node_b = make_ref(0xb1, digest_b);
  payload = make_ref(0xe1, digest_payload);

  /* One edge: type 0x10, node_a -> node_b. */
  edge.type = 0x10;
  from_refs[0] = node_a;
  edge.from = from_refs;
  edge.from_len = 1;
  to_refs[0] = node_b;
  edge.to = to_refs;
  edge.to_len = 1;
  edge.payload = payload;

  if (!amduat_enc_tgk1_edge_encode_v1(&edge, &edge_bytes)) {
    fprintf(stderr, "duplicate ref encode failed\n");
    return 1;
  }

  /* Both slots carry the same ref and the same encoded bytes. */
  artifacts[0].ref = ref_edge;
  artifacts[0].artifact =
      amduat_artifact_with_type(edge_bytes,
                                amduat_type_tag(TYPE_TAG_TGK1_EDGE_V1));
  artifacts[1].ref = ref_edge;
  artifacts[1].artifact =
      amduat_artifact_with_type(edge_bytes,
                                amduat_type_tag(TYPE_TAG_TGK1_EDGE_V1));

  /* Identical duplicates must be accepted. */
  if (!amduat_tgk_store_mem_init(&mem, config, artifacts, 2)) {
    fprintf(stderr, "init rejected duplicate identical artifacts\n");
    free((void *)edge_bytes.data);
    return 1;
  }

  amduat_tgk_store_init(&store, config, amduat_tgk_store_mem_ops(), &mem);

  /* The duplicated edge must still resolve cleanly. */
  if (amduat_tgk_store_resolve_edge(&store, ref_edge, &body) != 0) {
    fprintf(stderr, "resolve_edge failed for duplicate ref\n");
    amduat_tgk_store_mem_free(&mem);
    free((void *)edge_bytes.data);
    return 1;
  }
  amduat_tgk_edge_body_free(&body);

  /* Unfiltered scan (no type filter, no page token). */
  if (!amduat_tgk_store_scan_edges(&store,
                                   (amduat_tgk_edge_type_filter_t){0},
                                   amduat_octets(NULL, 0), false, &scan)) {
    fprintf(stderr, "scan_edges failed for duplicate ref\n");
    amduat_tgk_store_mem_free(&mem);
    free((void *)edge_bytes.data);
    return 1;
  }
  /* Deduplication: the edge appears exactly once despite two seed entries. */
  if (scan.edges.len != 1) {
    fprintf(stderr, "duplicate ref scan count mismatch\n");
    amduat_tgk_graph_scan_result_free(&scan);
    amduat_tgk_store_mem_free(&mem);
    free((void *)edge_bytes.data);
    return 1;
  }
  amduat_tgk_graph_scan_result_free(&scan);
  amduat_tgk_store_mem_free(&mem);
  free((void *)edge_bytes.data);
  return 0;
}
|
|
|
|
|
|
|
|
|
|
/* Two artifact entries sharing one reference but carrying DIFFERENT bytes
 * (same endpoints, different payloads) are a content conflict, and store
 * init must reject them.  Returns 0 on pass, 1 on failure. */
static int test_duplicate_edge_ref_conflict(void) {
  amduat_tgk_store_mem_t mem;
  amduat_tgk_store_config_t config;
  amduat_tgk_identity_domain_t domains[1];
  uint32_t edge_tags[1];
  amduat_tgk_edge_type_id_t edge_types[1];
  amduat_asl_encoding_profile_id_t encodings[1];
  amduat_tgk_store_mem_artifact_t artifacts[2];
  amduat_tgk_edge_body_t edge_a;
  amduat_tgk_edge_body_t edge_b;
  amduat_reference_t from_refs[1];
  amduat_reference_t to_refs[1];
  amduat_reference_t ref_edge;
  amduat_reference_t node_a;
  amduat_reference_t node_b;
  amduat_reference_t payload_a;
  amduat_reference_t payload_b;
  amduat_octets_t bytes_a; /* heap-owned after successful encode */
  amduat_octets_t bytes_b;
  uint8_t digest_edge[32];
  uint8_t digest_a[32];
  uint8_t digest_b[32];
  uint8_t digest_payload_a[32];
  uint8_t digest_payload_b[32];

  memset(&mem, 0, sizeof(mem));
  memset(&config, 0, sizeof(config));
  memset(&edge_a, 0, sizeof(edge_a));
  memset(&edge_b, 0, sizeof(edge_b));
  bytes_a = amduat_octets(NULL, 0);
  bytes_b = amduat_octets(NULL, 0);

  /* Minimal valid profile, same as the identical-duplicate test. */
  domains[0].encoding_profile = AMDUAT_ENC_ASL1_CORE_V1;
  domains[0].hash_id = AMDUAT_HASH_ASL1_ID_SHA256;
  edge_tags[0] = TYPE_TAG_TGK1_EDGE_V1;
  edge_types[0] = 0x10;
  encodings[0] = TGK1_EDGE_ENC_V1;

  config.id_space.domains = domains;
  config.id_space.domains_len = 1;
  config.tgk_profiles.edge_tags = edge_tags;
  config.tgk_profiles.edge_tags_len = 1;
  config.tgk_profiles.edge_types = edge_types;
  config.tgk_profiles.edge_types_len = 1;
  config.tgk_profiles.encodings = encodings;
  config.tgk_profiles.encodings_len = 1;

  ref_edge = make_ref(0x72, digest_edge);
  node_a = make_ref(0xa1, digest_a);
  node_b = make_ref(0xb1, digest_b);
  payload_a = make_ref(0xe1, digest_payload_a);
  payload_b = make_ref(0xe2, digest_payload_b);

  /* edge_a and edge_b share endpoints but differ in payload, so their
   * encoded bytes differ — that is the conflict under test. */
  edge_a.type = 0x10;
  from_refs[0] = node_a;
  edge_a.from = from_refs;
  edge_a.from_len = 1;
  to_refs[0] = node_b;
  edge_a.to = to_refs;
  edge_a.to_len = 1;
  edge_a.payload = payload_a;

  edge_b.type = 0x10;
  edge_b.from = from_refs;
  edge_b.from_len = 1;
  edge_b.to = to_refs;
  edge_b.to_len = 1;
  edge_b.payload = payload_b;

  if (!amduat_enc_tgk1_edge_encode_v1(&edge_a, &bytes_a) ||
      !amduat_enc_tgk1_edge_encode_v1(&edge_b, &bytes_b)) {
    fprintf(stderr, "conflict encode failed\n");
    free((void *)bytes_a.data);
    free((void *)bytes_b.data);
    return 1;
  }

  /* Same reference, different content. */
  artifacts[0].ref = ref_edge;
  artifacts[0].artifact =
      amduat_artifact_with_type(bytes_a,
                                amduat_type_tag(TYPE_TAG_TGK1_EDGE_V1));
  artifacts[1].ref = ref_edge;
  artifacts[1].artifact =
      amduat_artifact_with_type(bytes_b,
                                amduat_type_tag(TYPE_TAG_TGK1_EDGE_V1));

  /* Init must fail; success is the test failure. */
  if (amduat_tgk_store_mem_init(&mem, config, artifacts, 2)) {
    fprintf(stderr, "init accepted conflicting duplicate artifacts\n");
    amduat_tgk_store_mem_free(&mem);
    free((void *)bytes_a.data);
    free((void *)bytes_b.data);
    return 1;
  }

  free((void *)bytes_a.data);
  free((void *)bytes_b.data);
  return 0;
}
|
|
|
|
|
|
2025-12-21 20:36:19 +01:00
|
|
|
static int ref_cmp(const void *a, const void *b) {
|
|
|
|
|
const amduat_reference_t *ref_a = (const amduat_reference_t *)a;
|
|
|
|
|
const amduat_reference_t *ref_b = (const amduat_reference_t *)b;
|
|
|
|
|
size_t min_len;
|
|
|
|
|
int cmp;
|
|
|
|
|
|
|
|
|
|
if (ref_a->hash_id < ref_b->hash_id) {
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
if (ref_a->hash_id > ref_b->hash_id) {
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
min_len = ref_a->digest.len < ref_b->digest.len ? ref_a->digest.len
|
|
|
|
|
: ref_b->digest.len;
|
|
|
|
|
if (min_len != 0 && ref_a->digest.data != NULL &&
|
|
|
|
|
ref_b->digest.data != NULL) {
|
|
|
|
|
cmp = memcmp(ref_a->digest.data, ref_b->digest.data, min_len);
|
|
|
|
|
if (cmp != 0) {
|
|
|
|
|
return cmp;
|
|
|
|
|
}
|
|
|
|
|
} else if (min_len != 0) {
|
|
|
|
|
return (ref_a->digest.data == NULL) ? -1 : 1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (ref_a->digest.len < ref_b->digest.len) {
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
if (ref_a->digest.len > ref_b->digest.len) {
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static amduat_reference_t make_index_ref(uint16_t index, uint8_t *storage) {
|
|
|
|
|
memset(storage, 0, 32);
|
|
|
|
|
storage[0] = (uint8_t)((index >> 8) & 0xffu);
|
|
|
|
|
storage[1] = (uint8_t)(index & 0xffu);
|
|
|
|
|
return amduat_reference(AMDUAT_HASH_ASL1_ID_SHA256,
|
|
|
|
|
amduat_octets(storage, 32));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static int test_scan_edges_pagination(void) {
|
|
|
|
|
const size_t edge_count = 300;
|
|
|
|
|
amduat_tgk_store_mem_t mem;
|
|
|
|
|
amduat_tgk_store_t store;
|
|
|
|
|
amduat_tgk_store_config_t config;
|
|
|
|
|
amduat_tgk_identity_domain_t domains[1];
|
|
|
|
|
uint32_t edge_tags[1];
|
|
|
|
|
amduat_tgk_edge_type_id_t edge_types[1];
|
|
|
|
|
amduat_asl_encoding_profile_id_t encodings[1];
|
|
|
|
|
amduat_tgk_store_mem_artifact_t *artifacts = NULL;
|
|
|
|
|
amduat_octets_t *edge_bytes = NULL;
|
|
|
|
|
amduat_reference_t *expected = NULL;
|
|
|
|
|
uint8_t *digests = NULL;
|
|
|
|
|
amduat_reference_t node_a;
|
|
|
|
|
amduat_reference_t node_b;
|
|
|
|
|
amduat_reference_t payload;
|
|
|
|
|
amduat_tgk_edge_body_t edge;
|
|
|
|
|
amduat_reference_t from_refs[1];
|
|
|
|
|
amduat_reference_t to_refs[1];
|
|
|
|
|
uint8_t digest_a[32];
|
|
|
|
|
uint8_t digest_b[32];
|
|
|
|
|
uint8_t digest_payload[32];
|
|
|
|
|
amduat_octets_t page_token;
|
|
|
|
|
bool has_page_token = false;
|
|
|
|
|
size_t seen = 0;
|
|
|
|
|
size_t pages = 0;
|
|
|
|
|
size_t i;
|
|
|
|
|
int exit_code = 1;
|
|
|
|
|
|
|
|
|
|
memset(&mem, 0, sizeof(mem));
|
|
|
|
|
memset(&config, 0, sizeof(config));
|
|
|
|
|
memset(&edge, 0, sizeof(edge));
|
|
|
|
|
page_token = amduat_octets(NULL, 0);
|
|
|
|
|
|
|
|
|
|
artifacts = (amduat_tgk_store_mem_artifact_t *)calloc(
|
|
|
|
|
edge_count, sizeof(*artifacts));
|
|
|
|
|
edge_bytes = (amduat_octets_t *)calloc(edge_count, sizeof(*edge_bytes));
|
|
|
|
|
expected = (amduat_reference_t *)calloc(edge_count, sizeof(*expected));
|
|
|
|
|
digests = (uint8_t *)calloc(edge_count, 32);
|
|
|
|
|
if (artifacts == NULL || edge_bytes == NULL ||
|
|
|
|
|
expected == NULL || digests == NULL) {
|
|
|
|
|
fprintf(stderr, "pagination alloc failed\n");
|
|
|
|
|
goto cleanup;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
domains[0].encoding_profile = AMDUAT_ENC_ASL1_CORE_V1;
|
|
|
|
|
domains[0].hash_id = AMDUAT_HASH_ASL1_ID_SHA256;
|
|
|
|
|
edge_tags[0] = TYPE_TAG_TGK1_EDGE_V1;
|
|
|
|
|
edge_types[0] = 0x10;
|
|
|
|
|
encodings[0] = TGK1_EDGE_ENC_V1;
|
|
|
|
|
|
|
|
|
|
config.id_space.domains = domains;
|
|
|
|
|
config.id_space.domains_len = 1;
|
|
|
|
|
config.tgk_profiles.edge_tags = edge_tags;
|
|
|
|
|
config.tgk_profiles.edge_tags_len = 1;
|
|
|
|
|
config.tgk_profiles.edge_types = edge_types;
|
|
|
|
|
config.tgk_profiles.edge_types_len = 1;
|
|
|
|
|
config.tgk_profiles.encodings = encodings;
|
|
|
|
|
config.tgk_profiles.encodings_len = 1;
|
|
|
|
|
|
|
|
|
|
node_a = make_ref(0xa1, digest_a);
|
|
|
|
|
node_b = make_ref(0xb1, digest_b);
|
|
|
|
|
payload = make_ref(0xe1, digest_payload);
|
|
|
|
|
|
|
|
|
|
edge.type = 0x10;
|
|
|
|
|
from_refs[0] = node_a;
|
|
|
|
|
edge.from = from_refs;
|
|
|
|
|
edge.from_len = 1;
|
|
|
|
|
to_refs[0] = node_b;
|
|
|
|
|
edge.to = to_refs;
|
|
|
|
|
edge.to_len = 1;
|
|
|
|
|
edge.payload = payload;
|
|
|
|
|
|
|
|
|
|
for (i = 0; i < edge_count; ++i) {
|
|
|
|
|
expected[i] = make_index_ref((uint16_t)i, digests + (i * 32));
|
|
|
|
|
if (!amduat_enc_tgk1_edge_encode_v1(&edge, &edge_bytes[i])) {
|
|
|
|
|
fprintf(stderr, "pagination encode failed\n");
|
|
|
|
|
goto cleanup;
|
|
|
|
|
}
|
|
|
|
|
artifacts[i].ref = expected[i];
|
|
|
|
|
artifacts[i].artifact =
|
|
|
|
|
amduat_artifact_with_type(edge_bytes[i],
|
|
|
|
|
amduat_type_tag(TYPE_TAG_TGK1_EDGE_V1));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_mem_init(&mem, config, artifacts, edge_count)) {
|
|
|
|
|
fprintf(stderr, "pagination init failed\n");
|
|
|
|
|
goto cleanup;
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_store_init(&store, config, amduat_tgk_store_mem_ops(), &mem);
|
|
|
|
|
|
|
|
|
|
qsort(expected, edge_count, sizeof(*expected), ref_cmp);
|
|
|
|
|
|
|
|
|
|
for (;;) {
|
|
|
|
|
amduat_tgk_graph_scan_result_t scan;
|
|
|
|
|
bool has_next;
|
|
|
|
|
amduat_octets_t next_token;
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_scan_edges(&store,
|
|
|
|
|
(amduat_tgk_edge_type_filter_t){0},
|
|
|
|
|
page_token, has_page_token, &scan)) {
|
|
|
|
|
fprintf(stderr, "pagination scan failed\n");
|
|
|
|
|
goto cleanup_store;
|
|
|
|
|
}
|
|
|
|
|
pages++;
|
|
|
|
|
if (seen + scan.edges.len > edge_count) {
|
|
|
|
|
fprintf(stderr, "pagination overflow\n");
|
|
|
|
|
amduat_tgk_graph_scan_result_free(&scan);
|
|
|
|
|
goto cleanup_store;
|
|
|
|
|
}
|
|
|
|
|
for (i = 0; i < scan.edges.len; ++i) {
|
|
|
|
|
if (!amduat_reference_eq(scan.edges.edges[i].edge_ref,
|
|
|
|
|
expected[seen + i])) {
|
|
|
|
|
fprintf(stderr, "pagination order mismatch\n");
|
|
|
|
|
amduat_tgk_graph_scan_result_free(&scan);
|
|
|
|
|
goto cleanup_store;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
seen += scan.edges.len;
|
|
|
|
|
has_next = scan.has_next_page;
|
|
|
|
|
next_token = scan.next_page_token;
|
|
|
|
|
|
|
|
|
|
if (has_next) {
|
|
|
|
|
uint8_t *token_copy;
|
|
|
|
|
if (next_token.len == 0 || next_token.data == NULL) {
|
|
|
|
|
fprintf(stderr, "pagination empty token\n");
|
|
|
|
|
amduat_tgk_graph_scan_result_free(&scan);
|
|
|
|
|
goto cleanup_store;
|
|
|
|
|
}
|
|
|
|
|
token_copy = (uint8_t *)malloc(next_token.len);
|
|
|
|
|
if (token_copy == NULL) {
|
|
|
|
|
fprintf(stderr, "pagination token alloc failed\n");
|
|
|
|
|
amduat_tgk_graph_scan_result_free(&scan);
|
|
|
|
|
goto cleanup_store;
|
|
|
|
|
}
|
|
|
|
|
memcpy(token_copy, next_token.data, next_token.len);
|
|
|
|
|
free((void *)page_token.data);
|
|
|
|
|
page_token = amduat_octets(token_copy, next_token.len);
|
|
|
|
|
has_page_token = true;
|
|
|
|
|
} else {
|
|
|
|
|
free((void *)page_token.data);
|
|
|
|
|
page_token = amduat_octets(NULL, 0);
|
|
|
|
|
has_page_token = false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
amduat_tgk_graph_scan_result_free(&scan);
|
|
|
|
|
if (!has_next) {
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (pages < 2) {
|
|
|
|
|
fprintf(stderr, "pagination did not paginate\n");
|
|
|
|
|
goto cleanup_store;
|
|
|
|
|
}
|
|
|
|
|
if (seen != edge_count) {
|
|
|
|
|
fprintf(stderr, "pagination count mismatch\n");
|
|
|
|
|
goto cleanup_store;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
exit_code = 0;
|
|
|
|
|
|
|
|
|
|
cleanup_store:
|
|
|
|
|
amduat_tgk_store_mem_free(&mem);
|
|
|
|
|
cleanup:
|
|
|
|
|
free((void *)page_token.data);
|
|
|
|
|
if (edge_bytes != NULL) {
|
|
|
|
|
for (i = 0; i < edge_count; ++i) {
|
|
|
|
|
free((void *)edge_bytes[i].data);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
free(edge_bytes);
|
|
|
|
|
free(artifacts);
|
|
|
|
|
free(expected);
|
|
|
|
|
free(digests);
|
|
|
|
|
return exit_code;
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-21 21:23:55 +01:00
|
|
|
static int test_ingest_remove_epoch(void) {
|
|
|
|
|
amduat_tgk_store_mem_t mem;
|
|
|
|
|
amduat_tgk_store_t store;
|
|
|
|
|
amduat_tgk_store_config_t config;
|
|
|
|
|
amduat_tgk_identity_domain_t domains[1];
|
|
|
|
|
uint32_t edge_tags[1];
|
|
|
|
|
amduat_tgk_edge_type_id_t edge_types[1];
|
|
|
|
|
amduat_asl_encoding_profile_id_t encodings[1];
|
|
|
|
|
amduat_tgk_store_mem_artifact_t artifacts[1];
|
|
|
|
|
amduat_octets_t edge_bytes;
|
|
|
|
|
amduat_reference_t ref_edge;
|
|
|
|
|
amduat_reference_t node_a;
|
|
|
|
|
amduat_reference_t node_b;
|
|
|
|
|
amduat_reference_t payload;
|
|
|
|
|
uint8_t digest_edge[32];
|
|
|
|
|
uint8_t digest_a[32];
|
|
|
|
|
uint8_t digest_b[32];
|
|
|
|
|
uint8_t digest_payload[32];
|
|
|
|
|
amduat_tgk_edge_body_t edge;
|
|
|
|
|
amduat_reference_t edge_from[1];
|
|
|
|
|
amduat_reference_t edge_to[1];
|
|
|
|
|
amduat_tgk_snapshot_id_t epoch_before = 0;
|
|
|
|
|
amduat_tgk_snapshot_id_t epoch_after = 0;
|
|
|
|
|
amduat_tgk_graph_scan_result_t scan;
|
|
|
|
|
int exit_code = 1;
|
|
|
|
|
|
|
|
|
|
memset(&mem, 0, sizeof(mem));
|
|
|
|
|
memset(&config, 0, sizeof(config));
|
|
|
|
|
memset(&edge, 0, sizeof(edge));
|
|
|
|
|
edge_bytes = amduat_octets(NULL, 0);
|
|
|
|
|
|
|
|
|
|
domains[0].encoding_profile = AMDUAT_ENC_ASL1_CORE_V1;
|
|
|
|
|
domains[0].hash_id = AMDUAT_HASH_ASL1_ID_SHA256;
|
|
|
|
|
edge_tags[0] = TYPE_TAG_TGK1_EDGE_V1;
|
|
|
|
|
edge_types[0] = 0x10;
|
|
|
|
|
encodings[0] = TGK1_EDGE_ENC_V1;
|
|
|
|
|
|
|
|
|
|
config.id_space.domains = domains;
|
|
|
|
|
config.id_space.domains_len = 1;
|
|
|
|
|
config.tgk_profiles.edge_tags = edge_tags;
|
|
|
|
|
config.tgk_profiles.edge_tags_len = 1;
|
|
|
|
|
config.tgk_profiles.edge_types = edge_types;
|
|
|
|
|
config.tgk_profiles.edge_types_len = 1;
|
|
|
|
|
config.tgk_profiles.encodings = encodings;
|
|
|
|
|
config.tgk_profiles.encodings_len = 1;
|
|
|
|
|
|
|
|
|
|
ref_edge = make_ref(0x71, digest_edge);
|
|
|
|
|
node_a = make_ref(0xa1, digest_a);
|
|
|
|
|
node_b = make_ref(0xb1, digest_b);
|
|
|
|
|
payload = make_ref(0xe1, digest_payload);
|
|
|
|
|
|
|
|
|
|
edge.type = 0x10;
|
|
|
|
|
edge_from[0] = node_a;
|
|
|
|
|
edge.from = edge_from;
|
|
|
|
|
edge.from_len = 1;
|
|
|
|
|
edge_to[0] = node_b;
|
|
|
|
|
edge.to = edge_to;
|
|
|
|
|
edge.to_len = 1;
|
|
|
|
|
edge.payload = payload;
|
|
|
|
|
|
|
|
|
|
if (!amduat_enc_tgk1_edge_encode_v1(&edge, &edge_bytes)) {
|
|
|
|
|
fprintf(stderr, "epoch encode failed\n");
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
artifacts[0].ref = ref_edge;
|
|
|
|
|
artifacts[0].artifact =
|
|
|
|
|
amduat_artifact_with_type(edge_bytes,
|
|
|
|
|
amduat_type_tag(TYPE_TAG_TGK1_EDGE_V1));
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_mem_init(&mem, config, artifacts, 1)) {
|
|
|
|
|
fprintf(stderr, "epoch init failed\n");
|
|
|
|
|
goto cleanup;
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_store_init(&store, config, amduat_tgk_store_mem_ops(), &mem);
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_snapshot_id(&store, &epoch_before)) {
|
|
|
|
|
fprintf(stderr, "epoch get failed\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_ingest_artifact(&store, ref_edge,
|
|
|
|
|
artifacts[0].artifact)) {
|
|
|
|
|
fprintf(stderr, "epoch ingest failed\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
if (!amduat_tgk_store_snapshot_id(&store, &epoch_after)) {
|
|
|
|
|
fprintf(stderr, "epoch get after ingest failed\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
if (epoch_after != epoch_before) {
|
|
|
|
|
fprintf(stderr, "epoch changed on no-op ingest\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_remove_artifact(&store, ref_edge)) {
|
|
|
|
|
fprintf(stderr, "epoch remove failed\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
if (!amduat_tgk_store_snapshot_id(&store, &epoch_after)) {
|
|
|
|
|
fprintf(stderr, "epoch get after remove failed\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
if (epoch_after != epoch_before + 1u) {
|
|
|
|
|
fprintf(stderr, "epoch did not increment on remove\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_scan_edges(&store,
|
|
|
|
|
(amduat_tgk_edge_type_filter_t){0},
|
|
|
|
|
amduat_octets(NULL, 0), false, &scan)) {
|
|
|
|
|
fprintf(stderr, "epoch scan failed\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
if (scan.edges.len != 0) {
|
|
|
|
|
fprintf(stderr, "epoch scan not empty after remove\n");
|
|
|
|
|
amduat_tgk_graph_scan_result_free(&scan);
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_graph_scan_result_free(&scan);
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_ingest_artifact(&store, ref_edge,
|
|
|
|
|
artifacts[0].artifact)) {
|
|
|
|
|
fprintf(stderr, "epoch re-ingest failed\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
if (!amduat_tgk_store_snapshot_id(&store, &epoch_after)) {
|
|
|
|
|
fprintf(stderr, "epoch get after re-ingest failed\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
if (epoch_after != epoch_before + 2u) {
|
|
|
|
|
fprintf(stderr, "epoch did not increment on ingest\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
exit_code = 0;
|
|
|
|
|
|
|
|
|
|
cleanup_mem:
|
|
|
|
|
amduat_tgk_store_mem_free(&mem);
|
|
|
|
|
cleanup:
|
|
|
|
|
free((void *)edge_bytes.data);
|
|
|
|
|
return exit_code;
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-21 21:31:55 +01:00
|
|
|
static int test_ingest_batch_epoch(void) {
|
|
|
|
|
amduat_tgk_store_mem_t mem;
|
|
|
|
|
amduat_tgk_store_t store;
|
|
|
|
|
amduat_tgk_store_config_t config;
|
|
|
|
|
amduat_tgk_identity_domain_t domains[1];
|
|
|
|
|
uint32_t edge_tags[1];
|
|
|
|
|
amduat_tgk_edge_type_id_t edge_types[1];
|
|
|
|
|
amduat_asl_encoding_profile_id_t encodings[1];
|
|
|
|
|
amduat_reference_t refs[2];
|
|
|
|
|
amduat_artifact_t artifacts[2];
|
|
|
|
|
amduat_octets_t edge_bytes;
|
|
|
|
|
amduat_reference_t node_a;
|
|
|
|
|
amduat_reference_t node_b;
|
|
|
|
|
amduat_reference_t payload;
|
|
|
|
|
uint8_t digest_ref1[32];
|
|
|
|
|
uint8_t digest_ref2[32];
|
|
|
|
|
uint8_t digest_a[32];
|
|
|
|
|
uint8_t digest_b[32];
|
|
|
|
|
uint8_t digest_payload[32];
|
|
|
|
|
amduat_tgk_edge_body_t edge;
|
|
|
|
|
amduat_reference_t edge_from[1];
|
|
|
|
|
amduat_reference_t edge_to[1];
|
|
|
|
|
amduat_tgk_snapshot_id_t epoch_before = 0;
|
|
|
|
|
amduat_tgk_snapshot_id_t epoch_after = 0;
|
|
|
|
|
amduat_tgk_graph_scan_result_t scan;
|
|
|
|
|
int exit_code = 1;
|
|
|
|
|
|
|
|
|
|
memset(&mem, 0, sizeof(mem));
|
|
|
|
|
memset(&config, 0, sizeof(config));
|
|
|
|
|
memset(&edge, 0, sizeof(edge));
|
|
|
|
|
edge_bytes = amduat_octets(NULL, 0);
|
|
|
|
|
|
|
|
|
|
domains[0].encoding_profile = AMDUAT_ENC_ASL1_CORE_V1;
|
|
|
|
|
domains[0].hash_id = AMDUAT_HASH_ASL1_ID_SHA256;
|
|
|
|
|
edge_tags[0] = TYPE_TAG_TGK1_EDGE_V1;
|
|
|
|
|
edge_types[0] = 0x10;
|
|
|
|
|
encodings[0] = TGK1_EDGE_ENC_V1;
|
|
|
|
|
|
|
|
|
|
config.id_space.domains = domains;
|
|
|
|
|
config.id_space.domains_len = 1;
|
|
|
|
|
config.tgk_profiles.edge_tags = edge_tags;
|
|
|
|
|
config.tgk_profiles.edge_tags_len = 1;
|
|
|
|
|
config.tgk_profiles.edge_types = edge_types;
|
|
|
|
|
config.tgk_profiles.edge_types_len = 1;
|
|
|
|
|
config.tgk_profiles.encodings = encodings;
|
|
|
|
|
config.tgk_profiles.encodings_len = 1;
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_mem_init(&mem, config, NULL, 0)) {
|
|
|
|
|
fprintf(stderr, "batch epoch init failed\n");
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_store_init(&store, config, amduat_tgk_store_mem_ops(), &mem);
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_snapshot_id(&store, &epoch_before)) {
|
|
|
|
|
fprintf(stderr, "batch epoch get failed\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
refs[0] = make_ref(0x81, digest_ref1);
|
|
|
|
|
refs[1] = make_ref(0x82, digest_ref2);
|
|
|
|
|
node_a = make_ref(0xa1, digest_a);
|
|
|
|
|
node_b = make_ref(0xb1, digest_b);
|
|
|
|
|
payload = make_ref(0xe1, digest_payload);
|
|
|
|
|
|
|
|
|
|
edge.type = 0x10;
|
|
|
|
|
edge_from[0] = node_a;
|
|
|
|
|
edge.from = edge_from;
|
|
|
|
|
edge.from_len = 1;
|
|
|
|
|
edge_to[0] = node_b;
|
|
|
|
|
edge.to = edge_to;
|
|
|
|
|
edge.to_len = 1;
|
|
|
|
|
edge.payload = payload;
|
|
|
|
|
|
|
|
|
|
if (!amduat_enc_tgk1_edge_encode_v1(&edge, &edge_bytes)) {
|
|
|
|
|
fprintf(stderr, "batch epoch encode failed\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
artifacts[0] = amduat_artifact_with_type(
|
|
|
|
|
edge_bytes, amduat_type_tag(TYPE_TAG_TGK1_EDGE_V1));
|
|
|
|
|
artifacts[1] = artifacts[0];
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_ingest_batch(&store, refs, artifacts, 2)) {
|
|
|
|
|
fprintf(stderr, "batch ingest failed\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
if (!amduat_tgk_store_snapshot_id(&store, &epoch_after)) {
|
|
|
|
|
fprintf(stderr, "batch epoch after ingest failed\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
if (epoch_after != epoch_before + 1u) {
|
|
|
|
|
fprintf(stderr, "batch epoch did not increment\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_scan_edges(&store,
|
|
|
|
|
(amduat_tgk_edge_type_filter_t){0},
|
|
|
|
|
amduat_octets(NULL, 0), false, &scan)) {
|
|
|
|
|
fprintf(stderr, "batch scan failed\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
if (scan.edges.len != 2) {
|
|
|
|
|
fprintf(stderr, "batch scan count mismatch\n");
|
|
|
|
|
amduat_tgk_graph_scan_result_free(&scan);
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_graph_scan_result_free(&scan);
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_ingest_batch(&store, refs, artifacts, 2)) {
|
|
|
|
|
fprintf(stderr, "batch re-ingest failed\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
if (!amduat_tgk_store_snapshot_id(&store, &epoch_after)) {
|
|
|
|
|
fprintf(stderr, "batch epoch after re-ingest failed\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
if (epoch_after != epoch_before + 1u) {
|
|
|
|
|
fprintf(stderr, "batch epoch changed on no-op\n");
|
|
|
|
|
goto cleanup_mem;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
exit_code = 0;
|
|
|
|
|
|
|
|
|
|
cleanup_mem:
|
|
|
|
|
amduat_tgk_store_mem_free(&mem);
|
|
|
|
|
free((void *)edge_bytes.data);
|
|
|
|
|
return exit_code;
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-21 21:52:06 +01:00
|
|
|
static int test_snapshot_consistency(void) {
|
|
|
|
|
test_env_t env;
|
|
|
|
|
amduat_tgk_store_snapshot_t snapshot;
|
|
|
|
|
amduat_tgk_snapshot_id_t snap_before = 0;
|
|
|
|
|
amduat_tgk_snapshot_id_t snap_after = 0;
|
|
|
|
|
amduat_tgk_snapshot_id_t store_epoch = 0;
|
|
|
|
|
amduat_tgk_graph_scan_result_t scan;
|
|
|
|
|
int exit_code = 1;
|
|
|
|
|
|
|
|
|
|
memset(&snapshot, 0, sizeof(snapshot));
|
|
|
|
|
if (!init_env(&env)) {
|
|
|
|
|
fprintf(stderr, "snapshot env init failed\n");
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_snapshot(&env.store, &snapshot)) {
|
|
|
|
|
fprintf(stderr, "snapshot capture failed\n");
|
|
|
|
|
goto cleanup_env;
|
|
|
|
|
}
|
|
|
|
|
if (!amduat_tgk_store_snapshot_id(&snapshot.store, &snap_before)) {
|
|
|
|
|
fprintf(stderr, "snapshot id failed\n");
|
|
|
|
|
goto cleanup_snapshot;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_remove_artifact(&env.store, env.ref_edge3)) {
|
|
|
|
|
fprintf(stderr, "snapshot remove failed\n");
|
|
|
|
|
goto cleanup_snapshot;
|
|
|
|
|
}
|
|
|
|
|
if (!amduat_tgk_store_snapshot_id(&env.store, &store_epoch)) {
|
|
|
|
|
fprintf(stderr, "snapshot store epoch failed\n");
|
|
|
|
|
goto cleanup_snapshot;
|
|
|
|
|
}
|
|
|
|
|
if (!amduat_tgk_store_snapshot_id(&snapshot.store, &snap_after)) {
|
|
|
|
|
fprintf(stderr, "snapshot id recheck failed\n");
|
|
|
|
|
goto cleanup_snapshot;
|
|
|
|
|
}
|
|
|
|
|
if (snap_after != snap_before) {
|
|
|
|
|
fprintf(stderr, "snapshot epoch drifted\n");
|
|
|
|
|
goto cleanup_snapshot;
|
|
|
|
|
}
|
|
|
|
|
if (store_epoch == snap_before) {
|
|
|
|
|
fprintf(stderr, "store epoch did not advance\n");
|
|
|
|
|
goto cleanup_snapshot;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_scan_edges(&snapshot.store,
|
|
|
|
|
(amduat_tgk_edge_type_filter_t){0},
|
|
|
|
|
amduat_octets(NULL, 0), false, &scan)) {
|
|
|
|
|
fprintf(stderr, "snapshot scan failed\n");
|
|
|
|
|
goto cleanup_snapshot;
|
|
|
|
|
}
|
|
|
|
|
if (scan.edges.len != 3 ||
|
|
|
|
|
!amduat_reference_eq(scan.edges.edges[0].edge_ref, env.ref_edge2) ||
|
|
|
|
|
!amduat_reference_eq(scan.edges.edges[1].edge_ref, env.ref_edge1) ||
|
|
|
|
|
!amduat_reference_eq(scan.edges.edges[2].edge_ref, env.ref_edge3)) {
|
|
|
|
|
fprintf(stderr, "snapshot scan mismatch\n");
|
|
|
|
|
amduat_tgk_graph_scan_result_free(&scan);
|
|
|
|
|
goto cleanup_snapshot;
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_graph_scan_result_free(&scan);
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_scan_edges(&env.store,
|
|
|
|
|
(amduat_tgk_edge_type_filter_t){0},
|
|
|
|
|
amduat_octets(NULL, 0), false, &scan)) {
|
|
|
|
|
fprintf(stderr, "store scan failed\n");
|
|
|
|
|
goto cleanup_snapshot;
|
|
|
|
|
}
|
|
|
|
|
if (scan.edges.len != 2 ||
|
|
|
|
|
!amduat_reference_eq(scan.edges.edges[0].edge_ref, env.ref_edge2) ||
|
|
|
|
|
!amduat_reference_eq(scan.edges.edges[1].edge_ref, env.ref_edge1)) {
|
|
|
|
|
fprintf(stderr, "store scan mismatch\n");
|
|
|
|
|
amduat_tgk_graph_scan_result_free(&scan);
|
|
|
|
|
goto cleanup_snapshot;
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_graph_scan_result_free(&scan);
|
|
|
|
|
|
|
|
|
|
exit_code = 0;
|
|
|
|
|
|
|
|
|
|
cleanup_snapshot:
|
|
|
|
|
amduat_tgk_store_snapshot_free(&snapshot);
|
|
|
|
|
cleanup_env:
|
|
|
|
|
free_env(&env);
|
|
|
|
|
return exit_code;
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-21 19:23:41 +01:00
|
|
|
static int test_resolve_edge_unsupported(const test_env_t *env) {
|
|
|
|
|
amduat_reference_t ref;
|
2025-12-22 00:21:16 +01:00
|
|
|
uint8_t *digest;
|
2025-12-21 19:23:41 +01:00
|
|
|
amduat_tgk_edge_body_t body;
|
|
|
|
|
amduat_tgk_graph_error_t err;
|
|
|
|
|
int exit_code = 1;
|
|
|
|
|
|
2025-12-22 00:21:16 +01:00
|
|
|
digest = (uint8_t *)malloc(32u);
|
|
|
|
|
if (digest == NULL) {
|
2025-12-21 19:23:41 +01:00
|
|
|
fprintf(stderr, "unsupported ref alloc failed\n");
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
2025-12-22 00:21:16 +01:00
|
|
|
memset(digest, 0x6au, 32u);
|
|
|
|
|
ref = amduat_reference(0x0003u, amduat_octets(digest, 32u));
|
2025-12-21 19:23:41 +01:00
|
|
|
|
|
|
|
|
err = amduat_tgk_store_resolve_edge((amduat_tgk_store_t *)&env->store, ref,
|
|
|
|
|
&body);
|
|
|
|
|
if (err != GS_ERR_UNSUPPORTED) {
|
|
|
|
|
fprintf(stderr, "resolve_edge unsupported mismatch: %d\n", err);
|
|
|
|
|
goto cleanup;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
exit_code = 0;
|
|
|
|
|
cleanup:
|
|
|
|
|
free_ref(&ref);
|
|
|
|
|
return exit_code;
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-21 08:44:04 +01:00
|
|
|
static int test_type_filter(const test_env_t *env) {
|
|
|
|
|
amduat_tgk_edge_type_id_t types[1] = {0x10};
|
|
|
|
|
amduat_tgk_edge_type_filter_t filter;
|
|
|
|
|
amduat_tgk_graph_edge_view_list_t edges;
|
|
|
|
|
size_t i;
|
|
|
|
|
|
|
|
|
|
filter.types = types;
|
|
|
|
|
filter.types_len = 1;
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_edges_incident((amduat_tgk_store_t *)&env->store,
|
|
|
|
|
env->node_a, filter, &edges)) {
|
|
|
|
|
fprintf(stderr, "edges_incident type filter failed\n");
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
if (edges.len != 2) {
|
|
|
|
|
fprintf(stderr, "edges_incident type filter count mismatch\n");
|
|
|
|
|
amduat_tgk_graph_edge_view_list_free(&edges);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
for (i = 0; i < edges.len; ++i) {
|
|
|
|
|
if (edges.edges[i].body.type != 0x10) {
|
|
|
|
|
fprintf(stderr, "edges_incident type filter type mismatch\n");
|
|
|
|
|
amduat_tgk_graph_edge_view_list_free(&edges);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_graph_edge_view_list_free(&edges);
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static int test_ordering(const test_env_t *env) {
|
|
|
|
|
amduat_tgk_edge_type_filter_t filter;
|
|
|
|
|
amduat_tgk_graph_edge_view_list_t edges;
|
|
|
|
|
|
|
|
|
|
filter.types = NULL;
|
|
|
|
|
filter.types_len = 0;
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_edges_from((amduat_tgk_store_t *)&env->store,
|
|
|
|
|
env->node_a, filter, &edges)) {
|
|
|
|
|
fprintf(stderr, "edges_from ordering failed\n");
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
if (edges.len != 2) {
|
|
|
|
|
fprintf(stderr, "edges_from ordering count mismatch\n");
|
|
|
|
|
amduat_tgk_graph_edge_view_list_free(&edges);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
if (!amduat_reference_eq(edges.edges[0].edge_ref, env->ref_edge2) ||
|
|
|
|
|
!amduat_reference_eq(edges.edges[1].edge_ref, env->ref_edge1)) {
|
|
|
|
|
fprintf(stderr, "edges_from ordering mismatch\n");
|
|
|
|
|
amduat_tgk_graph_edge_view_list_free(&edges);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_graph_edge_view_list_free(&edges);
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static int test_adjacency(const test_env_t *env) {
|
|
|
|
|
amduat_tgk_edge_type_filter_t filter;
|
|
|
|
|
amduat_tgk_graph_edge_view_list_t edges;
|
|
|
|
|
|
|
|
|
|
filter.types = NULL;
|
|
|
|
|
filter.types_len = 0;
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_edges_to((amduat_tgk_store_t *)&env->store,
|
|
|
|
|
env->node_a, filter, &edges)) {
|
|
|
|
|
fprintf(stderr, "edges_to failed\n");
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
if (edges.len != 1 ||
|
|
|
|
|
!amduat_reference_eq(edges.edges[0].edge_ref, env->ref_edge3)) {
|
|
|
|
|
fprintf(stderr, "edges_to mismatch\n");
|
|
|
|
|
amduat_tgk_graph_edge_view_list_free(&edges);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_graph_edge_view_list_free(&edges);
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_edges_incident((amduat_tgk_store_t *)&env->store,
|
|
|
|
|
env->node_a, filter, &edges)) {
|
|
|
|
|
fprintf(stderr, "edges_incident failed\n");
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
if (edges.len != 3 ||
|
|
|
|
|
!amduat_reference_eq(edges.edges[0].edge_ref, env->ref_edge2) ||
|
|
|
|
|
!amduat_reference_eq(edges.edges[1].edge_ref, env->ref_edge1) ||
|
|
|
|
|
!amduat_reference_eq(edges.edges[2].edge_ref, env->ref_edge3)) {
|
|
|
|
|
fprintf(stderr, "edges_incident mismatch\n");
|
|
|
|
|
amduat_tgk_graph_edge_view_list_free(&edges);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_graph_edge_view_list_free(&edges);
|
|
|
|
|
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static int test_neighbors(const test_env_t *env) {
|
|
|
|
|
amduat_tgk_edge_type_filter_t filter;
|
|
|
|
|
amduat_tgk_node_list_t nodes;
|
|
|
|
|
|
|
|
|
|
filter.types = NULL;
|
|
|
|
|
filter.types_len = 0;
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_neighbors((amduat_tgk_store_t *)&env->store,
|
|
|
|
|
env->node_a, filter,
|
|
|
|
|
AMDUAT_TGK_GRAPH_DIR_OUT, &nodes)) {
|
|
|
|
|
fprintf(stderr, "neighbors out failed\n");
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
if (nodes.len != 2 ||
|
|
|
|
|
!amduat_reference_eq(nodes.nodes[0], env->node_b) ||
|
|
|
|
|
!amduat_reference_eq(nodes.nodes[1], env->node_c)) {
|
|
|
|
|
fprintf(stderr, "neighbors out mismatch\n");
|
|
|
|
|
amduat_tgk_node_list_free(&nodes);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_node_list_free(&nodes);
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_neighbors((amduat_tgk_store_t *)&env->store,
|
|
|
|
|
env->node_a, filter,
|
|
|
|
|
AMDUAT_TGK_GRAPH_DIR_IN, &nodes)) {
|
|
|
|
|
fprintf(stderr, "neighbors in failed\n");
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
if (nodes.len != 1 ||
|
|
|
|
|
!amduat_reference_eq(nodes.nodes[0], env->node_b)) {
|
|
|
|
|
fprintf(stderr, "neighbors in mismatch\n");
|
|
|
|
|
amduat_tgk_node_list_free(&nodes);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_node_list_free(&nodes);
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_neighbors((amduat_tgk_store_t *)&env->store,
|
|
|
|
|
env->node_a, filter,
|
|
|
|
|
AMDUAT_TGK_GRAPH_DIR_BOTH, &nodes)) {
|
|
|
|
|
fprintf(stderr, "neighbors both failed\n");
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
if (nodes.len != 2 ||
|
|
|
|
|
!amduat_reference_eq(nodes.nodes[0], env->node_b) ||
|
|
|
|
|
!amduat_reference_eq(nodes.nodes[1], env->node_c)) {
|
|
|
|
|
fprintf(stderr, "neighbors both mismatch\n");
|
|
|
|
|
amduat_tgk_node_list_free(&nodes);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_node_list_free(&nodes);
|
|
|
|
|
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static int test_scan_edges(const test_env_t *env) {
|
|
|
|
|
amduat_tgk_edge_type_filter_t filter;
|
|
|
|
|
amduat_tgk_graph_scan_result_t scan;
|
|
|
|
|
|
|
|
|
|
filter.types = NULL;
|
|
|
|
|
filter.types_len = 0;
|
|
|
|
|
|
|
|
|
|
if (!amduat_tgk_store_scan_edges((amduat_tgk_store_t *)&env->store, filter,
|
|
|
|
|
amduat_octets(NULL, 0), false, &scan)) {
|
|
|
|
|
fprintf(stderr, "scan_edges failed\n");
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
if (scan.edges.len != 3 ||
|
|
|
|
|
!amduat_reference_eq(scan.edges.edges[0].edge_ref, env->ref_edge2) ||
|
|
|
|
|
!amduat_reference_eq(scan.edges.edges[1].edge_ref, env->ref_edge1) ||
|
|
|
|
|
!amduat_reference_eq(scan.edges.edges[2].edge_ref, env->ref_edge3)) {
|
|
|
|
|
fprintf(stderr, "scan_edges ordering mismatch\n");
|
|
|
|
|
amduat_tgk_graph_scan_result_free(&scan);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
amduat_tgk_graph_scan_result_free(&scan);
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
int main(void) {
|
|
|
|
|
test_env_t env;
|
|
|
|
|
|
|
|
|
|
if (!init_env(&env)) {
|
|
|
|
|
fprintf(stderr, "failed to init tgk store mem test env\n");
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (test_resolve_edge_ok(&env) != 0 ||
|
|
|
|
|
test_resolve_edge_not_edge(&env) != 0 ||
|
2025-12-21 19:46:56 +01:00
|
|
|
test_resolve_edge_invalid_bytes(&env) != 0 ||
|
2025-12-21 19:23:41 +01:00
|
|
|
test_resolve_edge_missing(&env) != 0 ||
|
|
|
|
|
test_resolve_edge_unsupported(&env) != 0 ||
|
2025-12-21 19:46:56 +01:00
|
|
|
test_init_rejects_unsupported_encoding() != 0 ||
|
2025-12-21 20:24:34 +01:00
|
|
|
test_init_rejects_duplicate_hash_id() != 0 ||
|
2025-12-22 11:42:41 +01:00
|
|
|
test_init_rejects_invalid_profile_config() != 0 ||
|
2025-12-21 20:24:34 +01:00
|
|
|
test_duplicate_edge_ref_same_artifact() != 0 ||
|
|
|
|
|
test_duplicate_edge_ref_conflict() != 0 ||
|
2025-12-21 20:36:19 +01:00
|
|
|
test_scan_edges_pagination() != 0 ||
|
2025-12-21 21:23:55 +01:00
|
|
|
test_ingest_remove_epoch() != 0 ||
|
2025-12-21 21:31:55 +01:00
|
|
|
test_ingest_batch_epoch() != 0 ||
|
2025-12-21 21:52:06 +01:00
|
|
|
test_snapshot_consistency() != 0 ||
|
2025-12-21 08:44:04 +01:00
|
|
|
test_type_filter(&env) != 0 ||
|
|
|
|
|
test_ordering(&env) != 0 ||
|
|
|
|
|
test_adjacency(&env) != 0 ||
|
|
|
|
|
test_neighbors(&env) != 0 ||
|
|
|
|
|
test_scan_edges(&env) != 0) {
|
|
|
|
|
free_env(&env);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
free_env(&env);
|
|
|
|
|
return 0;
|
|
|
|
|
}
|