Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
112 changes: 112 additions & 0 deletions ci/admissibility/admissibility_check.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
#!/usr/bin/env node
import fs from 'node:fs';
import path from 'node:path';
import crypto from 'node:crypto';
import Ajv2020 from 'ajv/dist/2020.js';

// CLI arguments: argv[2] = evidence run root, argv[3] = CAC schema directory.
const [, , rootArg, schemaArg] = process.argv;
const root = rootArg ? path.resolve(rootArg) : process.cwd();
const schemaRoot = schemaArg ? path.resolve(schemaArg) : path.resolve(process.cwd(), 'schemas/cac');

// Parse a JSON file; read/parse errors propagate to the caller.
const readJson = (p) => JSON.parse(fs.readFileSync(p, 'utf8'));

// Print a structured failure line and abort the gate with a nonzero exit code.
const fail = (message) => {
  console.error(`[admissibility_check] FAIL: ${message}`);
  process.exit(1);
};

// Both evidence documents are mandatory; abort before any validation work.
const requireFile = (p, message) => {
  if (!fs.existsSync(p)) fail(message);
};

const reportPath = path.join(root, 'report.json');
const tracePath = path.join(root, 'decision_trace.json');
requireFile(reportPath, 'Missing report.json');
requireFile(tracePath, 'decision_trace.json exists and is complete: missing decision_trace.json');

const report = readJson(reportPath);
const trace = readJson(tracePath);

// Validate both documents against their CAC JSON Schemas (draft 2020-12).
// strict mode is off so schema vocabulary extensions don't hard-error.
const ajv = new Ajv2020({ allErrors: true, strict: false });
const documents = [
  ['report', report, readJson(path.join(schemaRoot, 'report.schema.json'))],
  ['trace', trace, readJson(path.join(schemaRoot, 'decision_trace.schema.json'))],
];
for (const [kind, data, schema] of documents) {
  const validate = ajv.compile(schema);
  if (!validate(data)) {
    fail(`Schema validation failed for ${kind}: ${ajv.errorsText(validate.errors)}`);
  }
}

// Key names that imply run-to-run nondeterminism; their presence anywhere in
// the evidence invalidates admissibility.
const forbiddenFieldPattern = /(timestamp|created_at|updated_at|nonce|random|uuid|seed)/i;

// Depth-first walk over nested objects/arrays; fails on the first object key
// matching the forbidden pattern, reporting its JSONPath-style location.
const scanForForbidden = (value, keyPath = '$') => {
  if (value === null || typeof value !== 'object') return;
  if (Array.isArray(value)) {
    value.forEach((item, index) => scanForForbidden(item, `${keyPath}[${index}]`));
    return;
  }
  for (const [key, child] of Object.entries(value)) {
    if (forbiddenFieldPattern.test(key)) {
      fail(`non-deterministic fields present: ${keyPath}.${key}`);
    }
    scanForForbidden(child, `${keyPath}.${key}`);
  }
};
// Reject the run if either evidence document carries non-deterministic fields.
scanForForbidden(report);
scanForForbidden(trace);

// Index the trace's declared ids for O(1) membership checks below.
const idsOf = (items) => new Set(items.map((item) => item.id));
const inputSet = idsOf(trace.inputs);
const outputSet = idsOf(trace.outputs);
const stepSet = idsOf(trace.steps);
Comment on lines +59 to +60
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P2 Badge Validate producer step IDs for trace outputs

The admissibility check only tracks decision_trace.outputs[*].id and never validates that each produced_by_step_id points to an existing step that actually emits that output. This allows a tampered trace to pass with incorrect provenance links (wrong producer attribution), which weakens the lineage guarantees this gate is supposed to enforce.

Useful? React with 👍 / 👎.


// Cross-link report outputs with the decision trace: every report output must
// cite a real trace step and be declared in decision_trace.outputs.
for (const output of report.outputs) {
  if (!output.trace_step_id || !stepSet.has(output.trace_step_id)) {
    fail(`missing trace links for output ${output.id}`);
  }
  if (!outputSet.has(output.id)) {
    fail(`every output maps to trace steps: report output ${output.id} absent from decision_trace.outputs`);
  }
}

// Fix (review P2): validate producer attribution. Each trace output must name
// an existing step via produced_by_step_id, and that step must actually list
// the output in its output_ids — otherwise a tampered trace could pass with
// incorrect provenance links.
const stepById = new Map(trace.steps.map((s) => [s.id, s]));
for (const output of trace.outputs) {
  const producer = stepById.get(output.produced_by_step_id);
  if (!producer) {
    fail(`broken provenance: trace output ${output.id} cites unknown producer step ${output.produced_by_step_id}`);
  }
  if (!producer.output_ids.includes(output.id)) {
    fail(`broken provenance: step ${producer.id} does not emit output ${output.id}`);
  }
}

// Steps may only reference declared inputs and outputs.
for (const step of trace.steps) {
  for (const inputId of step.input_ids) {
    if (!inputSet.has(inputId)) {
      fail(`all inputs are accounted for: step ${step.id} references unknown input ${inputId}`);
    }
  }
  for (const outputId of step.output_ids) {
    if (!outputSet.has(outputId)) {
      fail(`incomplete lineage: step ${step.id} references unknown output ${outputId}`);
    }
  }
}

// Every declared output must have at least one producing step.
const referencedOutputs = new Set(trace.steps.flatMap((s) => s.output_ids));
for (const outputId of outputSet) {
  if (!referencedOutputs.has(outputId)) {
    fail(`no orphan transformations: output ${outputId} has no producing step`);
  }
}

// Deterministic lineage fingerprint: sorted id sets plus the ordered step
// wiring, hashed so downstream consumers can compare runs byte-for-byte.
// (Step order is preserved on purpose — it is part of the lineage.)
const lineagePayload = {
  inputs: [...inputSet].sort(),
  outputs: [...outputSet].sort(),
  steps: trace.steps.map((s) => ({
    id: s.id,
    input_ids: [...s.input_ids],
    output_ids: [...s.output_ids],
  })),
};
const lineageDigest = crypto.createHash('sha256').update(JSON.stringify(lineagePayload)).digest('hex');

// Emit a machine-readable PASS receipt alongside the evidence bundle.
const result = {
  check: 'admissibility_check',
  status: 'PASS',
  lineage_digest_sha256: lineageDigest,
};
const resultJson = JSON.stringify(result, null, 2);
fs.writeFileSync(path.join(root, 'admissibility_result.json'), `${resultJson}\n`);
console.log('[admissibility_check] PASS');
63 changes: 63 additions & 0 deletions ci/admissibility/evidence_integrity_check.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
#!/usr/bin/env node
import fs from 'node:fs';
import path from 'node:path';
import crypto from 'node:crypto';
import { spawnSync } from 'node:child_process';

// Evidence root defaults to the current working directory.
const rootArg = process.argv[2];
const root = rootArg ? path.resolve(rootArg) : process.cwd();

// Print a structured failure line and abort with a nonzero exit code.
const fail = (message) => {
  console.error(`[evidence_integrity_check] FAIL: ${message}`);
  process.exit(1);
};

// Load the artifact manifest. A missing or malformed manifest must surface as
// a structured FAIL, not an uncaught JSON.parse exception (which would exit
// nonzero but without the gate's diagnostic format).
const manifestPath = path.join(root, 'artifact_manifest.json');
if (!fs.existsSync(manifestPath)) fail('unsigned artifact: artifact_manifest.json missing');
let manifest;
try {
  manifest = JSON.parse(fs.readFileSync(manifestPath, 'utf8'));
} catch (err) {
  fail(`unreadable manifest: ${err.message}`);
}
// The manifest must declare at least one hash-addressable artifact.
if (!Array.isArray(manifest.artifacts) || manifest.artifacts.length === 0) {
  fail('hash-addressable artifacts required');
}

// Verify every artifact is hash-addressable and matches its declared digest.
for (const artifact of manifest.artifacts) {
  if (!artifact.path || !artifact.sha256) {
    fail('hash-addressable artifacts required: each artifact needs path and sha256');
  }
  // Security: the manifest is untrusted input — a relative path like
  // "../../etc/hosts" must not be able to address files outside the evidence
  // root, so resolve and check containment before reading.
  const artifactPath = path.resolve(root, artifact.path);
  if (artifactPath !== root && !artifactPath.startsWith(root + path.sep)) {
    fail(`path escape: artifact ${artifact.path} resolves outside the evidence root`);
  }
  if (!fs.existsSync(artifactPath)) {
    fail(`hash mismatch: missing artifact ${artifact.path}`);
  }
  const actual = crypto.createHash('sha256').update(fs.readFileSync(artifactPath)).digest('hex');
  if (actual !== artifact.sha256) {
    fail(`hash mismatch: ${artifact.path}`);
  }
}

// Both the detached signature and the public key must ship with the bundle.
const sigPath = path.join(root, 'artifact_manifest.sig');
const pubKeyPath = path.join(root, 'cosign.pub');
if (!fs.existsSync(sigPath) || !fs.existsSync(pubKeyPath)) {
  fail('unsigned artifact: Cosign signature/public key missing');
}

// Verify the manifest blob signature with cosign. spawnSync does not throw
// when the binary is absent — it reports ENOENT via `error` with status null,
// so check `error` first to avoid a misleading "signature invalid" message.
const verify = spawnSync(
  'cosign',
  ['verify-blob', '--key', pubKeyPath, '--signature', sigPath, manifestPath],
  { encoding: 'utf8' },
);
if (verify.error) {
  fail(`signature tooling unavailable: ${verify.error.message}`);
}
if (verify.status !== 0) {
  fail(`signature invalid: ${verify.stderr || verify.stdout}`.trim());
}

// Fix (review P2): bind the verified manifest bytes to the digest the report
// declares, so a stale or arbitrary report-side hash cannot ride on an
// otherwise valid manifest. If report.json is present, its
// artifact_manifest_sha256 must equal the digest of the manifest we just
// signature- and hash-verified.
const manifestDigest = crypto.createHash('sha256').update(fs.readFileSync(manifestPath)).digest('hex');
const reportPath = path.join(root, 'report.json');
if (fs.existsSync(reportPath)) {
  const report = JSON.parse(fs.readFileSync(reportPath, 'utf8'));
  if (report.artifact_manifest_sha256 !== manifestDigest) {
    fail(
      `manifest digest mismatch: report declares ${report.artifact_manifest_sha256}, verified manifest is ${manifestDigest}`,
    );
  }
}

// Machine-readable PASS receipt, including the digest the evidence was bound to.
const result = {
  check: 'evidence_integrity_check',
  status: 'PASS',
  verified_artifacts: manifest.artifacts.length,
  manifest_sha256: manifestDigest,
};
fs.writeFileSync(path.join(root, 'evidence_integrity_result.json'), `${JSON.stringify(result, null, 2)}\n`);
console.log('[evidence_integrity_check] PASS');
70 changes: 70 additions & 0 deletions ci/admissibility/reproducibility_check.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
#!/usr/bin/env node
import fs from 'node:fs';
import path from 'node:path';
import crypto from 'node:crypto';

// Evidence root defaults to the current working directory.
const root = process.argv[2] ? path.resolve(process.argv[2]) : process.cwd();

// Print a structured failure line and abort with a nonzero exit code.
const fail = (message) => {
  console.error(`[reproducibility_check] FAIL: ${message}`);
  process.exit(1);
};

// Bug fix: a non-numeric argv[3] made `tolerance` NaN, and every
// `delta > NaN` comparison is false — silently disabling drift detection.
// Reject anything that is not a non-negative finite number up front.
const tolerance = Number(process.argv[3] ?? '0');
if (!Number.isFinite(tolerance) || tolerance < 0) {
  fail(`invalid tolerance "${process.argv[3]}": expected a non-negative finite number`);
}

// Load a required JSON artifact from the run root; a missing file means the
// run cannot be replayed and fails the gate immediately.
const readJson = (name) => {
  const filePath = path.join(root, name);
  if (!fs.existsSync(filePath)) {
    fail(`missing replay capability: ${name} is required`);
  }
  return JSON.parse(fs.readFileSync(filePath, 'utf8'));
};

// Load the replayable pipeline definition and the report whose outputs it claims.
const pipeline = readJson('pipeline.json');
const report = readJson('report.json');

// Replay state: seeded with the pipeline's declared input values; step outputs
// are added during replay so later steps can consume them.
const valueByInput = new Map(pipeline.inputs.map((x) => [x.id, x.value]));

// Re-execute one deterministic operation over already-resolved input values.
// Unknown operations abort the gate rather than guessing at semantics.
const applyOperation = (operation, values) => {
  switch (operation) {
    case 'sum':
      return values.reduce((acc, v) => acc + v, 0);
    case 'multiply':
      return values.reduce((acc, v) => acc * v, 1);
    case 'concat':
      return values.map((v) => `${v}`).join('');
    case 'sha256':
      return crypto.createHash('sha256').update(JSON.stringify(values)).digest('hex');
    default:
      fail(`missing replay capability: unsupported operation ${operation}`);
  }
};

// Replay every pipeline step in declaration order, threading each step's
// output back into the value table so later steps can consume it.
for (const step of pipeline.steps) {
  const resolvedInputs = [];
  for (const inputId of step.input_ids) {
    if (!valueByInput.has(inputId)) fail(`missing replay capability: unknown input id ${inputId}`);
    resolvedInputs.push(valueByInput.get(inputId));
  }
  valueByInput.set(step.output_id, applyOperation(step.operation, resolvedInputs));
}

// Collect replayed values for the outputs the pipeline promises to produce.
// Robustness fix: an expected_output_id that no step ever produced previously
// leaked through as `undefined` and reached the comparison below; fail fast
// with a precise message instead.
const replayOutputs = pipeline.expected_output_ids.map((id) => {
  if (!valueByInput.has(id)) {
    fail(`missing replay capability: expected output ${id} was never produced by any step`);
  }
  return { id, value: valueByInput.get(id) };
});
const reportMap = new Map(report.outputs.map((o) => [o.id, o.value]));

// Compare replayed values against the report's declared outputs. Numeric
// pairs are compared within `tolerance`; everything else must match exactly
// (structural equality via JSON serialization).
for (const replay of replayOutputs) {
  if (!reportMap.has(replay.id)) {
    fail(`output drift detected: report missing output ${replay.id}`);
  }
  const actual = reportMap.get(replay.id);
  if (typeof replay.value === 'number' && typeof actual === 'number') {
    const delta = Math.abs(replay.value - actual);
    if (delta > tolerance) {
      fail(`output drift detected: ${replay.id} delta=${delta} tolerance=${tolerance}`);
    }
  } else if (JSON.stringify(replay.value) !== JSON.stringify(actual)) {
    fail(`output drift detected: ${replay.id} mismatch`);
  }
}

// Persist a machine-readable PASS receipt for downstream gate aggregation.
const result = {
  check: 'reproducibility_check',
  status: 'PASS',
  replay_outputs: replayOutputs,
  tolerance,
};
const serialized = JSON.stringify(result, null, 2);

fs.writeFileSync(path.join(root, 'reproducibility_result.json'), `${serialized}\n`);
console.log('[reproducibility_check] PASS');
16 changes: 5 additions & 11 deletions docs/roadmap/STATUS.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"last_updated": "2026-04-03T00:00:00Z",
"revision_note": "Added the canonical Decision Object v1 schema package, example payload, and standards documentation to anchor CAC-bound decision interoperability and external verification workflows.",
"last_updated": "2026-03-31",
"revision_note": "Added CAC enforcement pipeline, schemas, CI gating, and CACert issuance/verification tooling.",
"initiatives": [
{
"id": "one-verified-workflow-lane",
Expand Down Expand Up @@ -60,24 +60,18 @@
"id": "provable-system-governance-provenance-unification",
"status": "in_progress",
"owner": "codex",
"notes": "Implementation-ready governance, provenance, isolation, sovereignty, and ATO-native evidence bundle specifications are published and awaiting narrowed execution through one golden workflow. Published C2PA-aligned CAC Decision Manifest profile and external verification contract for admissible cognition artifacts."
"notes": "Implementation-ready governance, provenance, isolation, sovereignty, and ATO-native evidence bundle specifications are published and awaiting narrowed execution through one golden workflow."
},
{
"id": "antigravity-multi-agent-ga-convergence",
"status": "in_progress",
"owner": "antigravity",
"notes": "Multi-agent prompt suites, bounded charters, and router activation are in place, but GA still depends on proving one deterministic closed loop rather than widening orchestration."
},
{
"id": "decision-object-canonicalization",
"status": "completed",
"owner": "codex",
"notes": "Published schemas/decision-object.schema.json plus a complete example and standards profile for CAC-bound deterministic verification."
}
],
"summary": {
"total_initiatives": 12,
"completed": 5,
"total_initiatives": 11,
"completed": 4,
"in_progress": 7,
"at_risk": 0
}
Expand Down
8 changes: 8 additions & 0 deletions examples/cac/failing_run/artifact_manifest.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
{
"artifacts": [
{
"path": "report.json",
"sha256": "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
}
]
}
35 changes: 35 additions & 0 deletions examples/cac/failing_run/decision_trace.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
{
"system_id": "summit-cognitive",
"run_id": "failing-run-001",
"deterministic": true,
"inputs": [
{
"id": "in_a",
"sha256": "d4735e3a265e16eee03f59718b9b5d03019c07d8b6c51f90da3a666eec13ab35",
"order": 0
},
{
"id": "in_b",
"sha256": "4e07408562bedb8b60ce05c1decfe3ad16b72230967de01f640b7e4729b49fce",
"order": 1
}
],
"steps": [
{
"id": "step_sum",
"order": 0,
"operation": "sum",
"input_ids": ["in_a", "in_b"],
"output_ids": ["out_sum"],
"deterministic": true
}
],
"outputs": [
{
"id": "out_sum",
"sha256": "ef2d127de37b942baad06145e54b0c619a1f22327b2ebbcf8f1f5a8a0f8fdb31",
"order": 0,
"produced_by_step_id": "step_sum"
}
]
}
8 changes: 8 additions & 0 deletions examples/cac/failing_run/failure_case_hash_mismatch.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
{
"case_id": "failing-hash-mismatch",
"check": "evidence_integrity_check",
"expected_status": "FAIL",
"actual_status": "FAIL",
"delta_explanation": "artifact_manifest.json declares a sha256 for report.json that does not match recomputed digest.",
"artifact_manifest_sha256": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
8 changes: 8 additions & 0 deletions examples/cac/failing_run/failure_case_missing_trace.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
{
"case_id": "failing-missing-trace-link",
"check": "admissibility_check",
"expected_status": "FAIL",
"actual_status": "FAIL",
"delta_explanation": "report output references trace_step_id step_missing that does not exist in decision_trace.steps.",
"artifact_manifest_sha256": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
8 changes: 8 additions & 0 deletions examples/cac/failing_run/failure_case_non_reproducible.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
{
"case_id": "failing-non-reproducible-output",
"check": "reproducibility_check",
"expected_status": "FAIL",
"actual_status": "FAIL",
"delta_explanation": "pipeline replay computes out_sum=5 while report.json declares out_sum=6.",
"artifact_manifest_sha256": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
8 changes: 8 additions & 0 deletions examples/cac/failing_run/hash_mismatch/failure_case.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
{
"case_id": "failing-hash-mismatch",
"check": "evidence_integrity_check",
"expected_status": "FAIL",
"actual_status": "FAIL",
"delta_explanation": "artifact_manifest.json declares a sha256 for report.json that does not match recomputed digest.",
"artifact_manifest_sha256": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
{
"case_id": "failing-missing-trace-link",
"check": "admissibility_check",
"expected_status": "FAIL",
"actual_status": "FAIL",
"delta_explanation": "report output references trace_step_id step_missing that does not exist in decision_trace.steps.",
"artifact_manifest_sha256": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
{
"case_id": "failing-non-reproducible-output",
"check": "reproducibility_check",
"expected_status": "FAIL",
"actual_status": "FAIL",
"delta_explanation": "pipeline replay computes out_sum=5 while report.json declares out_sum=6.",
"artifact_manifest_sha256": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
15 changes: 15 additions & 0 deletions examples/cac/failing_run/pipeline.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
{
"inputs": [
{ "id": "in_a", "value": 2 },
{ "id": "in_b", "value": 3 }
],
"steps": [
{
"id": "step_sum",
"operation": "sum",
"input_ids": ["in_a", "in_b"],
"output_id": "out_sum"
}
],
"expected_output_ids": ["out_sum"]
}
Loading
Loading