-
Notifications
You must be signed in to change notification settings - Fork 1
feat: enforce CAC admissibility pipeline and signed CACert issuance #23618
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,112 @@ | ||
#!/usr/bin/env node
// admissibility_check: validates report.json and decision_trace.json against
// the CAC schemas, rejects non-deterministic fields, and verifies complete
// lineage (inputs -> steps -> outputs, including producer attribution) before
// emitting a PASS result with a stable lineage digest.
//
// CLI: admissibility_check.mjs [runDir] [schemaDir]
import fs from 'node:fs';
import path from 'node:path';
import crypto from 'node:crypto';
import Ajv2020 from 'ajv/dist/2020.js';

const root = process.argv[2] ? path.resolve(process.argv[2]) : process.cwd();
const schemaRoot = process.argv[3]
  ? path.resolve(process.argv[3])
  : path.resolve(process.cwd(), 'schemas/cac');

const readJson = (p) => JSON.parse(fs.readFileSync(p, 'utf8'));
// Prints a FAIL line and exits non-zero; never returns.
const fail = (message) => {
  console.error(`[admissibility_check] FAIL: ${message}`);
  process.exit(1);
};

const reportPath = path.join(root, 'report.json');
const tracePath = path.join(root, 'decision_trace.json');
if (!fs.existsSync(reportPath)) fail('Missing report.json');
// FIX: message previously read "decision_trace.json exists and is complete:
// missing decision_trace.json", which described the requirement, not the error.
if (!fs.existsSync(tracePath)) fail('Missing decision_trace.json');

const report = readJson(reportPath);
const trace = readJson(tracePath);

// Draft 2020-12 validation; strict:false so project schema vocabulary
// extensions do not hard-error inside Ajv.
const ajv = new Ajv2020({ allErrors: true, strict: false });
const schemas = {
  report: readJson(path.join(schemaRoot, 'report.schema.json')),
  trace: readJson(path.join(schemaRoot, 'decision_trace.schema.json')),
};

for (const [kind, schema] of Object.entries(schemas)) {
  const validate = ajv.compile(schema);
  const data = kind === 'report' ? report : trace;
  if (!validate(data)) {
    fail(`Schema validation failed for ${kind}: ${ajv.errorsText(validate.errors)}`);
  }
}

// Deterministic evidence must not carry volatile fields anywhere in the tree.
const forbiddenFieldPattern = /(timestamp|created_at|updated_at|nonce|random|uuid|seed)/i;
const scanForForbidden = (value, keyPath = '$') => {
  if (!value || typeof value !== 'object') return;
  if (Array.isArray(value)) {
    value.forEach((item, i) => scanForForbidden(item, `${keyPath}[${i}]`));
    return;
  }
  for (const [key, child] of Object.entries(value)) {
    if (forbiddenFieldPattern.test(key)) {
      fail(`non-deterministic fields present: ${keyPath}.${key}`);
    }
    scanForForbidden(child, `${keyPath}.${key}`);
  }
};
scanForForbidden(report);
scanForForbidden(trace);

const inputSet = new Set(trace.inputs.map((i) => i.id));
const outputSet = new Set(trace.outputs.map((o) => o.id));
const stepSet = new Set(trace.steps.map((s) => s.id));
const outputsByStep = new Map(trace.steps.map((s) => [s.id, new Set(s.output_ids)]));

// Every report output must link to a real trace step and be declared in
// decision_trace.outputs.
for (const output of report.outputs) {
  if (!output.trace_step_id || !stepSet.has(output.trace_step_id)) {
    fail(`missing trace links for output ${output.id}`);
  }
  if (!outputSet.has(output.id)) {
    fail(`every output maps to trace steps: report output ${output.id} absent from decision_trace.outputs`);
  }
}

// FIX: validate producer attribution. Previously produced_by_step_id was
// never checked, so a tampered trace could pass with wrong (or nonexistent)
// producer links. Each trace output must name an existing step, and that
// step must actually list the output among its output_ids.
for (const output of trace.outputs) {
  const producer = output.produced_by_step_id;
  if (!producer || !stepSet.has(producer)) {
    fail(`incomplete lineage: output ${output.id} names unknown producer step ${producer}`);
  }
  if (!outputsByStep.get(producer).has(output.id)) {
    fail(`incomplete lineage: step ${producer} does not emit output ${output.id}`);
  }
}

// Steps may reference only declared inputs and outputs.
for (const step of trace.steps) {
  for (const inputId of step.input_ids) {
    if (!inputSet.has(inputId)) {
      fail(`all inputs are accounted for: step ${step.id} references unknown input ${inputId}`);
    }
  }
  for (const outputId of step.output_ids) {
    if (!outputSet.has(outputId)) {
      fail(`incomplete lineage: step ${step.id} references unknown output ${outputId}`);
    }
  }
}

// No orphan outputs: everything declared must be produced by some step.
const referencedOutputs = new Set(trace.steps.flatMap((s) => s.output_ids));
for (const outputId of outputSet) {
  if (!referencedOutputs.has(outputId)) {
    fail(`no orphan transformations: output ${outputId} has no producing step`);
  }
}

// Stable digest over the lineage graph: sorted id sets plus per-step edges,
// serialized compactly so identical traces always hash identically.
const lineageDigest = crypto
  .createHash('sha256')
  .update(
    JSON.stringify({
      inputs: [...inputSet].sort(),
      outputs: [...outputSet].sort(),
      steps: trace.steps.map((s) => ({
        id: s.id,
        input_ids: [...s.input_ids],
        output_ids: [...s.output_ids],
      })),
    }),
  )
  .digest('hex');

const result = {
  check: 'admissibility_check',
  status: 'PASS',
  lineage_digest_sha256: lineageDigest,
};
fs.writeFileSync(path.join(root, 'admissibility_result.json'), `${JSON.stringify(result, null, 2)}\n`);
console.log('[admissibility_check] PASS');
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,63 @@ | ||
#!/usr/bin/env node
// evidence_integrity_check: recomputes each artifact's SHA-256 against
// artifact_manifest.json and verifies the manifest's Cosign signature
// (cosign verify-blob with a local public key).
//
// CLI: evidence_integrity_check.mjs [runDir]
import fs from 'node:fs';
import path from 'node:path';
import crypto from 'node:crypto';
import { spawnSync } from 'node:child_process';

const root = process.argv[2] ? path.resolve(process.argv[2]) : process.cwd();

// Prints a FAIL line and exits non-zero; never returns.
const fail = (message) => {
  console.error(`[evidence_integrity_check] FAIL: ${message}`);
  process.exit(1);
};

const manifestPath = path.join(root, 'artifact_manifest.json');
if (!fs.existsSync(manifestPath)) fail('unsigned artifact: artifact_manifest.json missing');
const manifest = JSON.parse(fs.readFileSync(manifestPath, 'utf8'));
if (!Array.isArray(manifest.artifacts) || manifest.artifacts.length === 0) {
  fail('hash-addressable artifacts required');
}

for (const artifact of manifest.artifacts) {
  if (!artifact.path || !artifact.sha256) {
    fail('hash-addressable artifacts required: each artifact needs path and sha256');
  }
  // FIX: reject manifest entries that escape the evidence root (e.g. "../…")
  // so a crafted manifest cannot point this check at files outside the bundle.
  const artifactPath = path.resolve(root, artifact.path);
  if (artifactPath !== root && !artifactPath.startsWith(root + path.sep)) {
    fail(`hash-addressable artifacts required: path escapes evidence root: ${artifact.path}`);
  }
  if (!fs.existsSync(artifactPath)) {
    fail(`hash mismatch: missing artifact ${artifact.path}`);
  }
  const actual = crypto.createHash('sha256').update(fs.readFileSync(artifactPath)).digest('hex');
  if (actual !== artifact.sha256) {
    fail(`hash mismatch: ${artifact.path}`);
  }
}

const sigPath = path.join(root, 'artifact_manifest.sig');
const pubKeyPath = path.join(root, 'cosign.pub');
if (!fs.existsSync(sigPath) || !fs.existsSync(pubKeyPath)) {
  fail('unsigned artifact: Cosign signature/public key missing');
}

const verify = spawnSync(
  'cosign',
  ['verify-blob', '--key', pubKeyPath, '--signature', sigPath, manifestPath],
  { encoding: 'utf8' },
);
// FIX: when the cosign binary is missing or unlaunchable, spawnSync sets
// `error` and leaves `status` null with empty stdio — the old check then
// failed with a blank "signature invalid:" message. Surface the real cause.
if (verify.error) {
  fail(`signature invalid: could not run cosign (${verify.error.message})`);
}
if (verify.status !== 0) {
  fail(`signature invalid: ${verify.stderr || verify.stdout}`.trim());
}

const result = {
  check: 'evidence_integrity_check',
  status: 'PASS',
  verified_artifacts: manifest.artifacts.length,
};
fs.writeFileSync(path.join(root, 'evidence_integrity_result.json'), `${JSON.stringify(result, null, 2)}\n`);
console.log('[evidence_integrity_check] PASS');
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,70 @@ | ||
#!/usr/bin/env node
// reproducibility_check: deterministically replays pipeline.json and compares
// the replayed outputs with report.json, allowing an optional non-negative
// numeric tolerance for number-valued outputs.
//
// CLI: reproducibility_check.mjs [runDir] [tolerance]
import fs from 'node:fs';
import path from 'node:path';
import crypto from 'node:crypto';

const root = process.argv[2] ? path.resolve(process.argv[2]) : process.cwd();
const tolerance = Number(process.argv[3] ?? '0');

// Prints a FAIL line and exits non-zero; never returns.
const fail = (message) => {
  console.error(`[reproducibility_check] FAIL: ${message}`);
  process.exit(1);
};

// FIX: a malformed tolerance argument produced NaN, and `delta > NaN` is
// always false — so every numeric drift silently PASSED. Reject bad values
// (NaN, Infinity, negatives) before any comparison happens.
if (!Number.isFinite(tolerance) || tolerance < 0) {
  fail(`invalid tolerance argument: ${process.argv[3]}`);
}

const readJson = (name) => {
  const p = path.join(root, name);
  if (!fs.existsSync(p)) fail(`missing replay capability: ${name} is required`);
  return JSON.parse(fs.readFileSync(p, 'utf8'));
};

const pipeline = readJson('pipeline.json');
const report = readJson('report.json');

// id -> value store, seeded with declared inputs; step outputs are added
// as the replay progresses.
const valueByInput = new Map(pipeline.inputs.map((x) => [x.id, x.value]));

// Supported deterministic operations; anything else aborts the replay.
const applyOperation = (operation, values) => {
  if (operation === 'sum') return values.reduce((a, b) => a + b, 0);
  if (operation === 'multiply') return values.reduce((a, b) => a * b, 1);
  if (operation === 'concat') return values.map((v) => `${v}`).join('');
  if (operation === 'sha256') {
    return crypto.createHash('sha256').update(JSON.stringify(values)).digest('hex');
  }
  fail(`missing replay capability: unsupported operation ${operation}`);
};

for (const step of pipeline.steps) {
  const inputValues = step.input_ids.map((id) => {
    if (!valueByInput.has(id)) fail(`missing replay capability: unknown input id ${id}`);
    return valueByInput.get(id);
  });
  valueByInput.set(step.output_id, applyOperation(step.operation, inputValues));
}

// FIX: an expected output that no step produces previously flowed through as
// `undefined` and surfaced only as a confusing mismatch; report it directly.
const replayOutputs = pipeline.expected_output_ids.map((id) => {
  if (!valueByInput.has(id)) fail(`missing replay capability: no step produces output ${id}`);
  return { id, value: valueByInput.get(id) };
});
const reportMap = new Map(report.outputs.map((o) => [o.id, o.value]));

for (const replay of replayOutputs) {
  if (!reportMap.has(replay.id)) {
    fail(`output drift detected: report missing output ${replay.id}`);
  }
  const actual = reportMap.get(replay.id);
  if (typeof replay.value === 'number' && typeof actual === 'number') {
    // Numeric outputs compare within tolerance; everything else must match
    // exactly (via canonical JSON serialization).
    const delta = Math.abs(replay.value - actual);
    if (delta > tolerance) {
      fail(`output drift detected: ${replay.id} delta=${delta} tolerance=${tolerance}`);
    }
  } else if (JSON.stringify(replay.value) !== JSON.stringify(actual)) {
    fail(`output drift detected: ${replay.id} mismatch`);
  }
}

const result = {
  check: 'reproducibility_check',
  status: 'PASS',
  replay_outputs: replayOutputs,
  tolerance,
};

fs.writeFileSync(path.join(root, 'reproducibility_result.json'), `${JSON.stringify(result, null, 2)}\n`);
console.log('[reproducibility_check] PASS');
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,8 @@ | ||
| { | ||
| "artifacts": [ | ||
| { | ||
| "path": "report.json", | ||
| "sha256": "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" | ||
| } | ||
| ] | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,35 @@ | ||
| { | ||
| "system_id": "summit-cognitive", | ||
| "run_id": "failing-run-001", | ||
| "deterministic": true, | ||
| "inputs": [ | ||
| { | ||
| "id": "in_a", | ||
| "sha256": "d4735e3a265e16eee03f59718b9b5d03019c07d8b6c51f90da3a666eec13ab35", | ||
| "order": 0 | ||
| }, | ||
| { | ||
| "id": "in_b", | ||
| "sha256": "4e07408562bedb8b60ce05c1decfe3ad16b72230967de01f640b7e4729b49fce", | ||
| "order": 1 | ||
| } | ||
| ], | ||
| "steps": [ | ||
| { | ||
| "id": "step_sum", | ||
| "order": 0, | ||
| "operation": "sum", | ||
| "input_ids": ["in_a", "in_b"], | ||
| "output_ids": ["out_sum"], | ||
| "deterministic": true | ||
| } | ||
| ], | ||
| "outputs": [ | ||
| { | ||
| "id": "out_sum", | ||
| "sha256": "ef2d127de37b942baad06145e54b0c619a1f22327b2ebbcf8f1f5a8a0f8fdb31", | ||
| "order": 0, | ||
| "produced_by_step_id": "step_sum" | ||
| } | ||
| ] | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,8 @@ | ||
| { | ||
| "case_id": "failing-hash-mismatch", | ||
| "check": "evidence_integrity_check", | ||
| "expected_status": "FAIL", | ||
| "actual_status": "FAIL", | ||
| "delta_explanation": "artifact_manifest.json declares a sha256 for report.json that does not match recomputed digest.", | ||
| "artifact_manifest_sha256": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,8 @@ | ||
| { | ||
| "case_id": "failing-missing-trace-link", | ||
| "check": "admissibility_check", | ||
| "expected_status": "FAIL", | ||
| "actual_status": "FAIL", | ||
| "delta_explanation": "report output references trace_step_id step_missing that does not exist in decision_trace.steps.", | ||
| "artifact_manifest_sha256": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,8 @@ | ||
| { | ||
| "case_id": "failing-non-reproducible-output", | ||
| "check": "reproducibility_check", | ||
| "expected_status": "FAIL", | ||
| "actual_status": "FAIL", | ||
| "delta_explanation": "pipeline replay computes out_sum=5 while report.json declares out_sum=6.", | ||
| "artifact_manifest_sha256": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,8 @@ | ||
| { | ||
| "case_id": "failing-hash-mismatch", | ||
| "check": "evidence_integrity_check", | ||
| "expected_status": "FAIL", | ||
| "actual_status": "FAIL", | ||
| "delta_explanation": "artifact_manifest.json declares a sha256 for report.json that does not match recomputed digest.", | ||
| "artifact_manifest_sha256": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,8 @@ | ||
| { | ||
| "case_id": "failing-missing-trace-link", | ||
| "check": "admissibility_check", | ||
| "expected_status": "FAIL", | ||
| "actual_status": "FAIL", | ||
| "delta_explanation": "report output references trace_step_id step_missing that does not exist in decision_trace.steps.", | ||
| "artifact_manifest_sha256": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,8 @@ | ||
| { | ||
| "case_id": "failing-non-reproducible-output", | ||
| "check": "reproducibility_check", | ||
| "expected_status": "FAIL", | ||
| "actual_status": "FAIL", | ||
| "delta_explanation": "pipeline replay computes out_sum=5 while report.json declares out_sum=6.", | ||
| "artifact_manifest_sha256": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,15 @@ | ||
| { | ||
| "inputs": [ | ||
| { "id": "in_a", "value": 2 }, | ||
| { "id": "in_b", "value": 3 } | ||
| ], | ||
| "steps": [ | ||
| { | ||
| "id": "step_sum", | ||
| "operation": "sum", | ||
| "input_ids": ["in_a", "in_b"], | ||
| "output_id": "out_sum" | ||
| } | ||
| ], | ||
| "expected_output_ids": ["out_sum"] | ||
| } |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The admissibility check only tracks `decision_trace.outputs[*].id` and never validates that each `produced_by_step_id` points to an existing step that actually emits that output. This allows a tampered trace to pass with incorrect provenance links (wrong producer attribution), which weakens the lineage guarantees this gate is supposed to enforce.

Useful? React with 👍 / 👎.