Merge pull request 'fix: feat: implement evidence/resources and evidence/protocol logging (#1059)' (#1078) from fix/issue-1059 into master
This commit is contained in:
commit
a6f26453a0
8 changed files with 813 additions and 1 deletions
2
STATE.md
2
STATE.md
|
|
@ -50,4 +50,4 @@
|
||||||
- [2026-03-15] txnBot AGENTS.md ENVIRONMENT enum is stale (#784)
|
- [2026-03-15] txnBot AGENTS.md ENVIRONMENT enum is stale (#784)
|
||||||
- [2026-03-20] Adoption milestone state ambiguity in MEMORY.md (#1068)
|
- [2026-03-20] Adoption milestone state ambiguity in MEMORY.md (#1068)
|
||||||
- [2026-03-20] OptimizerV3Push3 as IOptimizer always returns bear defaults — integration risk (#1063)
|
- [2026-03-20] OptimizerV3Push3 as IOptimizer always returns bear defaults — integration risk (#1063)
|
||||||
- [2026-03-20] Red-team schema should add candidate_commit field (#1066)
|
- [2026-03-20] implement evidence/resources and evidence/protocol logging (#1059): formulas/run-resources.toml (disk/RAM/API/CI sense formula, daily cron 06:00 UTC) and formulas/run-protocol.toml (TVL/fees/positions/rebalances sense formula, daily cron 07:00 UTC); evidence/resources/ and evidence/protocol/ directories; schemas in evidence/README.md
|
||||||
|
|
|
||||||
|
|
@ -22,6 +22,10 @@ evidence/
|
||||||
YYYY-MM-DD-prNNN.json # per-scenario pass/fail, gate decision
|
YYYY-MM-DD-prNNN.json # per-scenario pass/fail, gate decision
|
||||||
user-test/
|
user-test/
|
||||||
YYYY-MM-DD.json # per-persona reports, screenshot refs, friction points
|
YYYY-MM-DD.json # per-persona reports, screenshot refs, friction points
|
||||||
|
resources/
|
||||||
|
YYYY-MM-DD.json # disk, RAM, API call counts, budget burn, CI queue depth
|
||||||
|
protocol/
|
||||||
|
YYYY-MM-DD.json # TVL, accumulated fees, position count, rebalance frequency
|
||||||
```
|
```
|
||||||
|
|
||||||
## Delivery Pattern
|
## Delivery Pattern
|
||||||
|
|
@ -215,3 +219,114 @@ Records a UX evaluation run across simulated personas.
|
||||||
| `personas_total` | integer | Total personas evaluated |
|
| `personas_total` | integer | Total personas evaluated |
|
||||||
| `critical_friction_points` | array of strings | Friction points that blocked task completion |
|
| `critical_friction_points` | array of strings | Friction points that blocked task completion |
|
||||||
| `verdict` | string | `"pass"` if all personas completed, `"fail"` otherwise |
|
| `verdict` | string | `"pass"` if all personas completed, `"fail"` otherwise |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Schema: `resources/YYYY-MM-DD.json`
|
||||||
|
|
||||||
|
Records one infrastructure resource snapshot.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"date": "YYYY-MM-DD",
|
||||||
|
"disk": {
|
||||||
|
"used_bytes": 85899345920,
|
||||||
|
"total_bytes": 107374182400,
|
||||||
|
"used_pct": 80.0
|
||||||
|
},
|
||||||
|
"ram": {
|
||||||
|
"used_bytes": 3221225472,
|
||||||
|
"total_bytes": 8589934592,
|
||||||
|
"used_pct": 37.5
|
||||||
|
},
|
||||||
|
"api": {
|
||||||
|
"anthropic_calls_24h": 142,
|
||||||
|
"anthropic_budget_usd_used": 4.87,
|
||||||
|
"anthropic_budget_usd_limit": 50.0,
|
||||||
|
"anthropic_budget_pct": 9.7
|
||||||
|
},
|
||||||
|
"ci": {
|
||||||
|
"woodpecker_queue_depth": 2,
|
||||||
|
"woodpecker_running": 1
|
||||||
|
},
|
||||||
|
"staleness_threshold_days": 1,
|
||||||
|
"verdict": "ok" | "warn" | "critical"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
| Field | Type | Description |
|
||||||
|
|-------|------|-------------|
|
||||||
|
| `date` | string (ISO) | Date of the snapshot |
|
||||||
|
| `disk.used_bytes` | integer | Bytes used on the primary volume |
|
||||||
|
| `disk.total_bytes` | integer | Total bytes on the primary volume |
|
||||||
|
| `disk.used_pct` | number | Percentage of disk used |
|
||||||
|
| `ram.used_bytes` | integer | Bytes of RAM in use |
|
||||||
|
| `ram.total_bytes` | integer | Total bytes of RAM |
|
||||||
|
| `ram.used_pct` | number | Percentage of RAM used |
|
||||||
|
| `api.anthropic_calls_24h` | integer | Anthropic API calls in the past 24 hours |
|
||||||
|
| `api.anthropic_budget_usd_used` | number | USD spent against the Anthropic budget |
|
||||||
|
| `api.anthropic_budget_usd_limit` | number | Configured Anthropic budget ceiling in USD |
|
||||||
|
| `api.anthropic_budget_pct` | number | Percentage of budget consumed |
|
||||||
|
| `ci.woodpecker_queue_depth` | integer | Number of jobs waiting in the Woodpecker CI queue |
|
||||||
|
| `ci.woodpecker_running` | integer | Number of Woodpecker jobs currently running |
|
||||||
|
| `staleness_threshold_days` | integer | Maximum age in days before this record is considered stale (always 1) |
|
||||||
|
| `verdict` | string | `"ok"` (all metrics normal), `"warn"` (≥80% on any dimension), or `"critical"` (≥95% on any dimension) |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Schema: `protocol/YYYY-MM-DD.json`
|
||||||
|
|
||||||
|
Records one on-chain protocol health snapshot.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"date": "YYYY-MM-DD",
|
||||||
|
"block_number": 24500000,
|
||||||
|
"tvl_eth": "1234567890000000000000",
|
||||||
|
"tvl_eth_formatted": "1234.57",
|
||||||
|
"accumulated_fees_eth": "12345678900000000",
|
||||||
|
"accumulated_fees_eth_formatted": "0.012",
|
||||||
|
"position_count": 3,
|
||||||
|
"positions": [
|
||||||
|
{
|
||||||
|
"name": "floor",
|
||||||
|
"tick_lower": -887272,
|
||||||
|
"tick_upper": -200000,
|
||||||
|
"liquidity": "987654321000000000"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "anchor",
|
||||||
|
"tick_lower": -200000,
|
||||||
|
"tick_upper": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "discovery",
|
||||||
|
"tick_lower": 0,
|
||||||
|
"tick_upper": 887272
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"rebalance_count_24h": 4,
|
||||||
|
"last_rebalance_block": 24499800,
|
||||||
|
"staleness_threshold_days": 1,
|
||||||
|
"verdict": "healthy" | "degraded" | "offline"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
| Field | Type | Description |
|
||||||
|
|-------|------|-------------|
|
||||||
|
| `date` | string (ISO) | Date of the snapshot |
|
||||||
|
| `block_number` | integer | Block number at time of snapshot |
|
||||||
|
| `tvl_eth` | string (wei) | Total value locked across all LM positions in wei |
|
||||||
|
| `tvl_eth_formatted` | string | TVL formatted in ETH (2 dp) |
|
||||||
|
| `accumulated_fees_eth` | string (wei) | Fees accumulated by the LiquidityManager in wei |
|
||||||
|
| `accumulated_fees_eth_formatted` | string | Fees formatted in ETH (3 dp) |
|
||||||
|
| `position_count` | integer | Number of active Uniswap V3 positions (expected: 3) |
|
||||||
|
| `positions` | array | One entry per active position |
|
||||||
|
| `positions[].name` | string | Position label: `"floor"`, `"anchor"`, or `"discovery"` |
|
||||||
|
| `positions[].tick_lower` | integer | Lower tick boundary |
|
||||||
|
| `positions[].tick_upper` | integer | Upper tick boundary |
|
||||||
|
| `positions[].liquidity` | string | Liquidity amount in the position (wei-scale integer) |
|
||||||
|
| `rebalance_count_24h` | integer | Number of `recenter()` calls in the past 24 hours |
|
||||||
|
| `last_rebalance_block` | integer | Block number of the most recent `recenter()` call |
|
||||||
|
| `staleness_threshold_days` | integer | Maximum age in days before this record is considered stale (always 1) |
|
||||||
|
| `verdict` | string | `"healthy"` (positions active, TVL > 0), `"degraded"` (position_count < 3 or rebalance stalled), or `"offline"` (TVL = 0 or contract unreachable) |
|
||||||
|
|
|
||||||
0
evidence/protocol/.gitkeep
Normal file
0
evidence/protocol/.gitkeep
Normal file
0
evidence/resources/.gitkeep
Normal file
0
evidence/resources/.gitkeep
Normal file
187
formulas/run-protocol.toml
Normal file
187
formulas/run-protocol.toml
Normal file
|
|
@ -0,0 +1,187 @@
|
||||||
|
# formulas/run-protocol.toml
|
||||||
|
#
|
||||||
|
# On-chain protocol health snapshot — collect TVL, accumulated fees,
|
||||||
|
# position count, and rebalance frequency from the deployed LiquidityManager.
|
||||||
|
# Write a structured JSON evidence file for planner and predictor consumption.
|
||||||
|
#
|
||||||
|
# Type: sense. Read-only — produces metrics only, no git artifacts.
|
||||||
|
#
|
||||||
|
# Staleness threshold: 1 day (matches evidence/protocol/ schema).
|
||||||
|
# Cron: daily at 07:00 UTC (staggered 1 h after run-resources).
|
||||||
|
|
||||||
|
[formula]
|
||||||
|
id = "run-protocol"
|
||||||
|
name = "On-Chain Protocol Health Snapshot"
|
||||||
|
description = "Collect TVL, accumulated fees, position count, and rebalance frequency from the deployed LiquidityManager; write evidence/protocol/{date}.json."
|
||||||
|
type = "sense"
|
||||||
|
# "sense" → read-only, produces metrics only
|
||||||
|
# "act" → produces git artifacts (cf. run-evolution, run-red-team)
|
||||||
|
|
||||||
|
# ── Cron ───────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
[cron]
|
||||||
|
schedule = "0 7 * * *" # daily at 07:00 UTC (1 h after run-resources)
|
||||||
|
description = "Matches 1-day staleness threshold — one snapshot per day keeps the record fresh."
|
||||||
|
|
||||||
|
# ── Inputs ─────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
[inputs.rpc_url]
|
||||||
|
type = "string"
|
||||||
|
required = true
|
||||||
|
description = """
|
||||||
|
Base network RPC endpoint used to query on-chain state.
|
||||||
|
Example: https://mainnet.base.org or a running Anvil fork URL.
|
||||||
|
"""
|
||||||
|
|
||||||
|
[inputs.deployments_file]
|
||||||
|
type = "string"
|
||||||
|
required = false
|
||||||
|
default = "onchain/deployments-local.json"
|
||||||
|
description = """
|
||||||
|
Path to the deployments JSON file containing contract addresses.
|
||||||
|
The formula reads LiquidityManager address from this file.
|
||||||
|
Use onchain/deployments.json for mainnet; onchain/deployments-local.json
|
||||||
|
for a local Anvil fork.
|
||||||
|
"""
|
||||||
|
|
||||||
|
[inputs.lookback_blocks]
|
||||||
|
type = "integer"
|
||||||
|
required = false
|
||||||
|
default = 7200
|
||||||
|
description = """
|
||||||
|
Number of blocks to scan for Recenter events when computing
|
||||||
|
rebalance_count_24h (~24 h of Base blocks at ~2 s/block).
|
||||||
|
"""
|
||||||
|
|
||||||
|
# ── Execution ──────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
[execution]
|
||||||
|
script = "scripts/harb-evaluator/run-protocol.sh"
|
||||||
|
invocation = "RPC_URL={rpc_url} DEPLOYMENTS_FILE={deployments_file} LOOKBACK_BLOCKS={lookback_blocks} bash scripts/harb-evaluator/run-protocol.sh"
|
||||||
|
|
||||||
|
# Exit codes:
|
||||||
|
# 0 snapshot written successfully
|
||||||
|
# 2 infrastructure error (RPC unreachable, missing deployments file, forge unavailable, etc.)
|
||||||
|
|
||||||
|
# ── Steps ──────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
[[steps]]
|
||||||
|
id = "read-addresses"
|
||||||
|
description = """
|
||||||
|
Read the LiquidityManager contract address from {deployments_file}.
|
||||||
|
Fail with exit code 2 if the file is absent or the address is missing.
|
||||||
|
"""
|
||||||
|
|
||||||
|
[[steps]]
|
||||||
|
id = "collect-tvl"
|
||||||
|
description = """
|
||||||
|
Query LiquidityManager total ETH via forge script LmTotalEth.s.sol
|
||||||
|
against {rpc_url}.
|
||||||
|
Records tvl_eth (wei string) and tvl_eth_formatted (ETH, 2 dp).
|
||||||
|
LmTotalEth.s.sol uses exact Uniswap V3 integer math (LiquidityAmounts +
|
||||||
|
TickMath) to sum free ETH, free WETH, and ETH locked across all three
|
||||||
|
positions (floor, anchor, discovery).
|
||||||
|
"""
|
||||||
|
forge_script = "onchain/script/LmTotalEth.s.sol"
|
||||||
|
|
||||||
|
[[steps]]
|
||||||
|
id = "collect-fees"
|
||||||
|
description = """
|
||||||
|
Query accumulated protocol fees from the LiquidityManager via cast call:
|
||||||
|
cast call $LM "accumulatedFees()(uint256)"
|
||||||
|
Records accumulated_fees_eth (wei string) and accumulated_fees_eth_formatted
|
||||||
|
(ETH, 3 dp).
|
||||||
|
Falls back to 0 gracefully if the function reverts or is not present on
|
||||||
|
the deployed contract (older deployment without fee tracking).
|
||||||
|
"""
|
||||||
|
|
||||||
|
[[steps]]
|
||||||
|
id = "collect-positions"
|
||||||
|
description = """
|
||||||
|
Query the three Uniswap V3 positions held by the LiquidityManager:
|
||||||
|
LiquidityManager.positions(0) → (liquidity, tickLower, tickUpper) # FLOOR
|
||||||
|
LiquidityManager.positions(1) → (liquidity, tickLower, tickUpper) # ANCHOR
|
||||||
|
LiquidityManager.positions(2) → (liquidity, tickLower, tickUpper) # DISCOVERY
|
||||||
|
Records position_count (number of positions with liquidity > 0) and the
|
||||||
|
positions array.
|
||||||
|
"""
|
||||||
|
|
||||||
|
[[steps]]
|
||||||
|
id = "collect-rebalances"
|
||||||
|
description = """
|
||||||
|
Count Recentered events emitted by the LiquidityManager in the past
|
||||||
|
{lookback_blocks} blocks via eth_getLogs.
|
||||||
|
Records:
|
||||||
|
- rebalance_count_24h: total Recentered event count in the window.
|
||||||
|
- last_rebalance_block: block number of the most recent Recentered event
|
||||||
|
(0 if none found in the window).
|
||||||
|
"""
|
||||||
|
event_signature = "Recentered(int24,bool)"
|
||||||
|
|
||||||
|
[[steps]]
|
||||||
|
id = "collect"
|
||||||
|
description = """
|
||||||
|
Assemble all collected metrics into evidence/protocol/{date}.json.
|
||||||
|
Compute verdict:
|
||||||
|
- "offline" if tvl_eth = 0 or RPC was unreachable.
|
||||||
|
- "degraded" if position_count < 3, or rebalance_count_24h = 0 and the
|
||||||
|
protocol has been live for > 1 day.
|
||||||
|
- "healthy" otherwise.
|
||||||
|
Write the file conforming to the schema in evidence/README.md
|
||||||
|
## Schema: protocol/YYYY-MM-DD.json.
|
||||||
|
"""
|
||||||
|
output = "evidence/protocol/{date}.json"
|
||||||
|
schema = "evidence/README.md" # see ## Schema: protocol/YYYY-MM-DD.json
|
||||||
|
|
||||||
|
[[steps]]
|
||||||
|
id = "deliver"
|
||||||
|
description = """
|
||||||
|
Commit evidence/protocol/{date}.json to main.
|
||||||
|
Post a one-line summary comment to the originating issue (if any):
|
||||||
|
verdict, tvl_eth_formatted, accumulated_fees_eth_formatted,
|
||||||
|
position_count, rebalance_count_24h.
|
||||||
|
On "degraded" or "offline": highlight the failing dimension and its value.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# ── Products ───────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
[products.evidence_file]
|
||||||
|
path = "evidence/protocol/{date}.json"
|
||||||
|
delivery = "commit to main"
|
||||||
|
schema = "evidence/README.md" # see ## Schema: protocol/YYYY-MM-DD.json
|
||||||
|
|
||||||
|
[products.issue_comment]
|
||||||
|
delivery = "post to originating issue (if any)"
|
||||||
|
content = "verdict, tvl_eth_formatted, accumulated_fees_eth_formatted, position_count, rebalance_count_24h"
|
||||||
|
on_degraded = "highlight failing dimension and its current value"
|
||||||
|
|
||||||
|
# ── Resources ──────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
[resources]
|
||||||
|
profile = "light"
|
||||||
|
compute = "local — forge script + cast calls only; no Anvil or Docker startup required"
|
||||||
|
rpc = "Base network RPC ({rpc_url}) — read-only calls"
|
||||||
|
concurrency = "safe to run in parallel with other formulas"
|
||||||
|
|
||||||
|
# ── Notes ──────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
[notes]
|
||||||
|
tvl_metric = """
|
||||||
|
TVL is measured as LiquidityManager total ETH: free ETH + free WETH + ETH
|
||||||
|
locked across all three Uniswap V3 positions (floor, anchor, discovery).
|
||||||
|
Uses the same LmTotalEth.s.sol forge script as run-red-team to ensure
|
||||||
|
consistent measurement methodology.
|
||||||
|
"""
|
||||||
|
|
||||||
|
rebalance_staleness = """
|
||||||
|
A zero rebalance_count_24h on an established deployment indicates the
|
||||||
|
recenter() upkeep bot (services/txnBot) has stalled. The "degraded"
|
||||||
|
verdict triggers a planner alert. On a fresh deployment (< 1 day old)
|
||||||
|
zero rebalances is expected and does not trigger degraded.
|
||||||
|
"""
|
||||||
|
|
||||||
|
fees_fallback = """
|
||||||
|
accumulated_fees_eth falls back to 0 for deployments without fee tracking.
|
||||||
|
The verdict is not affected by a zero fee value alone — only TVL and
|
||||||
|
position_count drive the verdict.
|
||||||
|
"""
|
||||||
155
formulas/run-resources.toml
Normal file
155
formulas/run-resources.toml
Normal file
|
|
@ -0,0 +1,155 @@
|
||||||
|
# formulas/run-resources.toml
|
||||||
|
#
|
||||||
|
# Infrastructure resource snapshot — collect disk usage, RAM trends,
|
||||||
|
# Anthropic API call counts and budget burn, and Woodpecker CI queue depth.
|
||||||
|
# Write a structured JSON evidence file for planner and predictor consumption.
|
||||||
|
#
|
||||||
|
# Type: sense. Read-only — produces metrics only, no git artifacts.
|
||||||
|
#
|
||||||
|
# Staleness threshold: 1 day (matches evidence/resources/ schema).
|
||||||
|
# Cron: daily at 06:00 UTC.
|
||||||
|
|
||||||
|
[formula]
|
||||||
|
id = "run-resources"
|
||||||
|
name = "Infrastructure Resource Snapshot"
|
||||||
|
description = "Collect disk, RAM, API usage, Anthropic budget burn, and CI queue depth; write evidence/resources/{date}.json."
|
||||||
|
type = "sense"
|
||||||
|
# "sense" → read-only, produces metrics only
|
||||||
|
# "act" → produces git artifacts (cf. run-evolution, run-red-team)
|
||||||
|
|
||||||
|
# ── Cron ───────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
[cron]
|
||||||
|
schedule = "0 6 * * *" # daily at 06:00 UTC
|
||||||
|
description = "Matches 1-day staleness threshold — one snapshot per day keeps the record fresh."
|
||||||
|
|
||||||
|
# ── Inputs ─────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
[inputs.disk_path]
|
||||||
|
type = "string"
|
||||||
|
required = false
|
||||||
|
default = "/"
|
||||||
|
description = "Filesystem path to measure disk usage for (passed to df)."
|
||||||
|
|
||||||
|
[inputs.anthropic_budget_usd_limit]
|
||||||
|
type = "number"
|
||||||
|
required = false
|
||||||
|
default = 50.0
|
||||||
|
description = "Configured Anthropic budget ceiling in USD. Used to compute budget_pct in the evidence record."
|
||||||
|
|
||||||
|
[inputs.woodpecker_api_url]
|
||||||
|
type = "string"
|
||||||
|
required = false
|
||||||
|
default = "http://localhost:8090"
|
||||||
|
description = "Base URL of the Woodpecker CI API. Set to empty string to skip CI metrics."
|
||||||
|
|
||||||
|
# ── Execution ──────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
[execution]
|
||||||
|
script = "scripts/harb-evaluator/run-resources.sh"
|
||||||
|
invocation = "DISK_PATH={disk_path} ANTHROPIC_BUDGET_USD_LIMIT={anthropic_budget_usd_limit} WOODPECKER_API_URL={woodpecker_api_url} bash scripts/harb-evaluator/run-resources.sh"
|
||||||
|
|
||||||
|
# Exit codes:
|
||||||
|
# 0 snapshot written successfully
|
||||||
|
# 2 infrastructure error (disk command unavailable, JSON write failed, etc.)
|
||||||
|
|
||||||
|
# ── Steps ──────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
[[steps]]
|
||||||
|
id = "collect-disk"
|
||||||
|
description = """
|
||||||
|
Measure disk usage on {disk_path} via `df -B1 {disk_path}`.
|
||||||
|
Extract used_bytes, total_bytes, and used_pct.
|
||||||
|
"""
|
||||||
|
|
||||||
|
[[steps]]
|
||||||
|
id = "collect-ram"
|
||||||
|
description = """
|
||||||
|
Measure RAM usage via `free -b` (Linux) or `vm_stat` (macOS).
|
||||||
|
Extract used_bytes, total_bytes, and used_pct.
|
||||||
|
"""
|
||||||
|
|
||||||
|
[[steps]]
|
||||||
|
id = "collect-api"
|
||||||
|
description = """
|
||||||
|
Collect Anthropic API metrics:
|
||||||
|
- anthropic_calls_24h: count of API calls in the past 24 hours (read from
|
||||||
|
tmp/anthropic-call-log.jsonl if present; 0 if absent).
|
||||||
|
- anthropic_budget_usd_used: sum of cost_usd entries in the call log for
|
||||||
|
the current calendar day (UTC); 0 if log absent.
|
||||||
|
- anthropic_budget_usd_limit: from {anthropic_budget_usd_limit} input.
|
||||||
|
- anthropic_budget_pct: used / limit * 100 (0 if limit = 0).
|
||||||
|
"""
|
||||||
|
call_log = "tmp/anthropic-call-log.jsonl"
|
||||||
|
|
||||||
|
[[steps]]
|
||||||
|
id = "collect-ci"
|
||||||
|
description = """
|
||||||
|
Query Woodpecker CI API for queue state.
|
||||||
|
GET {woodpecker_api_url}/api/queue/info:
|
||||||
|
- woodpecker_queue_depth: length of the waiting queue.
|
||||||
|
- woodpecker_running: count of currently running jobs.
|
||||||
|
Skipped gracefully (fields set to null) when {woodpecker_api_url} is empty
|
||||||
|
or the endpoint is unreachable.
|
||||||
|
"""
|
||||||
|
|
||||||
|
[[steps]]
|
||||||
|
id = "collect"
|
||||||
|
description = """
|
||||||
|
Assemble all collected metrics into evidence/resources/{date}.json.
|
||||||
|
Compute verdict:
|
||||||
|
- "critical" if disk_used_pct ≥ 95, ram_used_pct ≥ 95,
|
||||||
|
or anthropic_budget_pct ≥ 95.
|
||||||
|
- "warn" if disk_used_pct ≥ 80, ram_used_pct ≥ 80,
|
||||||
|
or anthropic_budget_pct ≥ 80.
|
||||||
|
- "ok" otherwise.
|
||||||
|
Write the file conforming to the schema in evidence/README.md
|
||||||
|
## Schema: resources/YYYY-MM-DD.json.
|
||||||
|
"""
|
||||||
|
output = "evidence/resources/{date}.json"
|
||||||
|
schema = "evidence/README.md" # see ## Schema: resources/YYYY-MM-DD.json
|
||||||
|
|
||||||
|
[[steps]]
|
||||||
|
id = "deliver"
|
||||||
|
description = """
|
||||||
|
Commit evidence/resources/{date}.json to main.
|
||||||
|
Post a one-line summary comment to the originating issue (if any):
|
||||||
|
verdict, disk_used_pct, ram_used_pct, anthropic_budget_pct, ci queue depth.
|
||||||
|
On "warn" or "critical": highlight the breaching dimensions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# ── Products ───────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
[products.evidence_file]
|
||||||
|
path = "evidence/resources/{date}.json"
|
||||||
|
delivery = "commit to main"
|
||||||
|
schema = "evidence/README.md" # see ## Schema: resources/YYYY-MM-DD.json
|
||||||
|
|
||||||
|
[products.issue_comment]
|
||||||
|
delivery = "post to originating issue (if any)"
|
||||||
|
content = "verdict, disk_used_pct, ram_used_pct, anthropic_budget_pct, ci queue depth"
|
||||||
|
on_warn = "highlight breaching dimensions and current values"
|
||||||
|
|
||||||
|
# ── Resources ──────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
[resources]
|
||||||
|
profile = "light"
|
||||||
|
compute = "local — shell commands only (df, free, curl); no Docker or Anvil required"
|
||||||
|
concurrency = "safe to run in parallel with other formulas"
|
||||||
|
|
||||||
|
# ── Notes ──────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
[notes]
|
||||||
|
call_log = """
|
||||||
|
tmp/anthropic-call-log.jsonl is expected to have one JSON object per line,
|
||||||
|
each with at minimum:
|
||||||
|
{ "ts": "<ISO timestamp>", "cost_usd": <number> }
|
||||||
|
The file is written by the dark-factory agent loop. When absent the API
|
||||||
|
metrics default to 0 — the snapshot is still written rather than failing.
|
||||||
|
"""
|
||||||
|
|
||||||
|
disk_warn = """
|
||||||
|
Planner MEMORY.md (2026-03-20) notes disk at 79%. The "warn" threshold
|
||||||
|
(≥80%) will fire on the first run-resources pass. Monitor trajectory;
|
||||||
|
evidence pipeline data accumulation will increase disk pressure.
|
||||||
|
"""
|
||||||
186
scripts/harb-evaluator/run-protocol.sh
Executable file
186
scripts/harb-evaluator/run-protocol.sh
Executable file
|
|
@ -0,0 +1,186 @@
|
||||||
|
#!/usr/bin/env bash
# run-protocol.sh — On-chain protocol health snapshot.
#
# Collects TVL, accumulated fees, position count, and rebalance frequency
# from the deployed LiquidityManager. Writes evidence/protocol/YYYY-MM-DD.json.
#
# Exit codes:
#   0  snapshot written successfully
#   2  infrastructure error (RPC unreachable, missing deployments, forge unavailable)
#
# Environment:
#   RPC_URL           Base network RPC endpoint (required)
#   DEPLOYMENTS_FILE  path to deployments JSON (default: onchain/deployments-local.json)
#   LOOKBACK_BLOCKS   blocks to scan for Recentered events (default: 7200, ~24 h on Base)

set -euo pipefail

CAST="${CAST:-/home/debian/.foundry/bin/cast}"
FORGE="${FORGE:-/home/debian/.foundry/bin/forge}"
REPO_ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
DATE="$(date -u +%Y-%m-%d)"
OUT_DIR="$REPO_ROOT/evidence/protocol"
OUT_FILE="$OUT_DIR/$DATE.json"

RPC_URL="${RPC_URL:?RPC_URL is required}"
DEPLOYMENTS_FILE="${DEPLOYMENTS_FILE:-onchain/deployments-local.json}"
LOOKBACK_BLOCKS="${LOOKBACK_BLOCKS:-7200}"

# Resolve relative deployments path against repo root
if [[ "$DEPLOYMENTS_FILE" != /* ]]; then
  DEPLOYMENTS_FILE="$REPO_ROOT/$DEPLOYMENTS_FILE"
fi

die() { echo "ERROR: $*" >&2; exit 2; }

mkdir -p "$OUT_DIR"

# ── read-addresses ────────────────────────────────────────────────────────────

[[ -f "$DEPLOYMENTS_FILE" ]] || die "Deployments file not found: $DEPLOYMENTS_FILE"

LM=$(jq -r '.contracts.LiquidityManager' "$DEPLOYMENTS_FILE")
POOL=$(jq -r '.contracts.Pool' "$DEPLOYMENTS_FILE")
# NOTE(review): assumes the canonical WETH predeploy address on Base — confirm
# this matches the target chain when pointing RPC_URL elsewhere.
WETH=0x4200000000000000000000000000000000000006

[[ -n "$LM" && "$LM" != "null" ]] || die "LiquidityManager address missing from $DEPLOYMENTS_FILE"
[[ -n "$POOL" && "$POOL" != "null" ]] || die "Pool address missing from $DEPLOYMENTS_FILE"

# ── collect block number ──────────────────────────────────────────────────────

block_number=$("$CAST" block-number --rpc-url "$RPC_URL" 2>/dev/null) \
  || die "RPC unreachable at $RPC_URL"

# ── collect-tvl ───────────────────────────────────────────────────────────────

tvl_eth="0"
tvl_eth_formatted="0.00"

if tvl_output=$(cd "$REPO_ROOT" && LM="$LM" WETH="$WETH" POOL="$POOL" \
      "$FORGE" script onchain/script/LmTotalEth.s.sol \
      --rpc-url "$RPC_URL" --silent 2>/dev/null); then
  # forge script outputs the number via console2.log — extract last number.
  # `|| true` keeps set -e/pipefail from aborting the whole script when the
  # output happens to contain no digits (previously a silent fatal exit).
  tvl_eth=$(echo "$tvl_output" | grep -oE '[0-9]+' | tail -1 || true)
  tvl_eth="${tvl_eth:-0}"
  tvl_eth_formatted=$(awk "BEGIN {printf \"%.2f\", $tvl_eth / 1e18}")
else
  echo "WARN: LmTotalEth forge script failed, TVL will be 0" >&2
fi

# ── collect-fees ──────────────────────────────────────────────────────────────
# accumulatedFees() may not exist on older deployments — graceful fallback to 0.

accumulated_fees_eth="0"
accumulated_fees_eth_formatted="0.000"

if fees_output=$("$CAST" call "$LM" "accumulatedFees()(uint256)" --rpc-url "$RPC_URL" 2>/dev/null); then
  # `|| true`: digit extraction must not abort the script under pipefail.
  accumulated_fees_eth=$(echo "$fees_output" | grep -oE '[0-9]+' | head -1 || true)
  accumulated_fees_eth="${accumulated_fees_eth:-0}"
  accumulated_fees_eth_formatted=$(awk "BEGIN {printf \"%.3f\", $accumulated_fees_eth / 1e18}")
fi

# ── collect-positions ─────────────────────────────────────────────────────────
# LiquidityManager.positions(uint8 stage) → (uint128 liquidity, int24 tickLower, int24 tickUpper)
# Stage: 0=FLOOR, 1=ANCHOR, 2=DISCOVERY

position_count=0
positions_json="["
stage_names=("floor" "anchor" "discovery")

for stage in 0 1 2; do
  name="${stage_names[$stage]}"

  if pos_output=$("$CAST" call "$LM" "positions(uint8)(uint128,int24,int24)" "$stage" --rpc-url "$RPC_URL" 2>/dev/null); then
    # cast returns one value per line; large uints carry a trailing
    # scientific-notation annotation like "987654321000000000 [9.87e17]".
    # Take only the first whitespace-delimited field of each line so the
    # annotation never leaks into the JSON (the old `tr -d '[:space:]'`
    # glued it onto the number, producing invalid JSON).
    liquidity=$(echo "$pos_output" | awk 'NR==1 {print $1}')
    tick_lower=$(echo "$pos_output" | awk 'NR==2 {print $1}')
    tick_upper=$(echo "$pos_output" | awk 'NR==3 {print $1}')

    liquidity="${liquidity:-0}"
    tick_lower="${tick_lower:-0}"
    tick_upper="${tick_upper:-0}"

    if [[ "$liquidity" != "0" ]]; then
      position_count=$((position_count + 1))
    fi

    [[ "$stage" -gt 0 ]] && positions_json+=","
    positions_json+="
    {
      \"name\": \"$name\",
      \"tick_lower\": $tick_lower,
      \"tick_upper\": $tick_upper,
      \"liquidity\": \"$liquidity\"
    }"
  else
    echo "WARN: Failed to read positions($stage) from LiquidityManager" >&2
    [[ "$stage" -gt 0 ]] && positions_json+=","
    positions_json+="
    {
      \"name\": \"$name\",
      \"tick_lower\": 0,
      \"tick_upper\": 0,
      \"liquidity\": \"0\"
    }"
  fi
done

positions_json+="
  ]"

# ── collect-rebalances ────────────────────────────────────────────────────────
# Event: Recentered(int24 indexed currentTick, bool indexed isUp)

rebalance_count_24h=0
last_rebalance_block=0

from_block=$((block_number - LOOKBACK_BLOCKS))
[[ "$from_block" -lt 0 ]] && from_block=0

# Recentered(int24,bool) topic0
event_topic=$("$CAST" keccak "Recentered(int24,bool)" 2>/dev/null) || event_topic=""

if [[ -n "$event_topic" ]]; then
  if logs=$("$CAST" logs --from-block "$from_block" --to-block "$block_number" \
        --address "$LM" "$event_topic" --rpc-url "$RPC_URL" 2>/dev/null); then
    # grep -c already prints "0" when nothing matches; only its exit status
    # needs rescuing. The old `|| echo "0"` produced a two-line "0\n0" value
    # for zero events, breaking both the -eq test and the emitted JSON.
    rebalance_count_24h=$(echo "$logs" | grep -c "blockNumber" || true)

    # `|| true`: with zero events the grep pipeline fails and, under
    # set -e/pipefail, would previously kill the script before the JSON
    # was written.
    last_block_hex=$(echo "$logs" | grep "blockNumber" | tail -1 | grep -oE '0x[0-9a-fA-F]+' | head -1 || true)
    if [[ -n "$last_block_hex" ]]; then
      # bash arithmetic accepts the 0x prefix directly; safe here because the
      # value was matched against ^0x[0-9a-fA-F]+ above. (printf '%d' with
      # `|| echo "0"` could concatenate two values on failure.)
      last_rebalance_block=$(( last_block_hex ))
    fi
  else
    echo "WARN: Failed to fetch Recentered event logs" >&2
  fi
fi

# ── verdict ───────────────────────────────────────────────────────────────────

verdict="healthy"

if [[ "$tvl_eth" == "0" ]]; then
  verdict="offline"
elif [[ "$position_count" -lt 3 ]] || [[ "$rebalance_count_24h" -eq 0 ]]; then
  # NOTE(review): the formula exempts deployments < 1 day old from the
  # zero-rebalance degradation, but deployment age is not observable from
  # this script — fresh deployments will report "degraded" on day one.
  verdict="degraded"
fi

# ── write JSON ────────────────────────────────────────────────────────────────

cat > "$OUT_FILE" <<ENDJSON
{
  "date": "$DATE",
  "block_number": $block_number,
  "tvl_eth": "$tvl_eth",
  "tvl_eth_formatted": "$tvl_eth_formatted",
  "accumulated_fees_eth": "$accumulated_fees_eth",
  "accumulated_fees_eth_formatted": "$accumulated_fees_eth_formatted",
  "position_count": $position_count,
  "positions": $positions_json,
  "rebalance_count_24h": $rebalance_count_24h,
  "last_rebalance_block": $last_rebalance_block,
  "staleness_threshold_days": 1,
  "verdict": "$verdict"
}
ENDJSON

echo "evidence/protocol/$DATE.json written — verdict: $verdict"
|
||||||
169
scripts/harb-evaluator/run-resources.sh
Executable file
169
scripts/harb-evaluator/run-resources.sh
Executable file
|
|
@ -0,0 +1,169 @@
|
||||||
|
#!/usr/bin/env bash
# run-resources.sh — Infrastructure resource snapshot.
#
# Gathers four families of metrics — disk usage, RAM usage, Anthropic API
# call volume / budget burn, and Woodpecker CI queue depth — and writes them
# to evidence/resources/YYYY-MM-DD.json.
#
# Exit codes:
#   0  snapshot written successfully
#   2  infrastructure error
#
# Environment:
#   DISK_PATH                   filesystem to measure (default: /)
#   ANTHROPIC_BUDGET_USD_LIMIT  budget ceiling in USD (default: 50)
#   WOODPECKER_API_URL          Woodpecker CI base URL
#                               (default: http://localhost:8090; empty to skip)

set -euo pipefail

# Tunables first, so their defaults are visible before any derived paths.
: "${DISK_PATH:=/}"
: "${ANTHROPIC_BUDGET_USD_LIMIT:=50}"
: "${WOODPECKER_API_URL:=http://localhost:8090}"

REPO_ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
DATE="$(date -u +%Y-%m-%d)"
OUT_DIR="$REPO_ROOT/evidence/resources"
OUT_FILE="$OUT_DIR/$DATE.json"
CALL_LOG="$REPO_ROOT/tmp/anthropic-call-log.jsonl"

die() { printf 'ERROR: %s\n' "$*" >&2; exit 2; }

mkdir -p "$OUT_DIR"
|
||||||
|
|
||||||
|
# ── collect-disk ──────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
disk_used_bytes=0
|
||||||
|
disk_total_bytes=0
|
||||||
|
disk_used_pct=0
|
||||||
|
|
||||||
|
if command -v df >/dev/null 2>&1; then
|
||||||
|
# -B1 gives bytes; tail -1 skips header; awk grabs used, total, pct
|
||||||
|
read -r disk_total_bytes disk_used_bytes disk_used_pct < <(
|
||||||
|
df -B1 "$DISK_PATH" | tail -1 | awk '{gsub(/%/,"",$5); print $2, $3, $5}'
|
||||||
|
)
|
||||||
|
else
|
||||||
|
echo "WARN: df not available, disk metrics will be 0" >&2
|
||||||
|
fi
|
||||||
|
|
||||||
|
# ── collect-ram ───────────────────────────────────────────────────────────────

ram_used_bytes=0
ram_total_bytes=0
ram_used_pct=0

if command -v free >/dev/null 2>&1; then
  # free -b, "Mem:" row: $2 = total bytes, $3 = used bytes.
  # A failed read leaves the vars empty, so re-zero them in that case
  # rather than let set -e abort or the JSON heredoc emit blanks.
  read -r ram_total_bytes ram_used_bytes < <(
    free -b | awk '/^Mem:/ {print $2, $3}'
  ) || { ram_total_bytes=0; ram_used_bytes=0; }
  if [[ "$ram_total_bytes" -gt 0 ]]; then
    ram_used_pct=$(awk "BEGIN {printf \"%.1f\", $ram_used_bytes / $ram_total_bytes * 100}")
  fi
elif command -v vm_stat >/dev/null 2>&1; then
  # macOS fallback. One vm_stat invocation instead of five: faster, and the
  # page counters come from a single consistent snapshot.
  vm_out="$(vm_stat)"
  # Header line: "Mach Virtual Memory Statistics: (page size of NNNN bytes)"
  page_size=$(awk '/page size of/ {print $8}' <<<"$vm_out")
  pages_active=$(awk '/Pages active/ {gsub(/\./, "", $3); print $3}' <<<"$vm_out")
  pages_wired=$(awk '/Pages wired/ {gsub(/\./, "", $4); print $4}' <<<"$vm_out")
  pages_free=$(awk '/Pages free/ {gsub(/\./, "", $3); print $3}' <<<"$vm_out")
  pages_inactive=$(awk '/Pages inactive/ {gsub(/\./, "", $3); print $3}' <<<"$vm_out")
  # Default any field vm_stat did not report to 0 so the arithmetic below
  # cannot abort the script on an empty operand under set -e.
  : "${page_size:=0}" "${pages_active:=0}" "${pages_wired:=0}"
  : "${pages_free:=0}" "${pages_inactive:=0}"
  ram_used_bytes=$(( (pages_active + pages_wired) * page_size ))
  ram_total_bytes=$(( (pages_active + pages_wired + pages_free + pages_inactive) * page_size ))
  if [[ "$ram_total_bytes" -gt 0 ]]; then
    ram_used_pct=$(awk "BEGIN {printf \"%.1f\", $ram_used_bytes / $ram_total_bytes * 100}")
  fi
else
  echo "WARN: neither free nor vm_stat available, RAM metrics will be 0" >&2
fi
|
||||||
|
|
||||||
|
# ── collect-api ───────────────────────────────────────────────────────────────

anthropic_calls_24h=0
anthropic_budget_usd_used=0
anthropic_budget_pct=0

if [[ -f "$CALL_LOG" ]]; then
  # Both the GNU (-d) and BSD (-v) date spellings are tried so the cutoff
  # works on Linux and macOS; lexicographic comparison is valid for
  # ISO-8601 UTC timestamps.
  cutoff=$(date -u -d '24 hours ago' +%Y-%m-%dT%H:%M:%S 2>/dev/null \
    || date -u -v-24H +%Y-%m-%dT%H:%M:%S 2>/dev/null \
    || echo "")
  today=$(date -u +%Y-%m-%d)

  if [[ -n "$cutoff" ]]; then
    # Count JSONL entries whose "ts" value is within the last 24 hours.
    anthropic_calls_24h=$(awk -F'"ts"' -v cutoff="$cutoff" '
      NF>1 { split($2,a,"\""); if (a[2] >= cutoff) count++ }
      END { print count+0 }
    ' "$CALL_LOG")
  fi

  # Sum cost_usd over today's entries. Uses POSIX match() + RSTART/RLENGTH
  # instead of gawk's 3-argument match(), which is a syntax error on
  # BSD/macOS awk (the previous version only worked under gawk).
  anthropic_budget_usd_used=$(awk -v today="$today" '
    /"ts"/ && $0 ~ today {
      if (match($0, /"cost_usd"[[:space:]]*:[[:space:]]*[0-9.]+/)) {
        amt = substr($0, RSTART, RLENGTH)
        sub(/.*:[[:space:]]*/, "", amt)
        sum += amt
      }
    }
    END { printf "%.2f", sum+0 }
  ' "$CALL_LOG")
fi

# Guard against an empty/zero limit so the interpolated awk expression below
# can never divide by zero or become malformed.
if [[ "${ANTHROPIC_BUDGET_USD_LIMIT:-0}" != "0" ]]; then
  anthropic_budget_pct=$(awk "BEGIN {printf \"%.1f\", $anthropic_budget_usd_used / $ANTHROPIC_BUDGET_USD_LIMIT * 100}")
fi
|
||||||
|
|
||||||
|
# ── collect-ci ────────────────────────────────────────────────────────────────

# CI metrics default to the JSON literal null (interpolated unquoted into
# the heredoc) when the queue cannot be measured.
woodpecker_queue_depth="null"
woodpecker_running="null"

if [[ -n "$WOODPECKER_API_URL" ]]; then
  if ! command -v jq >/dev/null 2>&1; then
    # Previously a missing jq made the assignment fail and set -e killed
    # the script with no diagnostic; degrade to null with a warning instead.
    echo "WARN: jq not available, CI metrics will be null" >&2
  elif ci_json=$(curl -sf --max-time 5 "$WOODPECKER_API_URL/api/queue/info" 2>/dev/null); then
    # `// 0` falls through only on null/false (0 is truthy in jq), so a real
    # zero pending count is preserved. A malformed response degrades to null.
    woodpecker_queue_depth=$(jq '.pending // .waiting // 0' <<<"$ci_json" 2>/dev/null || echo "null")
    woodpecker_running=$(jq '.running // 0' <<<"$ci_json" 2>/dev/null || echo "null")
  else
    echo "WARN: Woodpecker CI unreachable at $WOODPECKER_API_URL, CI metrics will be null" >&2
  fi
fi
|
||||||
|
|
||||||
|
# ── verdict ───────────────────────────────────────────────────────────────────

# Any metric at/above 95% is critical (stop immediately); at/above 80% is a
# warning (keep scanning — a later metric may still escalate to critical).
verdict="ok"

for pct in "$disk_used_pct" "$ram_used_pct" "$anthropic_budget_pct"; do
  # Strip a trailing % if present, and default an empty reading to 0 so the
  # interpolated awk expressions below never see a malformed operand
  # (previously an empty pct produced an awk syntax error on stderr).
  pct_num="${pct//%/}"
  pct_num="${pct_num:-0}"
  if awk "BEGIN {exit !($pct_num >= 95)}" 2>/dev/null; then
    verdict="critical"
    break
  fi
  if awk "BEGIN {exit !($pct_num >= 80)}" 2>/dev/null; then
    verdict="warn"
  fi
done
|
||||||
|
|
||||||
|
# ── write JSON ────────────────────────────────────────────────────────────────

# Render the snapshot. The heredoc delimiter is unquoted so every $var
# interpolates; the collectors above always leave a bare number (or the
# literal null for the CI fields) behind, keeping the output valid JSON.
# NOTE(review): ANTHROPIC_BUDGET_USD_LIMIT is interpolated unquoted — a
# non-numeric override would produce invalid JSON; confirm callers only set
# numbers.
cat > "$OUT_FILE" <<ENDJSON
{
  "date": "$DATE",
  "disk": {
    "used_bytes": $disk_used_bytes,
    "total_bytes": $disk_total_bytes,
    "used_pct": $disk_used_pct
  },
  "ram": {
    "used_bytes": $ram_used_bytes,
    "total_bytes": $ram_total_bytes,
    "used_pct": $ram_used_pct
  },
  "api": {
    "anthropic_calls_24h": $anthropic_calls_24h,
    "anthropic_budget_usd_used": $anthropic_budget_usd_used,
    "anthropic_budget_usd_limit": $ANTHROPIC_BUDGET_USD_LIMIT,
    "anthropic_budget_pct": $anthropic_budget_pct
  },
  "ci": {
    "woodpecker_queue_depth": $woodpecker_queue_depth,
    "woodpecker_running": $woodpecker_running
  },
  "staleness_threshold_days": 1,
  "verdict": "$verdict"
}
ENDJSON

# Single success line to stdout; all warnings above went to stderr.
echo "evidence/resources/$DATE.json written — verdict: $verdict"
|
||||||
Loading…
Add table
Add a link
Reference in a new issue