fix: address review feedback on fetch-events.ts (#315)

- Replace hardcoded Infura API key with INFURA_API_KEY env var; fail fast
  with a helpful message if unset and no --rpc-url is given
- Add onchain/script/backtesting/.gitignore (cache/) instead of relying on
  the opaque root pattern; remove force-tracked cache/.gitkeep (mkdirSync
  creates the directory at runtime)
- Document resume constraint: reliable only when both --start-block and
  --end-block are explicit, or --output is set
- Fix batch-number display: derive batchNum inside the loop from the actual
  `from` block so it stays correct when resumeFromBlock isn't BATCH_SIZE-aligned
- Guard log.logIndex === null consistently with blockNumber/transactionHash
- console.warn on decode errors instead of silently discarding them

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
openhands 2026-02-26 22:45:40 +00:00
parent df89b8c2da
commit c33bdbaad5
3 changed files with 34 additions and 11 deletions

2
onchain/script/backtesting/.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
# Cache files generated by fetch-events.ts at runtime — do not commit
cache/

View file

@ -7,6 +7,13 @@
* npx tsx fetch-events.ts --pool 0x0d59... --days 7
* npx tsx fetch-events.ts --pool 0x0d59... --start-block 12345678 --end-block 12989078
* npx tsx fetch-events.ts --pool 0x0d59... --days 7 --output /tmp/events.jsonl
*
* Requires the INFURA_API_KEY environment variable unless --rpc-url is given.
*
* NOTE: Resume (re-run with the same flags) only works reliably when both
* --start-block and --end-block are provided, or when --output is explicit.
* Without --end-block the output filename embeds the runtime latestBlock,
* which changes on subsequent runs and creates a new file instead of resuming.
*/
import { createPublicClient, http, parseAbi, decodeEventLog } from "viem";
@ -20,8 +27,19 @@ import { fileURLToPath } from "url";
// Constants
// ---------------------------------------------------------------------------
const DEFAULT_INFURA_URL =
"https://base-mainnet.infura.io/v3/409c42ecaa4e405bb5735faac0f7aec2";
/**
 * Construct the default Base-mainnet Infura RPC URL from the
 * INFURA_API_KEY environment variable.
 *
 * If the variable is unset, prints a remediation hint to stderr and
 * terminates the process with exit code 1 — callers that supply
 * --rpc-url never reach this function.
 *
 * @returns The fully-qualified Infura endpoint URL for Base mainnet.
 */
function buildDefaultRpcUrl(): string {
  const apiKey = process.env.INFURA_API_KEY;
  // Happy path first: a configured key short-circuits straight to the URL.
  if (apiKey) {
    return `https://base-mainnet.infura.io/v3/${apiKey}`;
  }
  // No key and (by construction of the caller) no --rpc-url: fail fast
  // with actionable guidance rather than letting viem surface a 401 later.
  console.error(
    [
      "Error: INFURA_API_KEY environment variable is not set.",
      "  Set it with: export INFURA_API_KEY=<your-infura-project-id>",
      "  or pass --rpc-url <url> to use a different RPC endpoint.",
    ].join("\n")
  );
  // process.exit returns `never`, so this branch satisfies the string return type.
  process.exit(1);
}
/** Base mainnet produces ~1 block every 2 seconds → ~43 200 blocks/day. */
const BASE_BLOCKS_PER_DAY = 43_200;
@ -88,7 +106,7 @@ function parseArgs(): Args {
startBlock: startBlockRaw !== undefined ? parseInt(startBlockRaw, 10) : null,
endBlock: endBlockRaw !== undefined ? parseInt(endBlockRaw, 10) : null,
output: getFlag("--output") ?? null,
rpcUrl: getFlag("--rpc-url") ?? DEFAULT_INFURA_URL,
rpcUrl: getFlag("--rpc-url") ?? buildDefaultRpcUrl(),
};
}
@ -208,15 +226,14 @@ async function main(): Promise<void> {
// Fetch in batches
// ------------------------------------------------------------------
const totalBatches = Math.ceil((endBlock - startBlock + 1) / BATCH_SIZE);
// Batch index that corresponds to resumeFromBlock (1-based for display)
const startBatchNum =
Math.floor((resumeFromBlock - startBlock) / BATCH_SIZE) + 1;
let totalEvents = 0;
let batchNum = startBatchNum;
for (let from = resumeFromBlock; from <= endBlock; from += BATCH_SIZE) {
const to = Math.min(from + BATCH_SIZE - 1, endBlock);
// Derive batch number from actual `from` position so it stays correct
// even when resumeFromBlock is not aligned to a BATCH_SIZE boundary.
const batchNum = Math.floor((from - startBlock) / BATCH_SIZE) + 1;
process.stdout.write(`Fetching blocks ${from}-${to}... `);
@ -256,7 +273,7 @@ async function main(): Promise<void> {
let batchEvents = 0;
for (const log of logs) {
if (log.blockNumber === null || log.transactionHash === null) continue;
if (log.blockNumber === null || log.transactionHash === null || log.logIndex === null) continue;
try {
const decoded = decodeEventLog({
@ -276,13 +293,17 @@ async function main(): Promise<void> {
appendFileSync(outputPath, JSON.stringify(entry) + "\n");
batchEvents++;
totalEvents++;
} catch {
// Skip any log we cannot decode (shouldn't happen for our known pool)
} catch (err) {
// Decode failure is unexpected for a filtered-by-address+topic0 log;
// warn so any ABI drift is visible rather than silently discarded.
console.warn(
`Warning: could not decode log tx=${log.transactionHash} logIndex=${log.logIndex}:`,
err instanceof Error ? err.message : err
);
}
}
console.log(`${batchEvents} events (${batchNum}/${totalBatches} batches)`);
batchNum++;
// Polite inter-batch delay (skip after last batch)
if (from + BATCH_SIZE <= endBlock) {