@o-az
Created December 18, 2025 08:14
└── apps
    └── contract-verification
        ├── reset.d.ts
        ├── .env.example
        ├── container
        │   ├── tsconfig.json
        │   ├── compiler.ts
        │   └── index.ts
        ├── scripts
        │   ├── local-d1.sh
        │   ├── quick-verify-vyper.sh
        │   ├── medium-verify.sh
        │   ├── quick-verify.sh
        │   ├── Mail.s.sol
        │   ├── tempo-check.sh
        │   └── verify-with-curl.sh
        ├── env.d.ts
        ├── Dockerfile
        ├── src
        │   ├── chains.ts
        │   ├── container.ts
        │   ├── utilities.ts
        │   ├── route.docs.tsx
        │   ├── index.tsx
        │   ├── database
        │   │   └── schema.ts
        │   ├── route.verify-legacy.ts
        │   ├── route.lookup.ts
        │   ├── route.verify.ts
        │   └── bytecode-matching.ts
        ├── drizzle.config.ts
        ├── wrangler.jsonc
        ├── vite.config.ts
        ├── tsconfig.json
        ├── package.json
        └── README.md
/apps/contract-verification/reset.d.ts:
--------------------------------------------------------------------------------
1 | import '@total-typescript/ts-reset'
2 |
--------------------------------------------------------------------------------
/apps/contract-verification/.env.example:
--------------------------------------------------------------------------------
1 | PORT=6969
2 | NODE_ENV="development"
3 |
4 | CLOUDFLARE_ACCOUNT_ID=""
5 | CLOUDFLARE_DATABASE_ID=""
6 | CLOUDFLARE_D1_TOKEN=""
--------------------------------------------------------------------------------
/apps/contract-verification/container/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": ["../tsconfig.json"],
3 | "compilerOptions": {
4 | "types": ["bun", "node"]
5 | },
6 | "include": ["./**/*.ts"]
7 | }
8 |
--------------------------------------------------------------------------------
/apps/contract-verification/scripts/local-d1.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -euo pipefail
4 |
5 | # Scans `.wrangler/state/v3/d1/miniflare-D1DatabaseObject`
6 | # for the latest sqlite file and returns its full path
7 | d1_dir=".wrangler/state/v3/d1/miniflare-D1DatabaseObject"
8 |
9 | if [[ ! -d "$d1_dir" ]]; then
10 | echo "Error: D1 directory not found. Run 'bun run dev' first to create local D1." >&2
11 | exit 1
12 | fi
13 |
14 | # Find latest sqlite file (cross-platform: uses ls -t instead of GNU find -printf)
15 | latest_sqlite_file=$(find "$d1_dir" -type f -name '*.sqlite' -exec ls -t {} + 2>/dev/null | head -1)
16 |
17 | if [[ -z "$latest_sqlite_file" ]]; then
18 | echo "Error: No .sqlite files found in $d1_dir" >&2
19 | exit 1
20 | fi
21 |
22 | echo "$latest_sqlite_file"
--------------------------------------------------------------------------------
/apps/contract-verification/env.d.ts:
--------------------------------------------------------------------------------
1 | interface EnvironmentVariables {
2 | readonly PORT: string
3 |
4 | readonly VITE_LOG_LEVEL: 'info' | 'warn' | 'silent'
5 |
6 | readonly CLOUDFLARE_ACCOUNT_ID: string
7 | readonly CLOUDFLARE_DATABASE_ID: string
8 | readonly CLOUDFLARE_D1_TOKEN: string
9 | readonly CLOUDFLARE_D1_ENVIRONMENT: 'local' | (string & {})
10 | }
11 |
12 | // Node.js `process.env` auto-completion
13 | declare namespace NodeJS {
14 | interface ProcessEnv extends EnvironmentVariables {
15 | readonly NODE_ENV: 'development' | 'production'
16 | }
17 | }
18 |
19 | // Bun/vite `import.meta.env` auto-completion
20 | interface ImportMetaEnv extends EnvironmentVariables {}
21 |
22 | interface ImportMeta {
23 | readonly env: ImportMetaEnv
24 | }
25 |
26 | declare const __BASE_URL__: string
27 | declare const __BUILD_VERSION__: string
28 |
--------------------------------------------------------------------------------
/apps/contract-verification/Dockerfile:
--------------------------------------------------------------------------------
1 | # syntax=docker/dockerfile:1
2 | FROM oven/bun AS build
3 |
4 | RUN apt-get update \
5 | && apt-get install --yes libz1 \
6 | && rm -rf /var/lib/apt/lists/*
7 |
8 | ENV NODE_ENV="production"
9 |
10 | WORKDIR /usr/src/app
11 |
12 | COPY container gg
13 | RUN bun build \
14 | --compile \
15 | --target bun \
16 | --minify-syntax \
17 | --minify-whitespace \
18 | --outfile container \
19 | /usr/src/app/gg/index.ts
20 |
21 | FROM gcr.io/distroless/base
22 |
23 | WORKDIR /usr/src/app
24 |
25 | COPY --from=build /usr/src/app/container /usr/src/app/container
26 | # needed for vyper compiler
27 | COPY --from=build /usr/lib/x86_64-linux-gnu/libz.so.1 /usr/lib/x86_64-linux-gnu/libz.so.1
28 |
29 | ENV NODE_ENV="production"
30 |
31 | EXPOSE $PORT
32 | EXPOSE 8080
33 |
34 | ENTRYPOINT ["/usr/src/app/container"]
--------------------------------------------------------------------------------
/apps/contract-verification/scripts/quick-verify-vyper.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -euo pipefail
4 |
5 | TEMPO_RPC_URL="https://rpc-orchestra.testnet.tempo.xyz"
6 | # NOTE: This is a throwaway PK created for this test
7 | PRIVATE_KEY=${PK:-"0xa4b3490c35582d544451fbbfd7a0e4c5fa4d0ded06563ccc199057c7a5e6c9de"}
8 | VERIFIER_URL=${VERIFIER_URL:-"https://contracts.tempo.xyz"}
9 |
10 | TEMP_DIR=$(mktemp -d)
11 | echo "Creating temporary directory $TEMP_DIR"
12 | cd "$TEMP_DIR"
13 |
14 | gh repo clone grandizzy/counter-vy "$TEMP_DIR"/counter-vy
15 | cd "$TEMP_DIR"/counter-vy
16 |
17 | echo -e "\n=== FORGE BUILD ==="
18 |
19 | forge build
20 | forge script script/Counter.s.sol \
21 | --rpc-url $TEMPO_RPC_URL \
22 | --private-key "$PRIVATE_KEY" \
23 | --broadcast \
24 | --verify \
25 | --verifier sourcify \
26 | --verifier-url "$VERIFIER_URL"
27 |
--------------------------------------------------------------------------------
/apps/contract-verification/scripts/medium-verify.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -euo pipefail
4 |
5 | TEMPO_RPC_URL="https://rpc-orchestra.testnet.tempo.xyz"
6 | # NOTE: This is a throwaway PK created for this test
7 | PRIVATE_KEY=${PK:-"0xa4b3490c35582d544451fbbfd7a0e4c5fa4d0ded06563ccc199057c7a5e6c9de"}
8 | VERIFIER_URL=${VERIFIER_URL:-"https://contracts.tempo.xyz"}
9 |
10 | TEMP_DIR=$(mktemp -d)
11 | echo "Creating temporary directory $TEMP_DIR"
12 | cd "$TEMP_DIR"
13 |
14 | gh repo clone grandizzy/oz-dummy-token "$TEMP_DIR"/oz-dummy-token
15 | cd "$TEMP_DIR"/oz-dummy-token
16 |
17 | echo -e "\n=== FORGE BUILD ==="
18 |
19 | forge build
20 | forge script script/DeployMyToken.s.sol \
21 | --rpc-url $TEMPO_RPC_URL \
22 | --private-key "$PRIVATE_KEY" \
23 | --broadcast \
24 | --verify \
25 | --verifier sourcify \
26 | --verifier-url "$VERIFIER_URL"
--------------------------------------------------------------------------------
/apps/contract-verification/src/chains.ts:
--------------------------------------------------------------------------------
1 | import { tempoDevnet, tempoTestnet } from 'tempo.ts/chains'
2 |
3 | export const DEVNET_CHAIN_ID = tempoDevnet.id
4 | export const TESTNET_CHAIN_ID = tempoTestnet.id
5 |
6 | export const chains = {
7 | [tempoDevnet.id]: tempoDevnet,
8 | [tempoTestnet.id]: tempoTestnet,
9 | }
10 |
11 | // matches https://sourcify.dev/server/chains format
12 | export const sourcifyChains = [tempoDevnet, tempoTestnet].map((chain) => {
13 | const returnValue = {
14 | name: chain().name,
15 | title: chain().name,
16 | chainId: chain().id,
17 | rpc: [
18 | chain().rpcUrls.default.http,
19 | chain().rpcUrls.default.webSocket,
20 | ].flat(),
21 | supported: true,
22 | etherscanAPI: false,
23 | _extra: {},
24 | }
25 | // @ts-expect-error
26 | if (chain()?.blockExplorers)
27 | returnValue._extra = {
28 | // @ts-expect-error
29 | blockExplorer: chain()?.blockExplorers.default,
30 | }
31 |
32 | return returnValue
33 | })
34 |
--------------------------------------------------------------------------------
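Note: each entry produced by `sourcifyChains` above is meant to mirror an item from https://sourcify.dev/server/chains. Below is a minimal sketch of the shape one entry takes; the name, chain id, and RPC URL are illustrative placeholders taken from other files in this repo, not read from `tempo.ts/chains` itself.

```ts
// Illustrative example of a single `sourcifyChains` entry (values are assumptions).
const exampleEntry = {
  name: 'Tempo Testnet',
  title: 'Tempo Testnet',
  chainId: 42429, // chain id used as the default in scripts/verify-with-curl.sh
  rpc: ['https://rpc-orchestra.testnet.tempo.xyz'], // default http (and webSocket) endpoints, flattened
  supported: true,
  etherscanAPI: false,
  _extra: {}, // gains a `blockExplorer` entry when the chain defines block explorers
}
```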
/apps/contract-verification/src/container.ts:
--------------------------------------------------------------------------------
1 | import { Container, type StopParams } from '@cloudflare/containers'
2 |
3 | export class VerificationContainer extends Container<Cloudflare.Env> {
4 | defaultPort = 8080
5 | sleepAfter = '10m'
6 | enableInternet = true
7 |
8 | override async onStart(): Promise<void> {
9 | console.log('onStart hook called')
10 |
11 | const response = await this.containerFetch('http://localhost:8080/health') // TODO: update domain
12 | if (!response.ok) throw new Error('Container health check failed')
13 |
14 | const data = await response.text()
15 | console.log('onStart hook called with data:', data)
16 | }
17 |
18 | override onStop(stopParams: StopParams): void {
19 | if (stopParams.exitCode === 0) console.log('Container stopped gracefully')
20 | else console.log('Container stopped with exit code:', stopParams.exitCode)
21 |
22 | console.log('Container stop reason:', stopParams.reason)
23 | }
24 |
25 | override onError(error: unknown): unknown {
26 | console.log('onError hook called with error:', error)
27 | throw error
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/apps/contract-verification/scripts/quick-verify.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -euo pipefail
4 |
5 | TEMPO_RPC_URL="https://rpc-orchestra.testnet.tempo.xyz"
6 | PRIVATE_KEY=${PK:-"0xa4b3490c35582d544451fbbfd7a0e4c5fa4d0ded06563ccc199057c7a5e6c9de"}
7 | VERIFIER_URL=${VERIFIER_URL:-"https://o.tail388b2e.ts.net"}
8 |
9 | TEMP_DIR=$(mktemp -d)
10 | echo "Creating temporary directory $TEMP_DIR"
11 | cd "$TEMP_DIR"
12 |
13 | CONTRACT_NAME="counter-verify_$(shuf -i 1000000-9999999 -n 1)"
14 | echo "Creating contract $TEMP_DIR/$CONTRACT_NAME"
15 |
16 |
17 | echo -e "A new tempo directory and the following command will run:\\n"
18 | echo -e "forge create src/Counter.sol:Counter \\
19 | --private-key=\"$PRIVATE_KEY\" \\
20 | --rpc-url $TEMPO_RPC_URL \\
21 | --broadcast \\
22 | --verify \\
23 | --verifier sourcify \\
24 | --verifier-url $VERIFIER_URL"
25 | echo -e "\\n"
26 |
27 |
28 | forge init "$CONTRACT_NAME" && \
29 | cd "$CONTRACT_NAME" && \
30 | forge create src/Counter.sol:Counter \
31 | --private-key="$PRIVATE_KEY" \
32 | --rpc-url $TEMPO_RPC_URL \
33 | --broadcast \
34 | --verify \
35 | --verifier sourcify \
36 | --verifier-url "$VERIFIER_URL"
--------------------------------------------------------------------------------
/apps/contract-verification/drizzle.config.ts:
--------------------------------------------------------------------------------
1 | import NodeChildProcess from 'node:child_process'
2 | import { type Config, defineConfig } from 'drizzle-kit'
3 |
4 | const isLocal = process.env.CLOUDFLARE_D1_ENVIRONMENT === 'local'
5 |
6 | const dbCredentials = (
7 | isLocal
8 | ? {
9 | url: NodeChildProcess.execSync('/bin/bash scripts/local-d1.sh')
10 | .toString()
11 | .trim(),
12 | }
13 | : {
14 | token: process.env.CLOUDFLARE_D1_TOKEN,
15 | accountId: process.env.CLOUDFLARE_ACCOUNT_ID,
16 | databaseId: process.env.CLOUDFLARE_DATABASE_ID,
17 | }
18 | ) satisfies SqliteDbCredentials | D1HttpDbCredentials
19 |
20 | export default defineConfig({
21 | out: './drizzle',
22 | schema: './src/database/schema.ts',
23 | // Use local SQLite for migrations, d1-http for remote
24 | ...(isLocal
25 | ? { dbCredentials, dialect: 'turso' }
26 | : { dbCredentials, dialect: 'sqlite', driver: 'd1-http' }),
27 | })
28 |
29 | type SqliteDbCredentials = Extract<
30 | Config,
31 | { dialect: 'turso'; driver?: never }
32 | >['dbCredentials']
33 | type D1HttpDbCredentials = Extract<
34 | Config,
35 | { dialect: 'sqlite'; driver: 'd1-http' }
36 | >['dbCredentials']
37 |
--------------------------------------------------------------------------------
/apps/contract-verification/scripts/Mail.s.sol:
--------------------------------------------------------------------------------
1 | // SPDX-License-Identifier: UNLICENSED
2 | pragma solidity ^0.8.13;
3 |
4 | import {Script} from "forge-std/Script.sol";
5 | import {ITIP20} from "tempo-std/interfaces/ITIP20.sol";
6 | import {ITIP20RolesAuth} from "tempo-std/interfaces/ITIP20RolesAuth.sol";
7 | import {StdPrecompiles} from "tempo-std/StdPrecompiles.sol";
8 | import {StdTokens} from "tempo-std/StdTokens.sol";
9 | import {Mail} from "../src/Mail.sol";
10 |
11 | contract MailScript is Script {
12 | function setUp() public {}
13 |
14 | function run() public {
15 | vm.startBroadcast();
16 |
17 | // StdPrecompiles.TIP_FEE_MANAGER.setUserToken(StdTokens.ALPHA_USD_ADDRESS);
18 |
19 | // ITIP20 token =
20 | // ITIP20(StdPrecompiles.TIP20_FACTORY.createToken("testUSD", "tUSD", "USD", StdTokens.PATH_USD, msg.sender));
21 |
22 | // ITIP20RolesAuth(address(token)).grantRole(token.ISSUER_ROLE(), msg.sender);
23 |
24 | // token.mint(msg.sender, 1_000_000 * 10 ** token.decimals());
25 |
26 | ITIP20 token = ITIP20(StdTokens.PATH_USD);
27 | new Mail(token);
28 |
29 | vm.stopBroadcast();
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/apps/contract-verification/wrangler.jsonc:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://esm.sh/wrangler/config-schema.json",
3 | "name": "contracts",
4 | "compatibility_date": "2025-10-11",
5 | "compatibility_flags": ["nodejs_compat"],
6 | "main": "./src/index.tsx",
7 | "keep_vars": true,
8 | "workers_dev": true,
9 | "preview_urls": true,
10 | "d1_databases": [
11 | {
12 | "binding": "CONTRACTS_DB",
13 | "migrations_dir": "drizzle",
14 | "database_name": "CONTRACTS-DB",
15 | "database_id": "9e8d88e6-8bf0-40ae-860a-c1385dccaefc"
16 | }
17 | ],
18 | "containers": [
19 | {
20 | "max_instances": 10,
21 | "image": "./Dockerfile",
22 | "name": "verification-container",
23 | "class_name": "VerificationContainer"
24 | }
25 | ],
26 | "durable_objects": {
27 | "bindings": [
28 | {
29 | "name": "VERIFICATION_CONTAINER",
30 | "class_name": "VerificationContainer"
31 | }
32 | ]
33 | },
34 | "migrations": [
35 | {
36 | "tag": "v1",
37 | "new_sqlite_classes": ["VerificationContainer"]
38 | }
39 | ],
40 | "observability": {
41 | "enabled": true,
42 | "logs": {
43 | "enabled": true,
44 | "persist": true,
45 | "head_sampling_rate": 1,
46 | "invocation_logs": true
47 | }
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/apps/contract-verification/src/utilities.ts:
--------------------------------------------------------------------------------
1 | import type { Context } from 'hono'
2 | import type { ContentfulStatusCode } from 'hono/utils/http-status'
3 |
4 | /**
5 | * Normalize absolute source paths to relative paths.
6 | * Extracts the portion after common patterns like /src/, /contracts/, /lib/
7 | * Falls back to filename if no pattern matches.
8 | */
9 | export function normalizeSourcePath(absolutePath: string) {
10 | if (!absolutePath.startsWith('/')) return absolutePath
11 |
12 | // Common source directory patterns
13 | const patterns = ['/src/', '/contracts/', '/lib/', '/test/', '/script/']
14 |
15 | for (const pattern of patterns) {
16 | const index = absolutePath.lastIndexOf(pattern)
17 | if (index !== -1) return absolutePath.slice(index + 1) // +1 to remove leading slash
18 | }
19 |
20 | // Fallback: just use the filename
21 | const parts = absolutePath.split('/')
22 | return parts[parts.length - 1] ?? absolutePath
23 | }
24 |
25 | export function sourcifyError(
26 | context: Context,
27 | status: ContentfulStatusCode,
28 | customCode: string,
29 | message: string,
30 | ) {
31 | return context.json(
32 | {
33 | message,
34 | customCode,
35 | errorId: globalThis.crypto.randomUUID(),
36 | },
37 | status,
38 | )
39 | }
40 |
--------------------------------------------------------------------------------
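To make the docstring on `normalizeSourcePath` concrete, here are a few illustrative calls; the paths are made up for the example.

```ts
// '/src/' is one of the recognized patterns, so everything before it is dropped.
normalizeSourcePath('/home/alice/project/src/Mail.sol') // => 'src/Mail.sol'
// No recognized pattern: fall back to the bare filename.
normalizeSourcePath('/opt/build/Token.vy') // => 'Token.vy'
// Already relative: returned unchanged.
normalizeSourcePath('contracts/Token.sol') // => 'contracts/Token.sol'
```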
/apps/contract-verification/vite.config.ts:
--------------------------------------------------------------------------------
1 | import NodeChildProcess from 'node:child_process'
2 | import NodeProcess from 'node:process'
3 | import { cloudflare } from '@cloudflare/vite-plugin'
4 | import { defineConfig, loadEnv } from 'vite'
5 |
6 | const commitSha =
7 | NodeChildProcess.execSync('git rev-parse --short HEAD').toString().trim() ||
8 | NodeProcess.env.CF_PAGES_COMMIT_SHA?.slice(0, 7)
9 |
10 | const [, , , ...args] = NodeProcess.argv
11 |
12 | export default defineConfig((config) => {
13 | const env = loadEnv(config.mode, process.cwd(), '')
14 |
15 | const lastPort = (() => {
16 | const index = args.lastIndexOf('--port')
17 | return index === -1 ? null : (args.at(index + 1) ?? null)
18 | })()
19 | const port = Number(lastPort ?? env.PORT ?? 3_000)
20 |
21 | return {
22 | plugins: [cloudflare()],
23 | server: {
24 | port,
25 | cors: config.mode === 'development' ? true : undefined,
26 | allowedHosts: config.mode === 'development' ? true : undefined,
27 | },
28 | define: {
29 | __BASE_URL__: JSON.stringify(
30 | config.mode === 'development'
31 | ? `http://localhost:${port}`
32 | : (env.VITE_BASE_URL ?? ''),
33 | ),
34 | __BUILD_VERSION__: JSON.stringify(commitSha ?? Date.now().toString()),
35 | },
36 | build: {
37 | copyPublicDir: true,
38 | rolldownOptions: {
39 | output: { minify: true },
40 | },
41 | },
42 | }
43 | })
44 |
--------------------------------------------------------------------------------
/apps/contract-verification/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "schema": "https://json.schemastore.org/tsconfig.json",
3 | "compilerOptions": {
4 | "strict": true,
5 | "noEmit": true,
6 | "allowJs": true,
7 | "checkJs": true,
8 | "lib": ["DOM", "ESNext"],
9 | "target": "ESNext",
10 | "module": "ESNext",
11 | "jsx": "react-jsx",
12 | "skipLibCheck": true,
13 | "alwaysStrict": true,
14 | "esModuleInterop": true,
15 | "isolatedModules": true,
16 | "strictNullChecks": true,
17 | "resolveJsonModule": true,
18 | "verbatimModuleSyntax": true,
19 | "jsxImportSource": "hono/jsx",
20 | "moduleResolution": "Bundler",
21 | "useDefineForClassFields": true,
22 | "allowArbitraryExtensions": true,
23 | "noUncheckedIndexedAccess": true,
24 | "resolvePackageJsonImports": true,
25 | "resolvePackageJsonExports": true,
26 | "useUnknownInCatchVariables": true,
27 | "allowImportingTsExtensions": true,
28 | "noFallthroughCasesInSwitch": true,
29 | "allowSyntheticDefaultImports": true,
30 | "forceConsistentCasingInFileNames": true,
31 | "paths": {
32 | "#*": ["./src/*"]
33 | },
34 | "types": ["node", "@cloudflare/workers-types"]
35 | },
36 | "include": ["src/**/*", "scripts/**/*", "./container/**/*"],
37 | "files": [
38 | "env.d.ts",
39 | "reset.d.ts",
40 | "vite.config.ts",
41 | "drizzle.config.ts",
42 | "worker-configuration.d.ts"
43 | ],
44 | "exclude": ["_", "dist", "node_modules"]
45 | }
46 |
--------------------------------------------------------------------------------
/apps/contract-verification/src/route.docs.tsx:
--------------------------------------------------------------------------------
1 | import type { createApiReference } from '@scalar/api-reference'
2 | import { Hono } from 'hono'
3 | import { html, raw } from 'hono/html'
4 |
5 | export const docsRoute = new Hono<{ Bindings: Cloudflare.Env }>()
6 |
7 | const scalarConfig = {
8 | slug: 'contracts',
9 | hideModels: true,
10 | sources: [
11 | {
12 | url: 'https://sourcify.dev/server/api-docs/swagger.json',
13 | default: false,
14 | },
15 | { url: '/openapi.json', default: true },
16 | ],
17 | theme: 'default',
18 | telemetry: false,
19 | hideClientButton: true,
20 | showDeveloperTools: 'never',
21 | documentDownloadType: 'json',
22 | operationTitleSource: 'path',
23 | title: 'Contract Verification API Reference',
24 | favicon: 'https://explore.tempo.xyz/favicon.ico',
25 | // customCss: /* css */ ``,
26 | } satisfies Parameters<typeof createApiReference>[1]
27 |
28 | const Docs = () => {
29 | return (
30 | <html lang="en">
31 | <head>
32 | <title>Contract Verification API</title>
33 | <meta charset="utf-8" />
34 | <meta name="viewport" content="width=device-width, initial-scale=1" />
35 | </head>
36 | <body>
37 | <main id="app"></main>
38 | <script src="https://cdn.jsdelivr.net/npm/@scalar/api-reference"></script>
39 | <script>{html /* js */`Scalar.createApiReference('#app', ${raw(JSON.stringify(scalarConfig))})`}</script>
40 | </body>
41 | </html>
42 | )
43 | }
44 |
45 | docsRoute.get('/', (context) => context.html(<Docs />))
46 |
--------------------------------------------------------------------------------
/apps/contract-verification/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "contracts",
3 | "version": "0.0.1",
4 | "type": "module",
5 | "imports": {
6 | "#*": "./src/*",
7 | "#openapi.json": "./openapi.json",
8 | "#package.json": "./package.json"
9 | },
10 | "scripts": {
11 | "dev": "vite dev",
12 | "preview": "vite preview",
13 | "build": "vite build",
14 | "check": "biome check . --write --unsafe",
15 | "check:types": "tsgo --project tsconfig.json --noEmit",
16 | "gen:types": "test -f .env || cp .env.example .env; wrangler types",
17 | "postinstall": "pnpm gen:types",
18 | "tail": "wrangler tail",
19 | "db:generate": "drizzle-kit generate",
20 | "db:migrate": "wrangler d1 migrations apply CONTRACTS-DB --local",
21 | "db:migrate:remote": "wrangler d1 migrations apply CONTRACTS-DB --remote",
22 | "db:studio": "drizzle-kit studio --config='drizzle.config.ts'",
23 | "db:push": "drizzle-kit push --config='drizzle.config.ts'"
24 | },
25 | "dependencies": {
26 | "@cloudflare/containers": "^0.0.31",
27 | "cbor-x": "^1.6.0",
28 | "drizzle-orm": "^0.45.1",
29 | "hono": "catalog:",
30 | "ox": "catalog:",
31 | "semver": "^7.7.3",
32 | "tempo.ts": "catalog:",
33 | "viem": "catalog:"
34 | },
35 | "devDependencies": {
36 | "@biomejs/biome": "catalog:",
37 | "@cloudflare/vite-plugin": "catalog:",
38 | "@cloudflare/workers-types": "catalog:",
39 | "@libsql/client": "^0.15.15",
40 | "@scalar/api-reference": "catalog:",
41 | "@total-typescript/ts-reset": "catalog:",
42 | "@types/bun": "^1.3.4",
43 | "@types/node": "catalog:",
44 | "@types/semver": "^7.7.1",
45 | "dbmate": "^2.28.0",
46 | "drizzle-kit": "^0.31.8",
47 | "typescript": "catalog:",
48 | "vite": "catalog:",
49 | "wrangler": "catalog:"
50 | },
51 | "license": "MIT"
52 | }
53 |
--------------------------------------------------------------------------------
/apps/contract-verification/README.md:
--------------------------------------------------------------------------------
1 | # Tempo Contract Verification Service
2 |
3 | [contracts.tempo.xyz/docs](https://contracts.tempo.xyz/docs)
4 |
5 | Sourcify-compatible smart contract verification service. Currently supports Tempo Testnet and Devnet.
6 |
7 | ## Architecture
8 |
9 | ```mermaid
10 | graph LR
11 | Client["Client"]
12 | Worker["Worker<br/>Hono Routes"]
13 | Container["Container<br/>Solc"]
14 | D1["D1<br/>SQLite"]
15 |
16 | Client -->|HTTP| Worker
17 | Worker -->|compile| Container
18 | Worker -->|query/write| D1
19 | Worker -->|response| Client
20 |
21 | style Worker fill:#2563eb,color:#fff
22 | style Container fill:#8b5cf6,color:#fff
23 | style D1 fill:#f59e0b,color:#fff
24 | ```
25 |
26 | ## API Endpoints
27 |
28 | ### Verification
29 |
30 | - `POST /v2/verify/:chainId/:address` - Verify contract with source code
31 | - `GET /v2/verify/:verificationId` - Check verification status
32 |
33 | ### Lookup
34 |
35 | - `GET /v2/contract/:chainId/:address` - Get verified contract details
36 | - `GET /v2/contract/all-chains/:address` - Find contract across all chains
37 | - `GET /v2/contracts/:chainId` - List all verified contracts on a chain
38 |
39 | ### Usage
40 |
41 | #### With [Foundry](https://getfoundry.sh)
42 |
43 | Pass the API URL to the `--verifier-url` flag and set `--verifier` to `sourcify`:
44 |
45 | ```bash
46 | forge script script/Mail.s.sol --verifier-url https://contracts.tempo.xyz --verifier sourcify
47 | ```
48 |
49 | See [/apps/contract-verification/scripts/quick-verify.sh](./scripts/quick-verify.sh)
50 | and [/apps/contract-verification/scripts/medium-verify.sh](./scripts/medium-verify.sh) for small examples you can run.
51 |
52 | #### Direct API Usage
53 |
54 | - Standard JSON: see [/apps/contract-verification/scripts/verify-with-curl.sh](./scripts/verify-with-curl.sh) for a full example.
55 |
--------------------------------------------------------------------------------
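As a companion to the README's Direct API Usage pointer, here is a minimal TypeScript sketch of the same request that `scripts/verify-with-curl.sh` sends. The chain id, address, and source content are placeholders; the payload shape follows that script.

```ts
// Placeholder values: substitute a real chain id, deployed address, sources, and compiler version.
const response = await fetch(
  'https://contracts.tempo.xyz/v2/verify/42429/0x6c12eB13Ec6C8AC4EaF16CAf4c0c2141386c4c26',
  {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      stdJsonInput: {
        language: 'Solidity',
        sources: { 'src/Counter.sol': { content: '/* full Solidity source here */' } },
        settings: {
          optimizer: { enabled: false, runs: 200 },
          outputSelection: { '*': { '*': ['abi', 'evm.bytecode', 'evm.deployedBytecode'] } },
        },
      },
      compilerVersion: '0.8.30',
      contractIdentifier: 'src/Counter.sol:Counter',
    }),
  },
)
console.log(response.status, await response.json())
```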
/apps/contract-verification/src/index.tsx:
--------------------------------------------------------------------------------
1 | import { getContainer } from '@cloudflare/containers'
2 | import { Hono } from 'hono'
3 | import { showRoutes } from 'hono/dev'
4 | import { prettyJSON } from 'hono/pretty-json'
5 | import { requestId } from 'hono/request-id'
6 | import { timeout } from 'hono/timeout'
7 |
8 | import { sourcifyChains } from '#chains.ts'
9 | import { VerificationContainer } from '#container.ts'
10 | import OpenApiSpec from '#openapi.json' with { type: 'json' }
11 | import packageJSON from '#package.json' with { type: 'json' }
12 | import { docsRoute } from '#route.docs.tsx'
13 | import { lookupAllChainContractsRoute, lookupRoute } from '#route.lookup.ts'
14 | import { verifyRoute } from '#route.verify.ts'
15 | import { legacyVerifyRoute } from '#route.verify-legacy.ts'
16 |
17 | export { VerificationContainer }
18 |
19 | /**
20 | * TODO:
21 | * - CORS,
22 | * - Cache,
23 | * - Security
24 | * - Rate limiting,
25 | */
26 |
27 | const app = new Hono<{ Bindings: Cloudflare.Env }>()
28 |
29 | // @note: order matters
30 | app.use('*', requestId({ headerName: 'X-Tempo-Request-Id' }))
31 | // TODO: update before merging to main
32 | app.use('*', timeout(20_000)) // 20 seconds
33 | app.use(prettyJSON())
34 |
35 | app.route('/docs', docsRoute)
36 | app.route('/verify', legacyVerifyRoute)
37 | app.route('/v2/verify', verifyRoute)
38 | app.route('/v2/contract', lookupRoute)
39 | app.route('/v2/contracts', lookupAllChainContractsRoute)
40 |
41 | app
42 | .get('/health', (context) => context.text('ok'))
43 | .get('/', (context) => context.redirect('/docs'))
44 | // TODO: match sourcify `https://sourcify.dev/server/chains` response schema
45 | .get('/chains', (context) => context.json(sourcifyChains))
46 | .get('/version', async (context) =>
47 | context.json({
48 | version: packageJSON.version,
49 | gitCommitHash: __BUILD_VERSION__,
50 | }),
51 | )
52 | .get('/openapi.json', (context) => context.json(OpenApiSpec))
53 | .get('/ping-container', async (context) =>
54 | getContainer(context.env.VERIFICATION_CONTAINER, 'singleton')
55 | .fetch(new Request('http://container/health'))
56 | .then((response) =>
57 | response.ok
58 | ? context.json({ message: 'ok' })
59 | : context.json({ error: 'Failed to ping container' }, 500),
60 | ),
61 | )
62 |
63 | app.use('*', async (context, next) => {
64 | if (context.env.NODE_ENV !== 'development') return await next()
65 | console.info(`[${context.req.method}] ${context.req.path}`)
66 | await next()
67 | })
68 |
69 | showRoutes(app)
70 |
71 | export default app satisfies ExportedHandler<Cloudflare.Env>
72 |
--------------------------------------------------------------------------------
/apps/contract-verification/scripts/tempo-check.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -euo pipefail
4 |
5 | echo -e "\n=== INIT TEMPO PROJECT ==="
6 | tmp_dir=$(mktemp -d)
7 | cd "$tmp_dir"
8 | forge init -n tempo tempo-check
9 | cd tempo-check
10 |
11 | TEMPO_RPC_URL=https://rpc-orchestra.testnet.tempo.xyz
12 |
13 | echo -e "\n=== FORGE TEST (LOCAL) ==="
14 | forge test
15 |
16 | echo -e "\n=== FORGE SCRIPT (LOCAL) ==="
17 | forge script script/Mail.s.sol
18 |
19 | echo -e "\n=== START TEMPO FORK TESTS ==="
20 |
21 | echo -e "\n=== TEMPO VERSION ==="
22 | cast client --rpc-url $TEMPO_RPC_URL
23 |
24 | echo -e "\n=== FORGE TEST (FORK) ==="
25 | forge test --rpc-url $TEMPO_RPC_URL
26 |
27 | echo -e "\n=== FORGE SCRIPT (FORK) ==="
28 | rm script/Mail.s.sol
29 | cp ~/dev/tempo/tempo-app/apps/contract-verification/scripts/Mail.s.sol script/Mail.s.sol
30 | forge script script/Mail.s.sol --rpc-url $TEMPO_RPC_URL
31 |
32 | echo -e "\n=== CREATE AND FUND ADDRESS ==="
33 | read -r ADDR PK < <(cast wallet new --json | jq -r '.[0] | "\(.address) \(.private_key)"')
34 |
35 | for i in {1..100}; do
36 | OUT=$(cast rpc tempo_fundAddress "$ADDR" --rpc-url "$TEMPO_RPC_URL" 2>&1 || true)
37 |
38 | if echo "$OUT" | jq -e 'arrays' >/dev/null 2>&1; then
39 | echo "$OUT" | jq
40 | break
41 | fi
42 | echo "[$i] $OUT"
43 | sleep 0.2
44 | done
45 |
46 | printf "\naddress: %s\nprivate_key: %s\n" "$ADDR" "$PK"
47 |
48 | echo -e "\n=== WAIT FOR BLOCKS TO MINE ==="
49 | sleep 5
50 | VERIFIER_URL="${VERIFIER_URL:-http://localhost:22222}"
51 |
52 | # If `VERIFIER_URL` is set, add the `--verify` flag to forge commands.
53 | VERIFY_ARGS=()
54 | if [[ -n "${VERIFIER_URL:-}" ]]; then
55 | VERIFY_ARGS+=(--verify --retries 10 --delay 10 --verifier sourcify --verifier-url "$VERIFIER_URL")
56 | fi
57 |
58 | echo -e "\n=== FORGE SCRIPT DEPLOY ==="
59 | forge script script/Mail.s.sol --private-key "$PK" --rpc-url $TEMPO_RPC_URL --broadcast ${VERIFY_ARGS[@]+"${VERIFY_ARGS[@]}"}
60 |
61 | echo -e "\n=== FORGE SCRIPT DEPLOY WITH FEE TOKEN ==="
62 | forge script --fee-token 2 script/Mail.s.sol --private-key "$PK" --rpc-url $TEMPO_RPC_URL --broadcast ${VERIFY_ARGS[@]+"${VERIFY_ARGS[@]}"}
63 | forge script --fee-token 3 script/Mail.s.sol --private-key "$PK" --rpc-url $TEMPO_RPC_URL --broadcast ${VERIFY_ARGS[@]+"${VERIFY_ARGS[@]}"}
64 |
65 | echo -e "\n=== FORGE CREATE DEPLOY ==="
66 | forge create src/Mail.sol:Mail --private-key "$PK" --rpc-url $TEMPO_RPC_URL --broadcast ${VERIFY_ARGS[@]+"${VERIFY_ARGS[@]}"} --constructor-args 0x20c0000000000000000000000000000000000000
67 |
68 | echo -e "\n=== FORGE CREATE DEPLOY WITH FEE TOKEN ==="
69 | forge create --fee-token 2 src/Mail.sol:Mail --private-key "$PK" --rpc-url $TEMPO_RPC_URL --broadcast ${VERIFY_ARGS[@]+"${VERIFY_ARGS[@]}"} --constructor-args 0x20c0000000000000000000000000000000000000
70 | forge create --fee-token 3 src/Mail.sol:Mail --private-key "$PK" --rpc-url $TEMPO_RPC_URL --broadcast ${VERIFY_ARGS[@]+"${VERIFY_ARGS[@]}"} --constructor-args 0x20c0000000000000000000000000000000000000
71 |
72 | echo -e "\n=== CAST ERC20 TRANSFER WITH FEE TOKEN ==="
73 | cast erc20 transfer --fee-token 2 0x20c0000000000000000000000000000000000002 0x4ef5DFf69C1514f4Dbf85aA4F9D95F804F64275F 123456 --rpc-url $TEMPO_RPC_URL --private-key "$PK"
74 | cast erc20 transfer --fee-token 3 0x20c0000000000000000000000000000000000002 0x4ef5DFf69C1514f4Dbf85aA4F9D95F804F64275F 123456 --rpc-url $TEMPO_RPC_URL --private-key "$PK"
75 |
76 | echo -e "\n=== CAST ERC20 APPROVE WITH FEE TOKEN ==="
77 | cast erc20 approve --fee-token 2 0x20c0000000000000000000000000000000000002 0x4ef5DFf69C1514f4Dbf85aA4F9D95F804F64275F 123456 --rpc-url $TEMPO_RPC_URL --private-key "$PK"
78 | cast erc20 approve --fee-token 3 0x20c0000000000000000000000000000000000002 0x4ef5DFf69C1514f4Dbf85aA4F9D95F804F64275F 123456 --rpc-url $TEMPO_RPC_URL --private-key "$PK"
79 |
80 | echo -e "\n=== CAST SEND WITH FEE TOKEN ==="
81 | cast send --fee-token 2 --rpc-url $TEMPO_RPC_URL 0x86A2EE8FAf9A840F7a2c64CA3d51209F9A02081D 'increment()' --private-key "$PK"
82 | cast send --fee-token 3 --rpc-url $TEMPO_RPC_URL 0x86A2EE8FAf9A840F7a2c64CA3d51209F9A02081D 'increment()' --private-key "$PK"
83 |
84 | echo -e "\n=== CAST MKTX WITH FEE TOKEN ==="
85 | cast mktx --fee-token 2 --rpc-url $TEMPO_RPC_URL 0x86A2EE8FAf9A840F7a2c64CA3d51209F9A02081D 'increment()' --private-key "$PK"
86 | cast mktx --fee-token 3 --rpc-url $TEMPO_RPC_URL 0x86A2EE8FAf9A840F7a2c64CA3d51209F9A02081D 'increment()' --private-key "$PK"
87 |
--------------------------------------------------------------------------------
/apps/contract-verification/container/compiler.ts:
--------------------------------------------------------------------------------
1 | import * as NodeFS from 'node:fs/promises'
2 |
3 | const GITHUB_BASE_URL = 'https://github.com'
4 |
5 | const SOLC_CACHE_DIR = '/tmp/solc-cache'
6 | const SOLC_BINARIES_URL = 'https://binaries.soliditylang.org'
7 | const SOLC_GITHUB_RELEASES_URL = `${GITHUB_BASE_URL}/argotorg/solidity/releases/download`
8 |
9 | const VYPER_CACHE_DIR = '/tmp/vyper-cache'
10 | const VYPER_GITHUB_RELEASES_URL = `${GITHUB_BASE_URL}/vyperlang/vyper/releases/download`
11 |
12 | export async function getSolcPath(requestedVersion: string) {
13 | await NodeFS.mkdir(SOLC_CACHE_DIR, { recursive: true })
14 |
15 | // 1. Sanitize the version string (semver, e.g. `0.8.26` or `0.8.26+commit.XXXXXXX`)
16 | const match = requestedVersion.match(
17 | /^0\.\d+\.\d+(?:\+commit\.[0-9a-f]{8})?$/,
18 | )
19 | if (!match)
20 | throw new Error(`Unsupported compilerVersion: ${requestedVersion}`)
21 |
22 | const [version] = match
23 | const [tagVersion] = version.split('+')
24 |
25 | const fsPath = `${SOLC_CACHE_DIR}/solc-${version}`
26 |
27 | // 2. If we already have it, reuse
28 | try {
29 | const stat = await Bun.file(fsPath).stat()
30 | if (stat?.size && stat.size > 0) return fsPath
31 | } catch {
32 | // fall through to download
33 | }
34 |
35 | // 3. Download solc: try GitHub releases first, then fall back to solc-bin mirror.
36 | // GitHub: https://github.com/argotorg/solidity/releases/download/v${tagVersion}/solc-static-linux
37 | // Mirror: https://binaries.soliditylang.org/linux-amd64/solc-linux-amd64-v${version}
38 | let response: Response
39 |
40 | const githubUrl = `${SOLC_GITHUB_RELEASES_URL}/v${tagVersion}/solc-static-linux`
41 | response = await fetch(githubUrl)
42 |
43 | if (!response.ok) {
44 | console.warn(
45 | `[solc] GitHub download failed for ${version} (${githubUrl}): ${response.status}`,
46 | )
47 |
48 | const binariesUrl = `${SOLC_BINARIES_URL}/linux-amd64/solc-linux-amd64-v${version}`
49 | const fallbackResponse = await fetch(binariesUrl)
50 |
51 | if (!fallbackResponse.ok) {
52 | throw new Error(
53 | `Failed to download solc ${version}: GitHub ${response.status}, binaries.soliditylang.org ${fallbackResponse.status}`,
54 | )
55 | }
56 |
57 | response = fallbackResponse
58 | }
59 |
60 | const bytes = new Uint8Array(await response.arrayBuffer())
61 |
62 | await Bun.write(fsPath, bytes)
63 | await NodeFS.chmod(fsPath, 0o755)
64 |
65 | return fsPath
66 | }
67 |
68 | export async function getVyperPath(requestedVersion: string) {
69 | await NodeFS.mkdir(VYPER_CACHE_DIR, { recursive: true })
70 |
71 | // Sanitize the version string
72 | // Vyper versions can be: 0.3.10, 0.4.0, 0.4.1, v0.3.10, 0.3.10+commit.XXXXXXX
73 | const cleaned = requestedVersion.replace(/^v/, '')
74 | const match = cleaned.match(/^(\d+\.\d+\.\d+)(?:\+commit\.[0-9a-f]+)?$/)
75 | if (!match) throw new Error(`Unsupported Vyper version: ${requestedVersion}`)
76 |
77 | const [, version] = match
78 | const fsPath = `${VYPER_CACHE_DIR}/vyper-${version}`
79 |
80 | // If we already have it, reuse
81 | try {
82 | const stat = await Bun.file(fsPath).stat()
83 | if (stat?.size && stat.size > 0) return fsPath
84 | } catch {
85 | // fall through to download
86 | }
87 |
88 | // Download vyper from GitHub releases
89 | // The naming varies by version, so we fetch the release assets to find the correct file
90 | const releaseApiUrl = `https://api.github.com/repos/vyperlang/vyper/releases/tags/v${version}`
91 | const releaseResponse = await fetch(releaseApiUrl, {
92 | headers: { Accept: 'application/vnd.github.v3+json' },
93 | })
94 |
95 | let response: Response | null = null
96 |
97 | if (releaseResponse.ok) {
98 | const release = (await releaseResponse.json()) as {
99 | assets: Array<{ name: string; browser_download_url: string }>
100 | }
101 | const linuxAsset = release.assets.find(
102 | (a) => a.name.endsWith('.linux') && a.name.startsWith(`vyper.${version}`),
103 | )
104 | if (linuxAsset) {
105 | response = await fetch(linuxAsset.browser_download_url)
106 | }
107 | }
108 |
109 | // Fallback to simple pattern for older versions
110 | if (!response?.ok) {
111 | const simpleUrl = `${VYPER_GITHUB_RELEASES_URL}/v${version}/vyper.${version}.linux`
112 | response = await fetch(simpleUrl)
113 | }
114 |
115 | if (!response?.ok)
116 | throw new Error(
117 | `Failed to download Vyper ${version}: release not found or no linux binary available`,
118 | )
119 |
120 | const bytes = new Uint8Array(await response.arrayBuffer())
121 |
122 | await Bun.write(fsPath, bytes)
123 | await NodeFS.chmod(fsPath, 0o755)
124 |
125 | return fsPath
126 | }
127 |
--------------------------------------------------------------------------------
/apps/contract-verification/container/index.ts:
--------------------------------------------------------------------------------
1 | import * as Bun from 'bun'
2 |
3 | import { getSolcPath, getVyperPath } from './compiler.ts'
4 |
5 | const headers = new Headers({
6 | 'X-Request-Id': Bun.randomUUIDv7(),
7 | })
8 |
9 | const server = Bun.serve({
10 | port: 80_80,
11 | development: Bun.env.NODE_ENV === 'development',
12 | routes: {
13 | '/compile': {
14 | POST: async (request, server) => {
15 | const address = server.requestIP(request as Request)
16 | if (address)
17 | console.info(
18 | `[/compile] request IP address: ${address.address}:${address.port}`,
19 | )
20 |
21 | const body = await request.json<{
22 | input: object
23 | compilerVersion: string
24 | }>()
25 | if (!Object.hasOwn(body, 'input'))
26 | return Response.json({ error: 'Missing input' }, { status: 400 })
27 |
28 | if (!Object.hasOwn(body, 'compilerVersion'))
29 | return Response.json(
30 | { error: 'Missing compilerVersion' },
31 | { status: 400 },
32 | )
33 |
34 | const solcPath = await getSolcPath(body.compilerVersion)
35 |
36 | // solc --standard-json reads from stdin
37 | const proc = Bun.spawn([solcPath, '--standard-json'], {
38 | stdin: new TextEncoder().encode(JSON.stringify(body.input)),
39 | stdout: 'pipe',
40 | stderr: 'pipe',
41 | })
42 |
43 | const stdout = await new Response(proc.stdout).text()
44 | const stderr = await new Response(proc.stderr).text()
45 | await proc.exited
46 |
47 | if (stderr) console.error('[compile] stderr:', stderr)
48 |
49 | if (!stdout)
50 | return Response.json(
51 | { error: 'Failed to compile', stderr },
52 | { status: 500 },
53 | )
54 |
55 | try {
56 | const output = JSON.parse(stdout)
57 | return Response.json(output, { status: 200 })
58 | } catch (error) {
59 | console.error('[compile] Failed to parse solc output:', error)
60 | return Response.json(
61 | { error: 'Failed to parse solc output', stdout, stderr },
62 | { status: 500 },
63 | )
64 | }
65 | },
66 | },
67 | '/compile/vyper': {
68 | POST: async (request, server) => {
69 | const address = server.requestIP(request as Request)
70 | if (address)
71 | console.info(
72 | `[/compile/vyper] request IP address: ${address.address}:${address.port}`,
73 | )
74 |
75 | const body = await request.json<{
76 | input: object
77 | compilerVersion: string
78 | }>()
79 | if (!Object.hasOwn(body, 'input'))
80 | return Response.json({ error: 'Missing input' }, { status: 400 })
81 |
82 | if (!Object.hasOwn(body, 'compilerVersion'))
83 | return Response.json(
84 | { error: 'Missing compilerVersion' },
85 | { status: 400 },
86 | )
87 |
88 | const vyperPath = await getVyperPath(body.compilerVersion)
89 |
90 | // vyper --standard-json reads from stdin
91 | const proc = Bun.spawn([vyperPath, '--standard-json'], {
92 | stdin: new TextEncoder().encode(JSON.stringify(body.input)),
93 | stdout: 'pipe',
94 | stderr: 'pipe',
95 | })
96 |
97 | const stdout = await new Response(proc.stdout).text()
98 | const stderr = await new Response(proc.stderr).text()
99 | await proc.exited
100 |
101 | // Vyper < 0.4.0 outputs warnings to stderr, so only log if it looks like an error
102 | if (stderr && !stderr.includes('Warning'))
103 | console.error('[compile/vyper] stderr:', stderr)
104 |
105 | if (!stdout)
106 | return Response.json(
107 | { error: 'Failed to compile', stderr },
108 | { status: 500 },
109 | )
110 |
111 | try {
112 | const output = JSON.parse(stdout)
113 | return Response.json(output, { status: 200 })
114 | } catch (error) {
115 | console.error('[compile/vyper] Failed to parse vyper output:', error)
116 | return Response.json(
117 | { error: 'Failed to parse vyper output', stdout, stderr },
118 | { status: 500 },
119 | )
120 | }
121 | },
122 | },
123 | '/health': new Response('ok'),
124 | '/metrics': (_, server) =>
125 | new Response(`Active requests: ${server.pendingRequests}`),
126 | },
127 | error: (error) => {
128 | console.error(Bun.color('red', 'ansi'), JSON.stringify(error, undefined, 2))
129 | const errorMessage =
130 | error instanceof Error ? error.message : 'Unknown error'
131 | return new Response(errorMessage, { status: 500, headers })
132 | },
133 | })
134 |
135 | if (Bun.env.NODE_ENV === 'development')
136 | console.info(
137 | `Server is running on`,
138 | Bun.color('#4DFA7B', 'ansi'),
139 | server.url.toString().replaceAll(`${server.port}/`, `${server.port}`),
140 | )
141 | else console.info(`Server started on port ${server.port}`)
142 |
--------------------------------------------------------------------------------
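For context, a minimal sketch of how a caller (for example, the Worker) could hit the `/compile` route defined above. The standard JSON input is a placeholder, and the `http://container` host mirrors the one used by `/ping-container` in `src/index.tsx`.

```ts
// Placeholder request against the container's /compile route; the input object is illustrative.
const compileResponse = await fetch('http://container/compile', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    compilerVersion: '0.8.30',
    input: {
      language: 'Solidity',
      sources: {
        'src/Counter.sol': {
          content: 'pragma solidity ^0.8.30; contract Counter { uint256 public n; }',
        },
      },
      settings: { outputSelection: { '*': { '*': ['abi', 'evm.bytecode'] } } },
    },
  }),
})
// On success this is the solc --standard-json output, returned verbatim by the route.
const solcOutput = await compileResponse.json()
```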
/apps/contract-verification/scripts/verify-with-curl.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -euo pipefail
4 |
5 | PORT="${PORT:-22222}"
6 | URL="${URL:-http://localhost:${PORT}}"
7 | CHAIN_ID="${CHAIN_ID:-42429}"
8 | ADDRESS="${ADDRESS:-0x6c12eB13Ec6C8AC4EaF16CAf4c0c2141386c4c26}"
9 |
10 | echo "Verifying contract $ADDRESS"
11 | echo "on chain $CHAIN_ID"
12 | echo "verify API is running on ${URL}"
13 | echo
14 |
15 | curl --silent \
16 | --request POST \
17 | --url "${URL}/v2/verify/${CHAIN_ID}/${ADDRESS}" \
18 | --header 'Content-Type: application/json' \
19 | --data '{
20 | "stdJsonInput": {
21 | "language": "Solidity",
22 | "sources": {
23 | "src/Mail.sol": {
24 | "content": "// SPDX-License-Identifier: UNLICENSED\npragma solidity ^0.8.13;\n\nimport {ITIP20} from \"tempo-std/interfaces/ITIP20.sol\";\n\ncontract Mail {\n event MailSent(address indexed from, address indexed to, string message, Attachment attachment);\n\n struct Attachment {\n uint256 amount;\n bytes32 memo;\n }\n\n ITIP20 public token;\n\n constructor(ITIP20 token_) {\n token = token_;\n }\n\n function sendMail(address to, string memory message, Attachment memory attachment) external {\n token.transferFromWithMemo(msg.sender, to, attachment.amount, attachment.memo);\n\n emit MailSent(msg.sender, to, message, attachment);\n }\n}"
25 | },
26 | "tempo-std/interfaces/ITIP20.sol": {
27 | "content": "// SPDX-License-Identifier: MIT\npragma solidity ^0.8.13;\n\ninterface ITIP20 {\n error ContractPaused();\n error InsufficientAllowance();\n error InsufficientBalance(uint256 currentBalance, uint256 expectedBalance, address);\n error InvalidAmount();\n error InvalidCurrency();\n error InvalidQuoteToken();\n error InvalidBaseToken();\n error InvalidToken();\n error InvalidRecipient();\n error InvalidSupplyCap();\n error NoOptedInSupply();\n error ScheduledRewardsDisabled();\n error PolicyForbids();\n error ProtectedAddress();\n error SupplyCapExceeded();\n event Approval(address indexed owner, address indexed spender, uint256 amount);\n event Burn(address indexed from, uint256 amount);\n event BurnBlocked(address indexed from, uint256 amount);\n event Mint(address indexed to, uint256 amount);\n event NextQuoteTokenSet(address indexed updater, ITIP20 indexed nextQuoteToken);\n event PauseStateUpdate(address indexed updater, bool isPaused);\n event QuoteTokenUpdate(address indexed updater, ITIP20 indexed newQuoteToken);\n event RewardRecipientSet(address indexed holder, address indexed recipient);\n event RewardScheduled(address indexed funder, uint64 indexed id, uint256 amount, uint32 durationSeconds);\n event SupplyCapUpdate(address indexed updater, uint256 indexed newSupplyCap);\n event Transfer(address indexed from, address indexed to, uint256 amount);\n event TransferPolicyUpdate(address indexed updater, uint64 indexed newPolicyId);\n event TransferWithMemo(address indexed from, address indexed to, uint256 amount, bytes32 indexed memo);\n function BURN_BLOCKED_ROLE() external view returns (bytes32);\n function ISSUER_ROLE() external view returns (bytes32);\n function PAUSE_ROLE() external view returns (bytes32);\n function UNPAUSE_ROLE() external view returns (bytes32);\n function allowance(address owner, address spender) external view returns (uint256);\n function approve(address spender, uint256 amount) external returns (bool);\n function balanceOf(address account) external view returns (uint256);\n function burn(uint256 amount) external;\n function burnBlocked(address from, uint256 amount) external;\n function burnWithMemo(uint256 amount, bytes32 memo) external;\n function changeTransferPolicyId(uint64 newPolicyId) external;\n function claimRewards() external returns (uint256 maxAmount);\n function completeQuoteTokenUpdate() external;\n function currency() external view returns (string memory);\n function decimals() external pure returns (uint8);\n function globalRewardPerToken() external view returns (uint256);\n function mint(address to, uint256 amount) external;\n function mintWithMemo(address to, uint256 amount, bytes32 memo) external;\n function name() external view returns (string memory);\n function nextQuoteToken() external view returns (ITIP20);\n function optedInSupply() external view returns (uint128);\n function pause() external;\n function paused() external view returns (bool);\n function quoteToken() external view returns (ITIP20);\n function setNextQuoteToken(ITIP20 newQuoteToken) external;\n function setRewardRecipient(address newRewardRecipient) external;\n function setSupplyCap(uint256 newSupplyCap) external;\n function startReward(uint256 amount, uint32 seconds_) external returns (uint64);\n function supplyCap() external view returns (uint256);\n function symbol() external view returns (string memory);\n function systemTransferFrom(address from, address to, uint256 amount) external returns (bool);\n function totalSupply() external view returns 
(uint256);\n function transfer(address to, uint256 amount) external returns (bool);\n function transferFeePostTx(address to, uint256 refund, uint256 actualUsed) external;\n function transferFeePreTx(address from, uint256 amount) external;\n function transferFrom(address from, address to, uint256 amount) external returns (bool);\n function transferFromWithMemo(address from, address to, uint256 amount, bytes32 memo) external returns (bool);\n function transferPolicyId() external view returns (uint64);\n function transferWithMemo(address to, uint256 amount, bytes32 memo) external;\n function unpause() external;\n function userRewardInfo(address) external view returns (address rewardRecipient, uint256 rewardPerToken, uint256 rewardBalance);\n}"
28 | }
29 | },
30 | "settings": {
31 | "optimizer": { "enabled": false, "runs": 200 },
32 | "outputSelection": { "*": { "*": ["abi", "evm.bytecode", "evm.deployedBytecode"] } },
33 | "evmVersion": "cancun"
34 | }
35 | },
36 | "compilerVersion": "0.8.30",
37 | "contractIdentifier": "src/Mail.sol:Mail"
38 | }'
39 |
--------------------------------------------------------------------------------
/apps/contract-verification/src/database/schema.ts:
--------------------------------------------------------------------------------
1 | import { sql } from 'drizzle-orm'
2 | import { index, sqliteTable, uniqueIndex } from 'drizzle-orm/sqlite-core'
3 |
4 | // ============================================================================
5 | // Helper for common audit columns
6 | // ============================================================================
7 |
8 | const auditColumns = (s: Parameters<Parameters<typeof sqliteTable>[1]>[0]) => ({
9 | createdAt: s.text('created_at').notNull().default(sql`(datetime('now'))`),
10 | updatedAt: s.text('updated_at').notNull().default(sql`(datetime('now'))`),
11 | /** SQLite lacks CURRENT_USER - set from application context */
12 | createdBy: s.text('created_by').notNull(),
13 | /** SQLite lacks CURRENT_USER - set from application context */
14 | updatedBy: s.text('updated_by').notNull(),
15 | })
16 |
17 | // ============================================================================
18 | // code - Stores contract bytecode with content-addressed hashing
19 | // ============================================================================
20 |
21 | /**
22 | * PostgreSQL CHECK constraint (validate at app level):
23 | * (code IS NOT NULL AND code_hash = digest(code, 'sha256')) OR
24 | * (code IS NULL AND code_hash = '\x')
25 | */
26 | export const codeTable = sqliteTable(
27 | 'code',
28 | (s) => ({
29 | /** SHA-256 hash of the code (primary key) */
30 | codeHash: s.blob('code_hash').primaryKey(),
31 | ...auditColumns(s),
32 | /** Keccak-256 hash of the code */
33 | codeHashKeccak: s.blob('code_hash_keccak').notNull(),
34 | /** Contract bytecode (nullable - can be pruned) */
35 | code: s.blob('code'),
36 | }),
37 | (table) => [index('code_code_hash_keccak').on(table.codeHashKeccak)],
38 | )
39 |
40 | // ============================================================================
41 | // sources - Stores source code files
42 | // ============================================================================
43 |
44 | /**
45 | * PostgreSQL CHECK constraint (validate at app level):
46 | * source_hash = digest(content, 'sha256')
47 | */
48 | export const sourcesTable = sqliteTable('sources', (s) => ({
49 | /** SHA-256 hash of the source content (primary key) */
50 | sourceHash: s.blob('source_hash').primaryKey(),
51 | /** Keccak-256 hash of the source content */
52 | sourceHashKeccak: s.blob('source_hash_keccak').notNull(),
53 | /** Source code content */
54 | content: s.text('content').notNull(),
55 | ...auditColumns(s),
56 | }))
57 |
58 | // ============================================================================
59 | // contracts - Represents a contract by its creation/runtime code hashes
60 | // ============================================================================
61 |
62 | export const contractsTable = sqliteTable(
63 | 'contracts',
64 | (s) => ({
65 | /** UUID primary key (generate with crypto.randomUUID()) */
66 | id: s.text('id').primaryKey(),
67 | ...auditColumns(s),
68 | /** FK to code.code_hash (creation bytecode) */
69 | creationCodeHash: s
70 | .blob('creation_code_hash')
71 | .references(() => codeTable.codeHash),
72 | /** FK to code.code_hash (runtime bytecode) */
73 | runtimeCodeHash: s
74 | .blob('runtime_code_hash')
75 | .notNull()
76 | .references(() => codeTable.codeHash),
77 | }),
78 | (table) => [
79 | index('contracts_creation_code_hash').on(table.creationCodeHash),
80 | index('contracts_runtime_code_hash').on(table.runtimeCodeHash),
81 | uniqueIndex('contracts_pseudo_pkey').on(
82 | table.creationCodeHash,
83 | table.runtimeCodeHash,
84 | ),
85 | ],
86 | )
87 |
88 | // ============================================================================
89 | // contract_deployments - Links contracts to on-chain deployments
90 | // ============================================================================
91 |
92 | export const contractDeploymentsTable = sqliteTable(
93 | 'contract_deployments',
94 | (s) => ({
95 | /** UUID primary key */
96 | id: s.text('id').primaryKey(),
97 | ...auditColumns(s),
98 | /** Chain ID (e.g., 1 for mainnet) */
99 | chainId: s.integer('chain_id').notNull(),
100 | /** Contract address (20 bytes) */
101 | address: s.blob('address').notNull(),
102 | /** Transaction hash of deployment */
103 | transactionHash: s.blob('transaction_hash'),
104 | /** Block number of deployment */
105 | blockNumber: s.integer('block_number'),
106 | /** Transaction index within block */
107 | transactionIndex: s.integer('transaction_index'),
108 | /** Deployer address */
109 | deployer: s.blob('deployer'),
110 | /** FK to contracts.id */
111 | contractId: s
112 | .text('contract_id')
113 | .notNull()
114 | .references(() => contractsTable.id),
115 | }),
116 | (table) => [
117 | index('contract_deployments_address').on(table.address),
118 | index('contract_deployments_contract_id').on(table.contractId),
119 | uniqueIndex('contract_deployments_pseudo_pkey').on(
120 | table.chainId,
121 | table.address,
122 | table.transactionHash,
123 | table.contractId,
124 | ),
125 | ],
126 | )
127 |
128 | // ============================================================================
129 | // compiled_contracts - Stores compilation results
130 | // ============================================================================
131 |
132 | export const compiledContractsTable = sqliteTable(
133 | 'compiled_contracts',
134 | (s) => ({
135 | /** UUID primary key */
136 | id: s.text('id').primaryKey(),
137 | ...auditColumns(s),
138 | /** Compiler name (e.g., "solc") */
139 | compiler: s.text('compiler').notNull(),
140 | /** Compiler version (e.g., "0.8.19") */
141 | version: s.text('version').notNull(),
142 | /** Source language (e.g., "Solidity", "Vyper") */
143 | language: s.text('language').notNull(),
144 | /** Contract name */
145 | name: s.text('name').notNull(),
146 | /** Fully qualified name (e.g., "contracts/Token.sol:Token") */
147 | fullyQualifiedName: s.text('fully_qualified_name').notNull(),
148 | /** Compiler settings (JSON) */
149 | compilerSettings: s.text('compiler_settings').notNull(),
150 | /** Compilation artifacts - abi, userdoc, devdoc, sources, storageLayout (JSON) */
151 | compilationArtifacts: s.text('compilation_artifacts').notNull(),
152 | /** FK to code.code_hash (creation bytecode) */
153 | creationCodeHash: s
154 | .blob('creation_code_hash')
155 | .notNull()
156 | .references(() => codeTable.codeHash),
157 | /** Creation code artifacts - sourceMap, linkReferences, cborAuxdata (JSON) */
158 | creationCodeArtifacts: s.text('creation_code_artifacts').notNull(),
159 | /** FK to code.code_hash (runtime bytecode) */
160 | runtimeCodeHash: s
161 | .blob('runtime_code_hash')
162 | .notNull()
163 | .references(() => codeTable.codeHash),
164 | /** Runtime code artifacts - sourceMap, linkReferences, immutableReferences, cborAuxdata (JSON) */
165 | runtimeCodeArtifacts: s.text('runtime_code_artifacts').notNull(),
166 | }),
167 | (table) => [
168 | index('compiled_contracts_creation_code_hash').on(table.creationCodeHash),
169 | index('compiled_contracts_runtime_code_hash').on(table.runtimeCodeHash),
170 | uniqueIndex('compiled_contracts_pseudo_pkey').on(
171 | table.compiler,
172 | table.version,
173 | table.language,
174 | table.creationCodeHash,
175 | table.runtimeCodeHash,
176 | ),
177 | ],
178 | )
179 |
180 | // ============================================================================
181 | // compiled_contracts_sources - Links compilations to source files
182 | // ============================================================================
183 |
184 | export const compiledContractsSourcesTable = sqliteTable(
185 | 'compiled_contracts_sources',
186 | (s) => ({
187 | /** UUID primary key */
188 | id: s.text('id').primaryKey(),
189 | /** FK to compiled_contracts.id */
190 | compilationId: s
191 | .text('compilation_id')
192 | .notNull()
193 | .references(() => compiledContractsTable.id),
194 | /** FK to sources.source_hash */
195 | sourceHash: s
196 | .blob('source_hash')
197 | .notNull()
198 | .references(() => sourcesTable.sourceHash),
199 | /** File path within compilation */
200 | path: s.text('path').notNull(),
201 | }),
202 | (table) => [
203 | index('compiled_contracts_sources_compilation_id').on(table.compilationId),
204 | index('compiled_contracts_sources_source_hash').on(table.sourceHash),
205 | uniqueIndex('compiled_contracts_sources_pseudo_pkey').on(
206 | table.compilationId,
207 | table.path,
208 | ),
209 | ],
210 | )
211 |
212 | // ============================================================================
213 | // signatures - Stores function/event/error signatures
214 | // ============================================================================
215 |
216 | export const signaturesTable = sqliteTable(
217 | 'signatures',
218 | (s) => ({
219 | /** Full 32-byte signature hash (primary key) */
220 | signatureHash32: s.blob('signature_hash_32').primaryKey(),
221 | /** First 4 bytes of signature hash (for function selectors) - generated column */
222 | // Note: SQLite generated columns need raw SQL, handled at migration level
223 | /** Human-readable signature (e.g., "transfer(address,uint256)") */
224 | signature: s.text('signature').notNull(),
225 | createdAt: s.text('created_at').notNull().default(sql`(datetime('now'))`),
226 | }),
227 | (table) => [index('signatures_signature_idx').on(table.signature)],
228 | )
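
// How signature rows get populated (see the verify routes later in this dump):
// the canonical signature string is hashed with keccak256, the full 32-byte
// digest is stored in signature_hash_32, and a 4-byte function selector is
// simply its first 4 bytes. Illustrative sketch using the `ox` and `viem`
// helpers those routes already import:
//
//   const signature = 'transfer(address,uint256)'
//   const signatureHash32 = Hex.toBytes(keccak256(Hex.fromString(signature)))
//   const selector = signatureHash32.slice(0, 4) // 0xa9059cbb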
229 |
230 | // ============================================================================
231 | // compiled_contracts_signatures - Links compilations to signatures
232 | // ============================================================================
233 |
234 | /** Signature type enum values */
235 | export type SignatureType = 'function' | 'event' | 'error'
236 |
237 | export const compiledContractsSignaturesTable = sqliteTable(
238 | 'compiled_contracts_signatures',
239 | (s) => ({
240 | /** UUID primary key */
241 | id: s.text('id').primaryKey(),
242 | /** FK to compiled_contracts.id */
243 | compilationId: s
244 | .text('compilation_id')
245 | .notNull()
246 | .references(() => compiledContractsTable.id),
247 | /** FK to signatures.signature_hash_32 */
248 | signatureHash32: s
249 | .blob('signature_hash_32')
250 | .notNull()
251 | .references(() => signaturesTable.signatureHash32),
252 | /** Type: 'function', 'event', or 'error' */
253 | signatureType: s.text('signature_type').notNull().$type<SignatureType>(),
254 | createdAt: s.text('created_at').notNull().default(sql`(datetime('now'))`),
255 | }),
256 | (table) => [
257 | index('compiled_contracts_signatures_signature_idx').on(
258 | table.signatureHash32,
259 | ),
260 | index('compiled_contracts_signatures_type_signature_idx').on(
261 | table.signatureType,
262 | table.signatureHash32,
263 | ),
264 | uniqueIndex('compiled_contracts_signatures_pseudo_pkey').on(
265 | table.compilationId,
266 | table.signatureHash32,
267 | table.signatureType,
268 | ),
269 | ],
270 | )
271 |
272 | // ============================================================================
273 | // verified_contracts - Links deployments to compilations with match info
274 | // ============================================================================
275 |
276 | export const verifiedContractsTable = sqliteTable(
277 | 'verified_contracts',
278 | (s) => ({
279 | /** Auto-increment primary key */
280 | id: s.integer('id').primaryKey({ autoIncrement: true }),
281 | ...auditColumns(s),
282 | /** FK to contract_deployments.id */
283 | deploymentId: s
284 | .text('deployment_id')
285 | .notNull()
286 | .references(() => contractDeploymentsTable.id),
287 | /** FK to compiled_contracts.id */
288 | compilationId: s
289 | .text('compilation_id')
290 | .notNull()
291 | .references(() => compiledContractsTable.id),
292 | /** Whether creation code matched */
293 | creationMatch: s.integer('creation_match', { mode: 'boolean' }).notNull(),
294 | /** Creation match values (JSON) - constructor args, libraries, etc. */
295 | creationValues: s.text('creation_values'),
296 | /** Creation transformations applied (JSON) */
297 | creationTransformations: s.text('creation_transformations'),
298 | /** Whether creation metadata matched exactly */
299 | creationMetadataMatch: s.integer('creation_metadata_match', {
300 | mode: 'boolean',
301 | }),
302 | /** Whether runtime code matched */
303 | runtimeMatch: s.integer('runtime_match', { mode: 'boolean' }).notNull(),
304 | /** Runtime match values (JSON) - libraries, immutables, etc. */
305 | runtimeValues: s.text('runtime_values'),
306 | /** Runtime transformations applied (JSON) */
307 | runtimeTransformations: s.text('runtime_transformations'),
308 | /** Whether runtime metadata matched exactly */
309 | runtimeMetadataMatch: s.integer('runtime_metadata_match', {
310 | mode: 'boolean',
311 | }),
312 | }),
313 | (table) => [
314 | index('verified_contracts_deployment_id').on(table.deploymentId),
315 | index('verified_contracts_compilation_id').on(table.compilationId),
316 | uniqueIndex('verified_contracts_pseudo_pkey').on(
317 | table.compilationId,
318 | table.deploymentId,
319 | ),
320 | ],
321 | )
322 |
323 | // ============================================================================
324 | // verification_jobs - Tracks verification job status
325 | // ============================================================================
326 |
327 | export const verificationJobsTable = sqliteTable(
328 | 'verification_jobs',
329 | (s) => ({
330 | /** UUID primary key */
331 | id: s.text('id').primaryKey(),
332 | /** When verification started */
333 | startedAt: s.text('started_at').notNull().default(sql`(datetime('now'))`),
334 | /** When verification completed (null if still running) */
335 | completedAt: s.text('completed_at'),
336 | /** Chain ID */
337 | chainId: s.integer('chain_id').notNull(),
338 | /** Contract address being verified */
339 | contractAddress: s.blob('contract_address').notNull(),
340 | /** FK to verified_contracts.id (set on success) */
341 | verifiedContractId: s
342 | .integer('verified_contract_id')
343 | .references(() => verifiedContractsTable.id),
344 | /** Error code if verification failed */
345 | errorCode: s.text('error_code'),
346 | /** Error ID for tracking */
347 | errorId: s.text('error_id'),
348 | /** Error details (JSON) */
349 | errorData: s.text('error_data'),
350 | /** API endpoint that initiated verification */
351 | verificationEndpoint: s.text('verification_endpoint').notNull(),
352 | /** Hardware info for debugging */
353 | hardware: s.text('hardware'),
354 | /** Compilation time in milliseconds */
355 | compilationTime: s.integer('compilation_time'),
356 | /** External verification service results (JSON) */
357 | externalVerification: s.text('external_verification'),
358 | }),
359 | (table) => [
360 | index('verification_jobs_chain_id_address_idx').on(
361 | table.chainId,
362 | table.contractAddress,
363 | ),
364 | ],
365 | )
366 |
367 | // ============================================================================
368 | // verification_jobs_ephemeral - Temporary data for verification jobs
369 | // ============================================================================
370 |
371 | export const verificationJobsEphemeralTable = sqliteTable(
372 | 'verification_jobs_ephemeral',
373 | (s) => ({
374 | /** FK to verification_jobs.id (also primary key) */
375 | id: s
376 | .text('id')
377 | .primaryKey()
378 | .references(() => verificationJobsTable.id),
379 | /** Recompiled creation bytecode */
380 | recompiledCreationCode: s.blob('recompiled_creation_code'),
381 | /** Recompiled runtime bytecode */
382 | recompiledRuntimeCode: s.blob('recompiled_runtime_code'),
383 | /** On-chain creation bytecode */
384 | onchainCreationCode: s.blob('onchain_creation_code'),
385 | /** On-chain runtime bytecode */
386 | onchainRuntimeCode: s.blob('onchain_runtime_code'),
387 | /** Creation transaction hash */
388 | creationTransactionHash: s.blob('creation_transaction_hash'),
389 | }),
390 | )
391 |
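A minimal usage sketch (not part of schema.ts) showing how the route handlers later in this dump join these tables to answer "is this address verified on this chain?". `env.CONTRACTS_DB` is the D1 binding those handlers use; the chain id and address are assumed inputs.

import { and, eq } from 'drizzle-orm'
import { drizzle } from 'drizzle-orm/d1'
import { Hex } from 'ox'

import {
  contractDeploymentsTable,
  verifiedContractsTable,
} from '#database/schema.ts'

async function isVerified(
  env: Cloudflare.Env,
  chainId: number,
  address: `0x${string}`,
) {
  const db = drizzle(env.CONTRACTS_DB)
  // Addresses are stored as raw bytes (blob columns), so decode the hex string first
  const rows = await db
    .select({ matchId: verifiedContractsTable.id })
    .from(verifiedContractsTable)
    .innerJoin(
      contractDeploymentsTable,
      eq(verifiedContractsTable.deploymentId, contractDeploymentsTable.id),
    )
    .where(
      and(
        eq(contractDeploymentsTable.chainId, chainId),
        eq(contractDeploymentsTable.address, Hex.toBytes(address)),
      ),
    )
    .limit(1)
  return rows.length > 0
}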
--------------------------------------------------------------------------------
/apps/contract-verification/src/route.verify-legacy.ts:
--------------------------------------------------------------------------------
1 | import { getContainer } from '@cloudflare/containers'
2 | import { and, eq } from 'drizzle-orm'
3 | import { drizzle } from 'drizzle-orm/d1'
4 | import { Hono } from 'hono'
5 | import { bodyLimit } from 'hono/body-limit'
6 | import { Address, Hex } from 'ox'
7 | import { type Chain, createPublicClient, http, keccak256 } from 'viem'
8 |
9 | import {
10 | getVyperAuxdataStyle,
11 | getVyperImmutableReferences,
12 | type ImmutableReferences,
13 | type LinkReferences,
14 | matchBytecode,
15 | } from '#bytecode-matching.ts'
16 | import { chains, DEVNET_CHAIN_ID, TESTNET_CHAIN_ID } from '#chains.ts'
17 |
18 | import {
19 | codeTable,
20 | compiledContractsSignaturesTable,
21 | compiledContractsSourcesTable,
22 | compiledContractsTable,
23 | contractDeploymentsTable,
24 | contractsTable,
25 | type SignatureType,
26 | signaturesTable,
27 | sourcesTable,
28 | verifiedContractsTable,
29 | } from '#database/schema.ts'
30 | import { normalizeSourcePath, sourcifyError } from '#utilities.ts'
31 |
32 | /**
33 | * Legacy Sourcify-compatible routes for Foundry forge verify.
34 | *
35 | * POST /verify - Solidity verification
36 | * POST /verify/vyper - Vyper verification
37 | */
38 |
39 | const legacyVerifyRoute = new Hono<{ Bindings: Cloudflare.Env }>()
40 |
41 | legacyVerifyRoute.use(
42 | '*',
43 | bodyLimit({
44 | maxSize: 2 * 1024 * 1024, // 2mb
45 | onError: (context) => {
46 | const message = `[requestId: ${context.req.header('X-Tempo-Request-Id')}] Body limit exceeded`
47 | console.error(message)
48 | return sourcifyError(context, 413, 'body_too_large', message)
49 | },
50 | }),
51 | )
52 |
53 | interface LegacyVyperRequest {
54 | address: string
55 | chain: string
56 | files: Record<string, string>
57 | contractPath: string
58 | contractName: string
59 | compilerVersion: string
60 | compilerSettings?: object
61 | creatorTxHash?: string
62 | }
63 |
64 | // POST /verify/vyper - Legacy Sourcify Vyper verification (used by Foundry)
65 | legacyVerifyRoute.post('/vyper', async (context) => {
66 | try {
67 | const body = (await context.req.json()) as LegacyVyperRequest
68 |
69 | console.log('[verify/vyper] Request body:', JSON.stringify(body, null, 2))
70 |
71 | const {
72 | address,
73 | chain,
74 | files,
75 | contractPath,
76 | contractName,
77 | compilerVersion,
78 | compilerSettings,
79 | } = body
80 |
81 | const chainId = Number(chain)
82 | if (![DEVNET_CHAIN_ID, TESTNET_CHAIN_ID].includes(chainId)) {
83 | return sourcifyError(
84 | context,
85 | 400,
86 | 'unsupported_chain',
87 | `The chain with chainId ${chainId} is not supported`,
88 | )
89 | }
90 |
91 | if (!Address.validate(address, { strict: true })) {
92 | return sourcifyError(
93 | context,
94 | 400,
95 | 'invalid_address',
96 | `Invalid address: ${address}`,
97 | )
98 | }
99 |
100 | if (!files || Object.keys(files).length === 0) {
101 | return sourcifyError(
102 | context,
103 | 400,
104 | 'missing_files',
105 | 'No source files provided',
106 | )
107 | }
108 |
109 | if (!contractPath || !contractName || !compilerVersion) {
110 | return sourcifyError(
111 | context,
112 | 400,
113 | 'missing_params',
114 | 'contractPath, contractName, and compilerVersion are required',
115 | )
116 | }
117 |
118 | // Check if already verified
119 | const db = drizzle(context.env.CONTRACTS_DB)
120 | const addressBytes = Hex.toBytes(address as `0x${string}`)
121 |
122 | const existingVerification = await db
123 | .select({
124 | matchId: verifiedContractsTable.id,
125 | })
126 | .from(verifiedContractsTable)
127 | .innerJoin(
128 | contractDeploymentsTable,
129 | eq(verifiedContractsTable.deploymentId, contractDeploymentsTable.id),
130 | )
131 | .where(
132 | and(
133 | eq(contractDeploymentsTable.chainId, chainId),
134 | eq(contractDeploymentsTable.address, addressBytes),
135 | ),
136 | )
137 | .limit(1)
138 |
139 | if (existingVerification.length > 0) {
140 | return context.json({
141 | result: [{ address, chainId: chain, status: 'perfect' }],
142 | })
143 | }
144 |
145 | const chainConfig = chains[
146 | chainId as keyof typeof chains
147 | ] as unknown as Chain
148 | const client = createPublicClient({
149 | chain: chainConfig,
150 | transport: http(
151 | chainConfig.id === TESTNET_CHAIN_ID
152 | ? 'https://rpc-orchestra.testnet.tempo.xyz'
153 | : undefined,
154 | ),
155 | })
156 |
157 | const onchainBytecode = await client.getCode({
158 | address: address as `0x${string}`,
159 | })
160 | if (!onchainBytecode || onchainBytecode === '0x') {
161 | return context.json({
162 | result: [
163 | {
164 | address,
165 | chainId: chain,
166 | status: 'null',
167 | message: `Chain #${chainId} does not have a contract deployed at ${address}`,
168 | },
169 | ],
170 | })
171 | }
172 |
173 | // Convert legacy format to standard JSON input
174 | const sources: Record<string, { content: string }> = {}
175 | for (const [path, content] of Object.entries(files)) {
176 | sources[path] = { content }
177 | }
178 |
179 | const stdJsonInput = {
180 | language: 'Vyper',
181 | sources,
182 | settings: compilerSettings ?? {
183 | outputSelection: {
184 | '*': ['abi', 'evm.bytecode', 'evm.deployedBytecode'],
185 | },
186 | },
187 | }
188 |
189 | // Compile via container
190 | const container = getContainer(
191 | context.env.VERIFICATION_CONTAINER,
192 | 'singleton',
193 | )
194 |
195 | const compileResponse = await container.fetch(
196 | new Request('http://container/compile/vyper', {
197 | method: 'POST',
198 | headers: { 'Content-Type': 'application/json' },
199 | body: JSON.stringify({
200 | compilerVersion,
201 | input: stdJsonInput,
202 | }),
203 | }),
204 | )
205 |
206 | if (!compileResponse.ok) {
207 | const errorText = await compileResponse.text()
208 | return sourcifyError(context, 500, 'compilation_failed', errorText)
209 | }
210 |
211 | const compileOutput = (await compileResponse.json()) as {
212 | contracts?: Record<
213 | string,
214 | Record<
215 | string,
216 | {
217 | abi: Array<{
218 | type: string
219 | name?: string
220 | inputs?: Array<{ type: string; name?: string }>
221 | }>
222 | evm: {
223 | bytecode: {
224 | object: string
225 | linkReferences?: LinkReferences
226 | sourceMap?: string
227 | }
228 | deployedBytecode: {
229 | object: string
230 | linkReferences?: LinkReferences
231 | immutableReferences?: ImmutableReferences
232 | sourceMap?: string
233 | }
234 | }
235 | metadata?: string
236 | storageLayout?: unknown
237 | userdoc?: unknown
238 | devdoc?: unknown
239 | }
240 | >
241 | >
242 | errors?: Array<{
243 | severity: string
244 | message: string
245 | formattedMessage?: string
246 | }>
247 | }
248 |
249 | const errors =
250 | compileOutput.errors?.filter((e) => e.severity === 'error') ?? []
251 | if (errors.length > 0) {
252 | return sourcifyError(
253 | context,
254 | 400,
255 | 'compilation_error',
256 | errors.map((e) => e.formattedMessage ?? e.message).join('\n'),
257 | )
258 | }
259 |
260 | console.log(
261 | '[verify/vyper] Compile output contracts:',
262 | JSON.stringify(Object.keys(compileOutput.contracts ?? {})),
263 | )
264 | console.log('[verify/vyper] Looking for:', contractPath, contractName)
265 |
266 | // Get compiled bytecode for the target contract
267 | const compiledContract =
268 | compileOutput.contracts?.[contractPath]?.[contractName]
269 | if (!compiledContract) {
270 | console.log(
271 | '[verify/vyper] Available in path:',
272 | compileOutput.contracts?.[contractPath]
273 | ? Object.keys(compileOutput.contracts[contractPath])
274 | : 'path not found',
275 | )
276 | return sourcifyError(
277 | context,
278 | 400,
279 | 'contract_not_found_in_output',
280 | `Could not find ${contractName} in ${contractPath}`,
281 | )
282 | }
283 |
284 | const compiledBytecode = `0x${compiledContract.evm.deployedBytecode.object}`
285 | const creationBytecodeRaw = `0x${compiledContract.evm.bytecode.object}`
286 |
287 | const auxdataStyle = getVyperAuxdataStyle(compilerVersion)
288 |
289 | const immutableReferences = getVyperImmutableReferences(
290 | compilerVersion,
291 | creationBytecodeRaw,
292 | compiledBytecode,
293 | )
294 |
295 | const runtimeMatchResult = matchBytecode({
296 | onchainBytecode: onchainBytecode,
297 | recompiledBytecode: compiledBytecode,
298 | isCreation: false,
299 | linkReferences: undefined,
300 | immutableReferences,
301 | auxdataStyle,
302 | abi: compiledContract.abi,
303 | })
304 |
305 | if (runtimeMatchResult.match === null) {
306 | return context.json(
307 | {
308 | error:
309 | runtimeMatchResult.message ||
310 | "The deployed and recompiled bytecode don't match.",
311 | },
312 | 500,
313 | )
314 | }
315 |
316 | const isExactMatch = runtimeMatchResult.match === 'exact_match'
317 | const auditUser = 'verification-api'
318 | const contractIdentifier = `${contractPath}:${contractName}`
319 |
320 | // Compute hashes for runtime bytecode
321 | const runtimeBytecodeBytes = Hex.toBytes(compiledBytecode as `0x${string}`)
322 | const runtimeCodeHashSha256 = new Uint8Array(
323 | await globalThis.crypto.subtle.digest(
324 | 'SHA-256',
325 | new TextEncoder().encode(compiledBytecode as `0x${string}`),
326 | ),
327 | )
328 | const runtimeCodeHashKeccak = Hex.toBytes(
329 | keccak256(compiledBytecode as `0x${string}`),
330 | )
331 |
332 | // Compute hashes for creation bytecode
333 |     const creationBytecode = creationBytecodeRaw // same value computed above, with 0x prefix
334 | const creationBytecodeBytes = Hex.toBytes(creationBytecode as `0x${string}`)
335 | const creationCodeHashSha256 = new Uint8Array(
336 | await globalThis.crypto.subtle.digest(
337 | 'SHA-256',
338 | new TextEncoder().encode(creationBytecode as `0x${string}`),
339 | ),
340 | )
341 | const creationCodeHashKeccak = Hex.toBytes(
342 | keccak256(creationBytecode as `0x${string}`),
343 | )
344 |
345 | // Insert runtime code
346 | await db
347 | .insert(codeTable)
348 | .values({
349 | codeHash: runtimeCodeHashSha256,
350 | codeHashKeccak: runtimeCodeHashKeccak,
351 | code: runtimeBytecodeBytes,
352 | createdBy: auditUser,
353 | updatedBy: auditUser,
354 | })
355 | .onConflictDoNothing()
356 |
357 | // Insert creation code
358 | await db
359 | .insert(codeTable)
360 | .values({
361 | codeHash: creationCodeHashSha256,
362 | codeHashKeccak: creationCodeHashKeccak,
363 | code: creationBytecodeBytes,
364 | createdBy: auditUser,
365 | updatedBy: auditUser,
366 | })
367 | .onConflictDoNothing()
368 |
369 | // Get or create contract
370 | const existingContract = await db
371 | .select({ id: contractsTable.id })
372 | .from(contractsTable)
373 | .where(eq(contractsTable.runtimeCodeHash, runtimeCodeHashSha256))
374 | .limit(1)
375 |
376 | let contractId: string
377 | if (existingContract.length > 0 && existingContract[0]) {
378 | contractId = existingContract[0].id
379 | } else {
380 | contractId = globalThis.crypto.randomUUID()
381 | await db.insert(contractsTable).values({
382 | id: contractId,
383 | creationCodeHash: creationCodeHashSha256,
384 | runtimeCodeHash: runtimeCodeHashSha256,
385 | createdBy: auditUser,
386 | updatedBy: auditUser,
387 | })
388 | }
389 |
390 | // Get or create deployment
391 | const existingDeployment = await db
392 | .select({ id: contractDeploymentsTable.id })
393 | .from(contractDeploymentsTable)
394 | .where(
395 | and(
396 | eq(contractDeploymentsTable.chainId, chainId),
397 | eq(contractDeploymentsTable.address, addressBytes),
398 | ),
399 | )
400 | .limit(1)
401 |
402 | let deploymentId: string
403 | if (existingDeployment.length > 0 && existingDeployment[0]) {
404 | deploymentId = existingDeployment[0].id
405 | } else {
406 | deploymentId = globalThis.crypto.randomUUID()
407 | await db.insert(contractDeploymentsTable).values({
408 | id: deploymentId,
409 | chainId: chainId,
410 | address: addressBytes,
411 | contractId,
412 | createdBy: auditUser,
413 | updatedBy: auditUser,
414 | })
415 | }
416 |
417 | // Get or create compiled contract
418 | const existingCompilation = await db
419 | .select({ id: compiledContractsTable.id })
420 | .from(compiledContractsTable)
421 | .where(
422 | and(
423 | eq(compiledContractsTable.runtimeCodeHash, runtimeCodeHashSha256),
424 | eq(compiledContractsTable.compiler, 'vyper'),
425 | eq(compiledContractsTable.version, compilerVersion),
426 | ),
427 | )
428 | .limit(1)
429 |
430 | let compilationId: string
431 | if (existingCompilation.length > 0 && existingCompilation[0]) {
432 | compilationId = existingCompilation[0].id
433 | } else {
434 | compilationId = globalThis.crypto.randomUUID()
435 |
436 | const creationCodeArtifacts = {
437 | sourceMap: compiledContract.evm.bytecode.sourceMap,
438 | }
439 | const runtimeCodeArtifacts = {
440 | sourceMap: compiledContract.evm.deployedBytecode.sourceMap,
441 | immutableReferences,
442 | }
443 | const compilationArtifacts = {
444 | abi: compiledContract.abi,
445 | metadata: compiledContract.metadata,
446 | storageLayout: compiledContract.storageLayout,
447 | userdoc: compiledContract.userdoc,
448 | devdoc: compiledContract.devdoc,
449 | }
450 |
451 | await db.insert(compiledContractsTable).values({
452 | id: compilationId,
453 | compiler: 'vyper',
454 | version: compilerVersion,
455 | language: 'Vyper',
456 | name: contractName,
457 | fullyQualifiedName: contractIdentifier,
458 | compilerSettings: JSON.stringify(stdJsonInput.settings),
459 | compilationArtifacts: JSON.stringify(compilationArtifacts),
460 | creationCodeHash: creationCodeHashSha256,
461 | creationCodeArtifacts: JSON.stringify(creationCodeArtifacts),
462 | runtimeCodeHash: runtimeCodeHashSha256,
463 | runtimeCodeArtifacts: JSON.stringify(runtimeCodeArtifacts),
464 | createdBy: auditUser,
465 | updatedBy: auditUser,
466 | })
467 | }
468 |
469 | // Insert sources
470 | for (const [sourcePath, sourceContent] of Object.entries(files)) {
471 | const contentBytes = new TextEncoder().encode(sourceContent)
472 | const sourceHashSha256 = new Uint8Array(
473 | await globalThis.crypto.subtle.digest('SHA-256', contentBytes),
474 | )
475 | const sourceHashKeccak = Hex.toBytes(
476 | keccak256(Hex.fromBytes(contentBytes)),
477 | )
478 |
479 | await db
480 | .insert(sourcesTable)
481 | .values({
482 | sourceHash: sourceHashSha256,
483 | sourceHashKeccak: sourceHashKeccak,
484 | content: sourceContent,
485 | createdBy: auditUser,
486 | updatedBy: auditUser,
487 | })
488 | .onConflictDoNothing()
489 |
490 | // Normalize path (convert absolute to relative)
491 | const normalizedPath = normalizeSourcePath(sourcePath)
492 | await db
493 | .insert(compiledContractsSourcesTable)
494 | .values({
495 | id: globalThis.crypto.randomUUID(),
496 | compilationId: compilationId,
497 | sourceHash: sourceHashSha256,
498 | path: normalizedPath,
499 | })
500 | .onConflictDoNothing()
501 | }
502 |
503 | // Extract and insert signatures from ABI
504 | const abi = compiledContract.abi
505 | for (const item of abi) {
506 | let signatureType: SignatureType | null = null
507 | if (item.type === 'function') signatureType = 'function'
508 | else if (item.type === 'event') signatureType = 'event'
509 | else if (item.type === 'error') signatureType = 'error'
510 |
511 | if (signatureType && item.name) {
512 | const inputTypes = (item.inputs ?? []).map((i) => i.type).join(',')
513 | const signature = `${item.name}(${inputTypes})`
514 | const signatureHash32 = Hex.toBytes(
515 | keccak256(Hex.fromString(signature)),
516 | )
517 |
518 | await db
519 | .insert(signaturesTable)
520 | .values({ signatureHash32, signature })
521 | .onConflictDoNothing()
522 |
523 | await db
524 | .insert(compiledContractsSignaturesTable)
525 | .values({
526 | id: globalThis.crypto.randomUUID(),
527 | compilationId,
528 | signatureHash32,
529 | signatureType,
530 | })
531 | .onConflictDoNothing()
532 | }
533 | }
534 |
535 | // Insert verified contract
536 | await db
537 | .insert(verifiedContractsTable)
538 | .values({
539 | deploymentId,
540 | compilationId,
541 | creationMatch: false,
542 | runtimeMatch: true,
543 | runtimeMetadataMatch: isExactMatch,
544 | runtimeValues:
545 | Object.keys(runtimeMatchResult.transformationValues).length > 0
546 | ? JSON.stringify(runtimeMatchResult.transformationValues)
547 | : null,
548 | runtimeTransformations:
549 | runtimeMatchResult.transformations.length > 0
550 | ? JSON.stringify(runtimeMatchResult.transformations)
551 | : null,
552 | createdBy: auditUser,
553 | updatedBy: auditUser,
554 | })
555 | .onConflictDoNothing()
556 |
557 | // Return legacy Sourcify format
558 | return context.json({
559 | result: [
560 | {
561 | address,
562 | chainId: chain,
563 | status: isExactMatch ? 'perfect' : 'partial',
564 | },
565 | ],
566 | })
567 | } catch (error) {
568 | console.error(error)
569 | return context.json({ error: 'An unexpected error occurred' }, 500)
570 | }
571 | })
572 |
573 | export { legacyVerifyRoute }
574 |
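A hedged usage sketch for the legacy route above: it assumes the router is mounted at /verify (as the doc comment at the top of this file lists) and that VERIFIER_URL points at a deployed instance; the address, chain id, and source content are placeholder values.

const VERIFIER_URL = 'https://verifier.example.com' // assumed base URL

async function verifyVyperContract() {
  const response = await fetch(`${VERIFIER_URL}/verify/vyper`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    // Shape follows the LegacyVyperRequest interface above
    body: JSON.stringify({
      address: '0x0000000000000000000000000000000000000000', // placeholder
      chain: '42069', // placeholder; must be a supported chain id
      files: { 'contracts/Token.vy': '# @version ^0.4.0\n...' },
      contractPath: 'contracts/Token.vy',
      contractName: 'Token',
      compilerVersion: '0.4.0',
    }),
  })
  // Legacy Sourcify-style response: { result: [{ address, chainId, status }] }
  console.log(await response.json())
}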
--------------------------------------------------------------------------------
/apps/contract-verification/src/route.lookup.ts:
--------------------------------------------------------------------------------
1 | import { and, asc, desc, eq, gt, lt } from 'drizzle-orm'
2 | import { drizzle } from 'drizzle-orm/d1'
3 | import { Hono } from 'hono'
4 | import { Address, Hex } from 'ox'
5 |
6 | import { DEVNET_CHAIN_ID, TESTNET_CHAIN_ID } from '#chains.ts'
7 |
8 | import {
9 | codeTable,
10 | compiledContractsSignaturesTable,
11 | compiledContractsSourcesTable,
12 | compiledContractsTable,
13 | contractDeploymentsTable,
14 | signaturesTable,
15 | sourcesTable,
16 | verifiedContractsTable,
17 | } from '#database/schema.ts'
18 | import { sourcifyError } from '#utilities.ts'
19 |
20 | /**
21 | * GET /v2/contract/{chainId}/{address}
22 | * GET /v2/contract/all-chains/{address}
23 | * GET /v2/contracts/{chainId}
24 | */
25 |
26 | const lookupRoute = new Hono<{ Bindings: Cloudflare.Env }>()
27 | const lookupAllChainContractsRoute = new Hono<{ Bindings: Cloudflare.Env }>()
28 |
29 | // GET /v2/contract/all-chains/:address - Get verified contract at an address on all chains
30 | // Note: This route must be defined before /:chainId/:address to avoid matching conflicts
31 | lookupRoute.get('/all-chains/:address', async (context) => {
32 | try {
33 | const { address } = context.req.param()
34 |
35 | if (!Address.validate(address, { strict: true }))
36 | return sourcifyError(
37 | context,
38 | 400,
39 | 'invalid_address',
40 | `Invalid address: ${address}`,
41 | )
42 |
43 | const db = drizzle(context.env.CONTRACTS_DB)
44 | const addressBytes = Hex.toBytes(address as `0x${string}`)
45 |
46 | // Query all verified contracts at this address across all chains
47 | const results = await db
48 | .select({
49 | matchId: verifiedContractsTable.id,
50 | verifiedAt: verifiedContractsTable.createdAt,
51 | runtimeMatch: verifiedContractsTable.runtimeMatch,
52 | creationMatch: verifiedContractsTable.creationMatch,
53 | runtimeMetadataMatch: verifiedContractsTable.runtimeMetadataMatch,
54 | creationMetadataMatch: verifiedContractsTable.creationMetadataMatch,
55 | chainId: contractDeploymentsTable.chainId,
56 | address: contractDeploymentsTable.address,
57 | })
58 | .from(verifiedContractsTable)
59 | .innerJoin(
60 | contractDeploymentsTable,
61 | eq(verifiedContractsTable.deploymentId, contractDeploymentsTable.id),
62 | )
63 | .innerJoin(
64 | compiledContractsTable,
65 | eq(verifiedContractsTable.compilationId, compiledContractsTable.id),
66 | )
67 | .where(eq(contractDeploymentsTable.address, addressBytes))
68 |
69 | // Transform results to minimal format per OpenAPI spec
70 | const contracts = results.map((row) => {
71 | const runtimeMatchStatus = row.runtimeMatch ? 'exact_match' : 'match'
72 | const creationMatchStatus = row.creationMatch ? 'exact_match' : 'match'
73 | const matchStatus =
74 | runtimeMatchStatus === 'exact_match' ||
75 | creationMatchStatus === 'exact_match'
76 | ? 'exact_match'
77 | : runtimeMatchStatus || creationMatchStatus
78 |
79 | return {
80 | matchId: row.matchId,
81 | match: matchStatus,
82 | creationMatch: creationMatchStatus,
83 | runtimeMatch: runtimeMatchStatus,
84 | chainId: row.chainId,
85 | address: Hex.fromBytes(new Uint8Array(row.address as ArrayBuffer)),
86 | verifiedAt: row.verifiedAt,
87 | }
88 | })
89 |
90 | return context.json({ results: contracts })
91 | } catch (error) {
92 | console.error(error)
93 | return sourcifyError(
94 | context,
95 | 500,
96 | 'internal_error',
97 | 'An unexpected error occurred',
98 | )
99 | }
100 | })
101 |
102 | // GET /v2/contract/:chainId/:address - Get verified contract
103 | lookupRoute.get('/:chainId/:address', async (context) => {
104 | try {
105 | const { chainId, address } = context.req.param()
106 | const { fields, omit } = context.req.query()
107 |
108 | if (![DEVNET_CHAIN_ID, TESTNET_CHAIN_ID].includes(Number(chainId)))
109 | return sourcifyError(
110 | context,
111 | 400,
112 | 'unsupported_chain',
113 | `The chain with chainId ${chainId} is not supported`,
114 | )
115 |
116 | if (!Address.validate(address, { strict: true }))
117 | return sourcifyError(
118 | context,
119 | 400,
120 | 'invalid_address',
121 | `Invalid address: ${address}`,
122 | )
123 |
124 | if (fields && omit)
125 | return sourcifyError(
126 | context,
127 | 400,
128 | 'invalid_params',
129 | 'Cannot use both fields and omit query parameters simultaneously',
130 | )
131 |
132 | const db = drizzle(context.env.CONTRACTS_DB)
133 | const addressBytes = Hex.toBytes(address)
134 |
135 | // Query verified contract at this address on the specified chain
136 | const results = await db
137 | .select({
138 | // For minimal response
139 | matchId: verifiedContractsTable.id,
140 | verifiedAt: verifiedContractsTable.createdAt,
141 | runtimeMatch: verifiedContractsTable.runtimeMatch,
142 | creationMatch: verifiedContractsTable.creationMatch,
143 | runtimeMetadataMatch: verifiedContractsTable.runtimeMetadataMatch,
144 | creationMetadataMatch: verifiedContractsTable.creationMetadataMatch,
145 | runtimeValues: verifiedContractsTable.runtimeValues,
146 | creationValues: verifiedContractsTable.creationValues,
147 | runtimeTransformations: verifiedContractsTable.runtimeTransformations,
148 | creationTransformations: verifiedContractsTable.creationTransformations,
149 | // For extended response
150 | chainId: contractDeploymentsTable.chainId,
151 | address: contractDeploymentsTable.address,
152 | transactionHash: contractDeploymentsTable.transactionHash,
153 | blockNumber: contractDeploymentsTable.blockNumber,
154 | transactionIndex: contractDeploymentsTable.transactionIndex,
155 | deployer: contractDeploymentsTable.deployer,
156 | // Compilation info
157 | compilationId: compiledContractsTable.id,
158 | contractName: compiledContractsTable.name,
159 | fullyQualifiedName: compiledContractsTable.fullyQualifiedName,
160 | compiler: compiledContractsTable.compiler,
161 | version: compiledContractsTable.version,
162 | language: compiledContractsTable.language,
163 | compilerSettings: compiledContractsTable.compilerSettings,
164 | compilationArtifacts: compiledContractsTable.compilationArtifacts,
165 | creationCodeArtifacts: compiledContractsTable.creationCodeArtifacts,
166 | runtimeCodeArtifacts: compiledContractsTable.runtimeCodeArtifacts,
167 | creationCodeHash: compiledContractsTable.creationCodeHash,
168 | runtimeCodeHash: compiledContractsTable.runtimeCodeHash,
169 | })
170 | .from(verifiedContractsTable)
171 | .innerJoin(
172 | contractDeploymentsTable,
173 | eq(verifiedContractsTable.deploymentId, contractDeploymentsTable.id),
174 | )
175 | .innerJoin(
176 | compiledContractsTable,
177 | eq(verifiedContractsTable.compilationId, compiledContractsTable.id),
178 | )
179 | .where(
180 | and(
181 | eq(contractDeploymentsTable.chainId, Number(chainId)),
182 | eq(contractDeploymentsTable.address, addressBytes),
183 | ),
184 | )
185 | .limit(1)
186 |
187 | if (results.length === 0)
188 | return sourcifyError(
189 | context,
190 | 404,
191 | 'contract_not_found',
192 | `Contract ${address} on chain ${chainId} not found or not verified`,
193 | )
194 |
195 | const [row] = results
196 | if (!row) {
197 | return sourcifyError(
198 | context,
199 | 404,
200 | 'contract_not_found',
201 | `Contract ${address} on chain ${chainId} not found or not verified`,
202 | )
203 | }
204 |
205 | // Compute match statuses per OpenAPI spec
206 | const runtimeMatchStatus = row.runtimeMatch ? 'exact_match' : 'match'
207 | const creationMatchStatus = row.creationMatch ? 'exact_match' : 'match'
208 | // Overall match: best of runtime or creation
209 | const matchStatus =
210 | runtimeMatchStatus === 'exact_match' ||
211 | creationMatchStatus === 'exact_match'
212 | ? 'exact_match'
213 | : runtimeMatchStatus || creationMatchStatus
214 |
215 | const formattedAddress = Hex.fromBytes(
216 | new Uint8Array(row.address as ArrayBuffer),
217 | )
218 |
219 | // Minimal response (default)
220 | const minimalResponse = {
221 | matchId: row.matchId,
222 | match: matchStatus,
223 | creationMatch: creationMatchStatus,
224 | runtimeMatch: runtimeMatchStatus,
225 | chainId: row.chainId,
226 | address: formattedAddress,
227 | verifiedAt: row.verifiedAt,
228 | }
229 |
230 |     // If neither fields nor omit is requested, return the minimal response
231 | if (!fields && !omit) return context.json(minimalResponse)
232 |
233 | // Fetch bytecode from code table
234 | const [creationCode, runtimeCode] = await Promise.all([
235 | row.creationCodeHash
236 | ? db
237 | .select({ code: codeTable.code })
238 | .from(codeTable)
239 | .where(eq(codeTable.codeHash, row.creationCodeHash))
240 | .limit(1)
241 | : Promise.resolve([]),
242 | row.runtimeCodeHash
243 | ? db
244 | .select({ code: codeTable.code })
245 | .from(codeTable)
246 | .where(eq(codeTable.codeHash, row.runtimeCodeHash))
247 | .limit(1)
248 | : Promise.resolve([]),
249 | ])
250 |
251 | // Fetch sources
252 | const sourcesResult = await db
253 | .select({
254 | path: compiledContractsSourcesTable.path,
255 | content: sourcesTable.content,
256 | sourceHash: sourcesTable.sourceHash,
257 | })
258 | .from(compiledContractsSourcesTable)
259 | .innerJoin(
260 | sourcesTable,
261 | eq(compiledContractsSourcesTable.sourceHash, sourcesTable.sourceHash),
262 | )
263 | .where(eq(compiledContractsSourcesTable.compilationId, row.compilationId))
264 |
265 | // Fetch signatures
266 | const signaturesResult = await db
267 | .select({
268 | signature: signaturesTable.signature,
269 | signatureType: compiledContractsSignaturesTable.signatureType,
270 | signatureHash32: signaturesTable.signatureHash32,
271 | })
272 | .from(compiledContractsSignaturesTable)
273 | .innerJoin(
274 | signaturesTable,
275 | eq(
276 | compiledContractsSignaturesTable.signatureHash32,
277 | signaturesTable.signatureHash32,
278 | ),
279 | )
280 | .where(
281 | eq(compiledContractsSignaturesTable.compilationId, row.compilationId),
282 | )
283 |
284 | // Build sources object, preferring normalized (relative) paths over absolute paths
285 | const sources: Record<string, { content: string }> = {}
286 | const sourceIds: Record<string, string> = {}
287 | const seenContentHashes = new Set<string>()
288 |
289 | // Sort to process relative paths first, then absolute paths
290 | const sortedSources = [...sourcesResult].sort((a, b) => {
291 | const aIsAbsolute = a.path.startsWith('/')
292 | const bIsAbsolute = b.path.startsWith('/')
293 | if (aIsAbsolute === bIsAbsolute) return 0
294 | return aIsAbsolute ? 1 : -1 // Relative paths first
295 | })
296 |
297 | for (const source of sortedSources) {
298 | const hashHex = Hex.fromBytes(
299 | new Uint8Array(source.sourceHash as ArrayBuffer),
300 | )
301 | // Skip if we already have this source content (prefer relative path)
302 | if (seenContentHashes.has(hashHex)) continue
303 | seenContentHashes.add(hashHex)
304 |
305 | sources[source.path] = { content: source.content }
306 | sourceIds[source.path] = hashHex
307 | }
308 |
309 | // Build signatures object (Sourcify format: grouped by type)
310 | const signatures: {
311 | function: Array<{
312 | signature: string
313 | signatureHash32: string
314 | signatureHash4: string
315 | }>
316 | event: Array<{
317 | signature: string
318 | signatureHash32: string
319 | signatureHash4: string
320 | }>
321 | error: Array<{
322 | signature: string
323 | signatureHash32: string
324 | signatureHash4: string
325 | }>
326 | } = { function: [], event: [], error: [] }
327 |
328 | for (const sig of signaturesResult) {
329 | const hash32Bytes = new Uint8Array(sig.signatureHash32 as ArrayBuffer)
330 | const signatureHash32 = Hex.fromBytes(hash32Bytes)
331 | const signatureHash4 = Hex.fromBytes(hash32Bytes.slice(0, 4))
332 | const type = sig.signatureType as 'function' | 'event' | 'error'
333 |
334 | signatures[type].push({
335 | signature: sig.signature,
336 | signatureHash32,
337 | signatureHash4,
338 | })
339 | }
340 |
341 | // Build full response for field filtering
342 | const artifacts = JSON.parse(row.compilationArtifacts ?? '{}') as {
343 | abi?: unknown[]
344 | userdoc?: unknown
345 | devdoc?: unknown
346 | storageLayout?: unknown
347 | metadata?: unknown
348 | }
349 |
350 | const creationCodeArtifacts = JSON.parse(
351 | row.creationCodeArtifacts ?? '{}',
352 | ) as {
353 | sourceMap?: string
354 | linkReferences?: unknown
355 | cborAuxdata?: unknown
356 | }
357 |
358 | const runtimeCodeArtifacts = JSON.parse(
359 | row.runtimeCodeArtifacts ?? '{}',
360 | ) as {
361 | sourceMap?: string
362 | linkReferences?: unknown
363 | immutableReferences?: unknown
364 | cborAuxdata?: unknown
365 | }
366 |
367 | const creationBytecodeData = creationCode[0]?.code
368 | ? Hex.fromBytes(new Uint8Array(creationCode[0].code as ArrayBuffer))
369 | : null
370 | const runtimeBytecodeData = runtimeCode[0]?.code
371 | ? Hex.fromBytes(new Uint8Array(runtimeCode[0].code as ArrayBuffer))
372 | : null
373 |
374 | // Build stdJsonInput
375 | const stdJsonInput = {
376 | language: row.language,
377 | sources: Object.fromEntries(
378 | Object.entries(sources).map(([path, { content }]) => [
379 | path,
380 | { content },
381 | ]),
382 | ),
383 | settings: JSON.parse(row.compilerSettings),
384 | }
385 |
386 | // Build stdJsonOutput (partial - what we have stored)
387 | const stdJsonOutput = {
388 | contracts: {
389 | [row.fullyQualifiedName.split(':')[0] ?? '']: {
390 | [row.contractName]: {
391 | abi: artifacts.abi,
392 | metadata:
393 | typeof artifacts.metadata === 'string'
394 | ? artifacts.metadata
395 | : JSON.stringify(artifacts.metadata ?? {}),
396 | userdoc: artifacts.userdoc,
397 | devdoc: artifacts.devdoc,
398 | storageLayout: artifacts.storageLayout,
399 | evm: {
400 | bytecode: {
401 | object: creationBytecodeData,
402 | sourceMap: creationCodeArtifacts.sourceMap,
403 | linkReferences: creationCodeArtifacts.linkReferences,
404 | },
405 | deployedBytecode: {
406 | object: runtimeBytecodeData,
407 | sourceMap: runtimeCodeArtifacts.sourceMap,
408 | linkReferences: runtimeCodeArtifacts.linkReferences,
409 | immutableReferences: runtimeCodeArtifacts.immutableReferences,
410 | },
411 | },
412 | },
413 | },
414 | },
415 | }
416 |
417 | const fullResponse: Record<string, unknown> = {
418 | ...minimalResponse,
419 | transactionHash: row.transactionHash
420 | ? Hex.fromBytes(new Uint8Array(row.transactionHash as ArrayBuffer))
421 | : null,
422 | blockNumber: row.blockNumber,
423 | name: row.contractName,
424 | fullyQualifiedName: row.fullyQualifiedName,
425 | compiler: row.compiler,
426 | version: row.version,
427 | language: row.language,
428 | compilerSettings: JSON.parse(row.compilerSettings),
429 | runtimeMetadataMatch: row.runtimeMetadataMatch ? 'exact_match' : 'match',
430 | creationMetadataMatch: row.creationMetadataMatch
431 | ? 'exact_match'
432 | : 'match',
433 | abi: artifacts.abi ?? null,
434 | userdoc: artifacts.userdoc ?? null,
435 | devdoc: artifacts.devdoc ?? null,
436 | storageLayout: artifacts.storageLayout ?? null,
437 | metadata: artifacts.metadata ?? null,
438 | sources,
439 | sourceIds,
440 | signatures,
441 | creationBytecode: creationBytecodeData
442 | ? {
443 | bytecode: creationBytecodeData,
444 | sourceMap: creationCodeArtifacts.sourceMap ?? null,
445 | linkReferences: creationCodeArtifacts.linkReferences ?? null,
446 | cborAuxdata: creationCodeArtifacts.cborAuxdata ?? null,
447 | }
448 | : null,
449 | runtimeBytecode: runtimeBytecodeData
450 | ? {
451 | bytecode: runtimeBytecodeData,
452 | sourceMap: runtimeCodeArtifacts.sourceMap ?? null,
453 | linkReferences: runtimeCodeArtifacts.linkReferences ?? null,
454 | immutableReferences:
455 | runtimeCodeArtifacts.immutableReferences ?? null,
456 | cborAuxdata: runtimeCodeArtifacts.cborAuxdata ?? null,
457 | }
458 | : null,
459 | compilation: {
460 | compiler: row.compiler,
461 | version: row.version,
462 | language: row.language,
463 | name: row.contractName,
464 | fullyQualifiedName: row.fullyQualifiedName,
465 | compilerSettings: JSON.parse(row.compilerSettings),
466 | },
467 | deployment: {
468 | chainId: row.chainId,
469 | address: formattedAddress,
470 | transactionHash: row.transactionHash
471 | ? Hex.fromBytes(new Uint8Array(row.transactionHash as ArrayBuffer))
472 | : null,
473 | blockNumber: row.blockNumber,
474 | transactionIndex: row.transactionIndex,
475 | deployer: row.deployer
476 | ? Hex.fromBytes(new Uint8Array(row.deployer as ArrayBuffer))
477 | : null,
478 | },
479 | stdJsonInput,
480 | stdJsonOutput,
481 | proxyResolution: null, // Not implemented yet
482 | }
483 |
484 | // Apply field filtering
485 | if (fields) {
486 | if (fields === 'all') return context.json(fullResponse)
487 | const fieldList = fields.split(',').map((f) => f.trim())
488 | const filtered: Record<string, unknown> = {
489 | // Always include minimal fields
490 | ...minimalResponse,
491 | }
492 | for (const field of fieldList)
493 | if (field in fullResponse) filtered[field] = fullResponse[field]
494 |
495 | return context.json(filtered)
496 | }
497 |
498 | if (omit) {
499 | const omitList = omit.split(',').map((f) => f.trim())
500 | for (const field of omitList) delete fullResponse[field]
501 |
502 | return context.json(fullResponse)
503 | }
504 |
505 | return context.json(minimalResponse)
506 | } catch (error) {
507 | console.error(error)
508 | return sourcifyError(
509 | context,
510 | 500,
511 | 'internal_error',
512 | 'An unexpected error occurred',
513 | )
514 | }
515 | })
516 |
517 | // GET /v2/contracts/:chainId - List verified contracts on a specific chain
518 | lookupAllChainContractsRoute.get('/:chainId', async (context) => {
519 | try {
520 | const { chainId } = context.req.param()
521 | const { sort, limit, afterMatchId } = context.req.query()
522 |
523 | if (![DEVNET_CHAIN_ID, TESTNET_CHAIN_ID].includes(Number(chainId)))
524 | return sourcifyError(
525 | context,
526 | 400,
527 | 'unsupported_chain',
528 | `The chain with chainId ${chainId} is not supported`,
529 | )
530 |
531 | // Validate and parse query params
532 | const sortOrder = sort === 'asc' ? 'asc' : 'desc'
533 | const limitNum = Math.min(Math.max(Number(limit) || 200, 1), 200)
534 |
535 | const db = drizzle(context.env.CONTRACTS_DB)
536 |
537 | // Build query
538 | const query = db
539 | .select({
540 | matchId: verifiedContractsTable.id,
541 | verifiedAt: verifiedContractsTable.createdAt,
542 | runtimeMatch: verifiedContractsTable.runtimeMatch,
543 | creationMatch: verifiedContractsTable.creationMatch,
544 | chainId: contractDeploymentsTable.chainId,
545 | address: contractDeploymentsTable.address,
546 | })
547 | .from(verifiedContractsTable)
548 | .innerJoin(
549 | contractDeploymentsTable,
550 | eq(verifiedContractsTable.deploymentId, contractDeploymentsTable.id),
551 | )
552 | .where(
553 | afterMatchId
554 | ? and(
555 | eq(contractDeploymentsTable.chainId, Number(chainId)),
556 | sortOrder === 'desc'
557 | ? lt(verifiedContractsTable.id, Number(afterMatchId))
558 | : gt(verifiedContractsTable.id, Number(afterMatchId)),
559 | )
560 | : eq(contractDeploymentsTable.chainId, Number(chainId)),
561 | )
562 | .orderBy(
563 | sortOrder === 'desc'
564 | ? desc(verifiedContractsTable.id)
565 | : asc(verifiedContractsTable.id),
566 | )
567 | .limit(limitNum)
568 |
569 | const results = await query
570 |
571 | // Transform results to match OpenAPI spec
572 | const contracts = results.map((row) => {
573 | const runtimeMatchStatus = row.runtimeMatch ? 'exact_match' : 'match'
574 | const creationMatchStatus = row.creationMatch ? 'exact_match' : 'match'
575 | const matchStatus =
576 | runtimeMatchStatus === 'exact_match' ||
577 | creationMatchStatus === 'exact_match'
578 | ? 'exact_match'
579 | : 'match'
580 |
581 | return {
582 | matchId: row.matchId,
583 | match: matchStatus,
584 | creationMatch: creationMatchStatus,
585 | runtimeMatch: runtimeMatchStatus,
586 | chainId: row.chainId,
587 | address: Hex.fromBytes(new Uint8Array(row.address as ArrayBuffer)),
588 | verifiedAt: row.verifiedAt,
589 | }
590 | })
591 |
592 | return context.json({ results: contracts })
593 | } catch (error) {
594 | console.error(error)
595 | return sourcifyError(
596 | context,
597 | 500,
598 | 'internal_error',
599 | 'An unexpected error occurred',
600 | )
601 | }
602 | })
603 |
604 | export { lookupRoute, lookupAllChainContractsRoute }
605 |
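A hedged lookup sketch for the routes above: it assumes the routers are mounted under /v2/contract and /v2/contracts (as the doc comment at the top of this file lists) and that VERIFIER_URL points at a deployed instance; the chain id and address are placeholders.

const VERIFIER_URL = 'https://verifier.example.com' // assumed base URL

// Default (minimal) response: matchId, match, creationMatch, runtimeMatch, chainId, address, verifiedAt
const minimal = await fetch(
  `${VERIFIER_URL}/v2/contract/42069/0x0000000000000000000000000000000000000000`,
).then((response) => response.json())

// Full response, including sources, ABI, bytecode, and stdJsonInput/stdJsonOutput
const full = await fetch(
  `${VERIFIER_URL}/v2/contract/42069/0x0000000000000000000000000000000000000000?fields=all`,
).then((response) => response.json())

// Or drop heavy fields instead: ?omit=sources,stdJsonOutput (fields and omit cannot be combined)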
--------------------------------------------------------------------------------
/apps/contract-verification/src/route.verify.ts:
--------------------------------------------------------------------------------
1 | import { getContainer } from '@cloudflare/containers'
2 | import { and, eq } from 'drizzle-orm'
3 | import { drizzle } from 'drizzle-orm/d1'
4 | import { Hono } from 'hono'
5 | import { bodyLimit } from 'hono/body-limit'
6 | import { Address, Hex } from 'ox'
7 | import { type Chain, createPublicClient, http, keccak256 } from 'viem'
8 |
9 | import {
10 | AuxdataStyle,
11 | getVyperAuxdataStyle,
12 | getVyperImmutableReferences,
13 | type ImmutableReferences,
14 | type LinkReferences,
15 | matchBytecode,
16 | } from '#bytecode-matching.ts'
17 | import { chains, DEVNET_CHAIN_ID, TESTNET_CHAIN_ID } from '#chains.ts'
18 |
19 | import {
20 | codeTable,
21 | compiledContractsSignaturesTable,
22 | compiledContractsSourcesTable,
23 | compiledContractsTable,
24 | contractDeploymentsTable,
25 | contractsTable,
26 | type SignatureType,
27 | signaturesTable,
28 | sourcesTable,
29 | verifiedContractsTable,
30 | } from '#database/schema.ts'
31 | import { normalizeSourcePath, sourcifyError } from '#utilities.ts'
32 |
33 | /**
34 | * TODO:
35 | * - handle different solc versions
36 | * - routes:
37 | * - /metadata/:chainId/:address
38 | * - /similarity/:chainId/:address
39 | * - /:verificationId
40 | */
41 |
42 | /**
43 | * /verify:
44 | *
45 | * POST /v2/verify/{chainId}/{address}
46 | * POST /v2/verify/metadata/{chainId}/{address}
47 | * POST /v2/verify/similarity/{chainId}/{address}
48 | * GET /v2/verify/{verificationId}
49 | *
50 |  * (deprecated routes, still supported by Foundry forge):
51 | *
52 | * POST /verify
53 | * POST /verify/vyper
54 | * POST /verify/etherscan
55 | * POST /verify/solc-json
56 | */
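// Illustrative request body for POST /v2/verify/:chainId/:address (values are
// placeholders; the expected shape is the `body` type parsed in the handler below):
//
//   {
//     "stdJsonInput": {
//       "language": "Solidity",
//       "sources": { "contracts/Token.sol": { "content": "..." } },
//       "settings": { "optimizer": { "enabled": true, "runs": 200 } }
//     },
//     "compilerVersion": "0.8.24",
//     "contractIdentifier": "contracts/Token.sol:Token"
//   }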
57 |
58 | const verifyRoute = new Hono<{ Bindings: Cloudflare.Env }>()
59 |
60 | verifyRoute.use(
61 | '*',
62 | bodyLimit({
63 | maxSize: 2 * 1024 * 1024, // 2mb
64 | onError: (context) => {
65 | const message = `[requestId: ${context.req.header('X-Tempo-Request-Id')}] Body limit exceeded`
66 |
67 | console.error(message)
68 | return sourcifyError(context, 413, 'body_too_large', message)
69 | },
70 | }),
71 | )
72 |
73 | // POST /v2/verify/metadata/:chainId/:address - Verify Contract (using Solidity metadata.json)
74 | verifyRoute.post('/metadata/:chainId/:address', (context) =>
75 | sourcifyError(
76 | context,
77 | 501,
78 | 'not_implemented',
79 | 'Metadata-based verification is not implemented',
80 | ),
81 | )
82 |
83 | // POST /v2/verify/similarity/:chainId/:address - Verify contract via similarity search
84 | verifyRoute.post('/similarity/:chainId/:address', (context) =>
85 | sourcifyError(
86 | context,
87 | 501,
88 | 'not_implemented',
89 | 'Similarity-based verification is not implemented',
90 | ),
91 | )
92 |
93 | // POST /v2/verify/:chainId/:address - Verify Contract (Standard JSON)
94 | verifyRoute.post('/:chainId/:address', async (context) => {
95 | try {
96 | const { chainId: _chainId, address } = context.req.param()
97 | const body = (await context.req.json()) as {
98 | stdJsonInput: {
99 | language: string
100 | sources: Record<string, { content: string }>
101 | settings: object
102 | }
103 | compilerVersion: string
104 | contractIdentifier: string // e.g., "contracts/Token.sol:Token"
105 | creationTransactionHash?: string
106 | }
107 |
108 | const chainId = Number(_chainId)
109 | if (![DEVNET_CHAIN_ID, TESTNET_CHAIN_ID].includes(chainId)) {
110 | return sourcifyError(
111 | context,
112 | 400,
113 | 'unsupported_chain',
114 | `The chain with chainId ${chainId} is not supported`,
115 | )
116 | }
117 |
118 | if (!Address.validate(address, { strict: true })) {
119 | return sourcifyError(
120 | context,
121 | 400,
122 | 'invalid_address',
123 | `Invalid address: ${address}`,
124 | )
125 | }
126 |
127 | if (
128 | !Object.hasOwn(body, 'stdJsonInput') ||
129 | !Object.hasOwn(body, 'compilerVersion') ||
130 | !Object.hasOwn(body, 'contractIdentifier')
131 | ) {
132 | return sourcifyError(
133 | context,
134 | 400,
135 | 'missing_params',
136 | 'stdJsonInput, compilerVersion, and contractIdentifier are required',
137 | )
138 | }
139 |
140 | const { stdJsonInput, compilerVersion, contractIdentifier } = body
141 |
142 | // Detect language from stdJsonInput
143 | const language = stdJsonInput.language?.toLowerCase() ?? 'solidity'
144 | const isVyper = language === 'vyper'
145 |
146 | // Parse contractIdentifier: "contracts/Token.sol:Token" -> { path: "contracts/Token.sol", name: "Token" }
147 | const lastColonIndex = contractIdentifier.lastIndexOf(':')
148 | if (lastColonIndex === -1) {
149 | return sourcifyError(
150 | context,
151 | 400,
152 | 'invalid_contract_identifier',
153 | 'contractIdentifier must be in format "path/to/Contract.sol:ContractName"',
154 | )
155 | }
156 | const contractPath = contractIdentifier.slice(0, lastColonIndex)
157 | const contractName = contractIdentifier.slice(lastColonIndex + 1)
158 |
159 | // Check if already verified
160 | const db = drizzle(context.env.CONTRACTS_DB)
161 | const addressBytes = Hex.toBytes(address)
162 |
163 | const existingVerification = await db
164 | .select({
165 | matchId: verifiedContractsTable.id,
166 | verifiedAt: verifiedContractsTable.createdAt,
167 | runtimeMatch: verifiedContractsTable.runtimeMatch,
168 | runtimeMetadataMatch: verifiedContractsTable.runtimeMetadataMatch,
169 | })
170 | .from(verifiedContractsTable)
171 | .innerJoin(
172 | contractDeploymentsTable,
173 | eq(verifiedContractsTable.deploymentId, contractDeploymentsTable.id),
174 | )
175 | .where(
176 | and(
177 | eq(contractDeploymentsTable.chainId, chainId),
178 | eq(contractDeploymentsTable.address, addressBytes),
179 | ),
180 | )
181 | .limit(1)
182 |
183 | if (existingVerification.length > 0) {
184 | return context.json(
185 | { verificationId: existingVerification.at(0)?.matchId?.toString() },
186 | 202,
187 | )
188 | }
189 |
190 | const chain = chains[chainId as keyof typeof chains] as unknown as Chain
191 | const client = createPublicClient({
192 | chain,
193 | transport: http(
194 | chain.id === TESTNET_CHAIN_ID
195 | ? 'https://rpc-orchestra.testnet.tempo.xyz'
196 | : undefined,
197 | ),
198 | })
199 |
200 | const onchainBytecode = await client.getCode({ address: address })
201 | if (!onchainBytecode || onchainBytecode === '0x') {
202 | return sourcifyError(
203 | context,
204 | 404,
205 | 'contract_not_found',
206 | `No bytecode found at address ${address} on chain ${chainId}`,
207 | )
208 | }
209 |
210 | // Step 2: Compile via container
211 | const container = getContainer(
212 | context.env.VERIFICATION_CONTAINER,
213 | 'singleton',
214 | )
215 |
216 | // Route to appropriate compiler endpoint based on language
217 | const compileEndpoint = isVyper
218 | ? 'http://container/compile/vyper'
219 | : 'http://container/compile'
220 |
221 | const compileResponse = await container.fetch(
222 | new Request(compileEndpoint, {
223 | method: 'POST',
224 | headers: { 'Content-Type': 'application/json' },
225 | body: JSON.stringify({
226 | compilerVersion,
227 | contractIdentifier,
228 | input: stdJsonInput,
229 | }),
230 | }),
231 | )
232 |
233 | if (!compileResponse.ok) {
234 | const errorText = await compileResponse.text()
235 | return sourcifyError(context, 500, 'compilation_failed', errorText)
236 | }
237 |
238 | const compileOutput = (await compileResponse.json()) as {
239 | contracts?: Record<
240 | string,
241 | Record<
242 | string,
243 | {
244 | abi: Array<{
245 | type: string
246 | name?: string
247 | inputs?: Array<{ type: string; name?: string }>
248 | }>
249 | evm: {
250 | bytecode: {
251 | object: string
252 | linkReferences?: LinkReferences
253 | sourceMap?: string
254 | }
255 | deployedBytecode: {
256 | object: string
257 | linkReferences?: LinkReferences
258 | immutableReferences?: ImmutableReferences
259 | sourceMap?: string
260 | }
261 | }
262 | metadata?: string
263 | storageLayout?: unknown
264 | userdoc?: unknown
265 | devdoc?: unknown
266 | }
267 | >
268 | >
269 | errors?: Array<{
270 | severity: string
271 | message: string
272 | formattedMessage?: string
273 | }>
274 | }
275 |
276 | const errors =
277 | compileOutput.errors?.filter((e) => e.severity === 'error') ?? []
278 | if (errors.length > 0) {
279 | return sourcifyError(
280 | context,
281 | 400,
282 | 'compilation_error',
283 | errors.map((e) => e.formattedMessage ?? e.message).join('\n'),
284 | )
285 | }
286 |
287 | // Step 3: Get compiled bytecode for the target contract
288 | // Try exact path first, then try matching by suffix (for Vyper absolute paths)
289 | let compiledContract =
290 | compileOutput.contracts?.[contractPath]?.[contractName]
291 | let _matchedPath = contractPath
292 |
293 | if (!compiledContract && compileOutput.contracts) {
294 | for (const outputPath of Object.keys(compileOutput.contracts)) {
295 | if (
296 | outputPath.endsWith(contractPath) ||
297 | outputPath.endsWith(`/${contractPath}`)
298 | ) {
299 | compiledContract = compileOutput.contracts[outputPath]?.[contractName]
300 | _matchedPath = outputPath
301 | if (compiledContract) break
302 | }
303 | }
304 | }
305 |
306 | if (!compiledContract) {
307 | return sourcifyError(
308 | context,
309 | 400,
310 | 'contract_not_found_in_output',
311 | `Could not find ${contractName} in ${contractPath}`,
312 | )
313 | }
314 |
315 | const deployedObject = compiledContract.evm.deployedBytecode.object
316 | const bytecodeObject = compiledContract.evm.bytecode.object
317 | const compiledBytecode = deployedObject.startsWith('0x')
318 | ? deployedObject
319 | : `0x${deployedObject}`
320 | const creationBytecodeRaw = bytecodeObject.startsWith('0x')
321 | ? bytecodeObject
322 | : `0x${bytecodeObject}`
323 |
324 | // Step 4: Compare bytecodes using proper matching with transformations
325 | // For Vyper, we need to compute immutable references from auxdata
326 | const auxdataStyle = isVyper
327 | ? getVyperAuxdataStyle(compilerVersion)
328 | : AuxdataStyle.SOLIDITY
329 |
330 |     // Vyper doesn't provide immutableReferences in its compiler output, so we compute them from the auxdata
331 | const immutableReferences = isVyper
332 | ? getVyperImmutableReferences(
333 | compilerVersion,
334 | creationBytecodeRaw,
335 | compiledBytecode,
336 | )
337 | : compiledContract.evm.deployedBytecode.immutableReferences
338 |
339 | // Vyper doesn't support libraries
340 | const linkReferences = isVyper
341 | ? undefined
342 | : compiledContract.evm.deployedBytecode.linkReferences
343 |
344 | const runtimeMatchResult = matchBytecode({
345 | onchainBytecode: onchainBytecode,
346 | recompiledBytecode: compiledBytecode,
347 | isCreation: false,
348 | linkReferences,
349 | immutableReferences,
350 | auxdataStyle,
351 | abi: compiledContract.abi,
352 | })
353 |
354 | if (runtimeMatchResult.match === null) {
355 | return sourcifyError(
356 | context,
357 | 400,
358 | 'no_match',
359 | runtimeMatchResult.message ||
360 | 'Compiled bytecode does not match on-chain bytecode',
361 | )
362 | }
363 |
364 | const isExactMatch = runtimeMatchResult.match === 'exact_match'
365 |
366 | const auditUser = 'verification-api'
367 |
368 | // Compute hashes for runtime bytecode
369 | const runtimeBytecodeBytes = Hex.toBytes(compiledBytecode as `0x${string}`)
370 | const runtimeCodeHashSha256 = new Uint8Array(
371 | await globalThis.crypto.subtle.digest(
372 | 'SHA-256',
373 | new TextEncoder().encode(compiledBytecode as `0x${string}`),
374 | ),
375 | )
376 | const runtimeCodeHashKeccak = Hex.toBytes(
377 | keccak256(compiledBytecode as `0x${string}`),
378 | )
379 |
380 | // Compute hashes for creation bytecode (reuse creationBytecodeRaw which already handles 0x prefix)
381 | const creationBytecode = creationBytecodeRaw
382 | const creationBytecodeBytes = Hex.toBytes(creationBytecode as `0x${string}`)
383 | const creationCodeHashSha256 = new Uint8Array(
384 | await globalThis.crypto.subtle.digest(
385 | 'SHA-256',
386 | new TextEncoder().encode(creationBytecode as `0x${string}`),
387 | ),
388 | )
389 | const creationCodeHashKeccak = Hex.toBytes(
390 | keccak256(creationBytecode as `0x${string}`),
391 | )
392 |
393 | // Insert runtime code (ignore if already exists)
394 | await db
395 | .insert(codeTable)
396 | .values({
397 | codeHash: runtimeCodeHashSha256,
398 | codeHashKeccak: runtimeCodeHashKeccak,
399 | code: runtimeBytecodeBytes,
400 | createdBy: auditUser,
401 | updatedBy: auditUser,
402 | })
403 | .onConflictDoNothing()
404 |
405 | // Insert creation code (ignore if already exists)
406 | await db
407 | .insert(codeTable)
408 | .values({
409 | codeHash: creationCodeHashSha256,
410 | codeHashKeccak: creationCodeHashKeccak,
411 | code: creationBytecodeBytes,
412 | createdBy: auditUser,
413 | updatedBy: auditUser,
414 | })
415 | .onConflictDoNothing()
416 |
417 | // Get or create contract
418 | const existingContract = await db
419 | .select({ id: contractsTable.id })
420 | .from(contractsTable)
421 | .where(eq(contractsTable.runtimeCodeHash, runtimeCodeHashSha256))
422 | .limit(1)
423 |
424 | let contractId: string
425 | if (existingContract.length > 0 && existingContract[0]) {
426 | contractId = existingContract[0].id
427 | } else {
428 | contractId = globalThis.crypto.randomUUID()
429 | await db.insert(contractsTable).values({
430 | id: contractId,
431 | creationCodeHash: creationCodeHashSha256,
432 | runtimeCodeHash: runtimeCodeHashSha256,
433 | createdBy: auditUser,
434 | updatedBy: auditUser,
435 | })
436 | }
437 |
438 | // Get or create deployment
439 | const existingDeployment = await db
440 | .select({ id: contractDeploymentsTable.id })
441 | .from(contractDeploymentsTable)
442 | .where(
443 | and(
444 | eq(contractDeploymentsTable.chainId, chainId),
445 | eq(contractDeploymentsTable.address, addressBytes),
446 | ),
447 | )
448 | .limit(1)
449 |
450 | let deploymentId: string
451 | if (existingDeployment.length > 0 && existingDeployment[0]) {
452 | deploymentId = existingDeployment[0].id
453 | } else {
454 | deploymentId = globalThis.crypto.randomUUID()
455 | await db.insert(contractDeploymentsTable).values({
456 | id: deploymentId,
457 | chainId: chainId,
458 | address: addressBytes,
459 | contractId,
460 | createdBy: auditUser,
461 | updatedBy: auditUser,
462 | })
463 | }
464 |
465 | // Get or create compiled contract
466 | const compilerName = isVyper ? 'vyper' : 'solc'
467 | const existingCompilation = await db
468 | .select({ id: compiledContractsTable.id })
469 | .from(compiledContractsTable)
470 | .where(
471 | and(
472 | eq(compiledContractsTable.runtimeCodeHash, runtimeCodeHashSha256),
473 | eq(compiledContractsTable.compiler, compilerName),
474 | eq(compiledContractsTable.version, body.compilerVersion),
475 | ),
476 | )
477 | .limit(1)
478 |
479 | let compilationId: string
480 | if (existingCompilation.length > 0 && existingCompilation[0]) {
481 | compilationId = existingCompilation[0].id
482 | } else {
483 | compilationId = globalThis.crypto.randomUUID()
484 |
485 | // Build code artifacts from compiler output
486 | const creationCodeArtifacts = {
487 | sourceMap: compiledContract.evm.bytecode.sourceMap,
488 | linkReferences: isVyper
489 | ? undefined
490 | : compiledContract.evm.bytecode.linkReferences,
491 | }
492 | const runtimeCodeArtifacts = {
493 | sourceMap: compiledContract.evm.deployedBytecode.sourceMap,
494 | linkReferences,
495 | immutableReferences,
496 | }
497 |
498 | // Build compilation artifacts (ABI, docs, storage layout)
499 | const compilationArtifacts = {
500 | abi: compiledContract.abi,
501 | metadata: compiledContract.metadata,
502 | storageLayout: compiledContract.storageLayout,
503 | userdoc: compiledContract.userdoc,
504 | devdoc: compiledContract.devdoc,
505 | }
506 |
507 | await db.insert(compiledContractsTable).values({
508 | id: compilationId,
509 | compiler: compilerName,
510 | version: body.compilerVersion,
511 | language: stdJsonInput.language,
512 | name: contractName,
513 | fullyQualifiedName: contractIdentifier,
514 | compilerSettings: JSON.stringify(stdJsonInput.settings),
515 | compilationArtifacts: JSON.stringify(compilationArtifacts),
516 | creationCodeHash: creationCodeHashSha256,
517 | creationCodeArtifacts: JSON.stringify(creationCodeArtifacts),
518 | runtimeCodeHash: runtimeCodeHashSha256,
519 | runtimeCodeArtifacts: JSON.stringify(runtimeCodeArtifacts),
520 | createdBy: auditUser,
521 | updatedBy: auditUser,
522 | })
523 | }
524 |
525 | // Insert sources and link them to the compilation (always, even for existing compilations)
526 | for (const [sourcePath, sourceData] of Object.entries(
527 | stdJsonInput.sources,
528 | )) {
529 | const content = sourceData.content
530 | const contentBytes = new TextEncoder().encode(content)
531 | const sourceHashSha256 = new Uint8Array(
532 | await globalThis.crypto.subtle.digest('SHA-256', contentBytes),
533 | )
534 | const sourceHashKeccak = Hex.toBytes(
535 | keccak256(Hex.fromBytes(contentBytes)),
536 | )
537 |
538 | // Insert source (ignore if already exists)
539 | await db
540 | .insert(sourcesTable)
541 | .values({
542 | sourceHash: sourceHashSha256,
543 | sourceHashKeccak: sourceHashKeccak,
544 | content: content,
545 | createdBy: auditUser,
546 | updatedBy: auditUser,
547 | })
548 | .onConflictDoNothing()
549 |
550 | // Link source to compilation with normalized path (convert absolute to relative)
551 | const normalizedPath = normalizeSourcePath(sourcePath)
552 | await db
553 | .insert(compiledContractsSourcesTable)
554 | .values({
555 | id: globalThis.crypto.randomUUID(),
556 | compilationId: compilationId,
557 | sourceHash: sourceHashSha256,
558 | path: normalizedPath,
559 | })
560 | .onConflictDoNothing()
561 | }
562 |
563 | // Extract and insert signatures from ABI
564 | const abi = compiledContract.abi
565 | for (const item of abi) {
566 | let signatureType: SignatureType | null = null
567 | if (item.type === 'function') signatureType = 'function'
568 | else if (item.type === 'event') signatureType = 'event'
569 | else if (item.type === 'error') signatureType = 'error'
570 |
571 | if (signatureType && item.name) {
572 | const inputTypes = (item.inputs ?? []).map((i) => i.type).join(',')
573 | const signature = `${item.name}(${inputTypes})`
574 | const signatureHash32 = Hex.toBytes(
575 | keccak256(Hex.fromString(signature)),
576 | )
577 |
578 | // Insert signature (ignore if exists)
579 | await db
580 | .insert(signaturesTable)
581 | .values({
582 | signatureHash32: signatureHash32,
583 | signature: signature,
584 | })
585 | .onConflictDoNothing()
586 |
587 | // Link signature to compilation
588 | await db
589 | .insert(compiledContractsSignaturesTable)
590 | .values({
591 | id: globalThis.crypto.randomUUID(),
592 | compilationId: compilationId,
593 | signatureHash32: signatureHash32,
594 | signatureType: signatureType,
595 | })
596 | .onConflictDoNothing()
597 | }
598 | }
599 |
600 | // Insert verified contract with transformation data
601 | await db
602 | .insert(verifiedContractsTable)
603 | .values({
604 | deploymentId,
605 | compilationId,
606 | creationMatch: false, // We only verified runtime bytecode
607 | runtimeMatch: true,
608 | runtimeMetadataMatch: isExactMatch,
609 | runtimeValues:
610 | Object.keys(runtimeMatchResult.transformationValues).length > 0
611 | ? JSON.stringify(runtimeMatchResult.transformationValues)
612 | : null,
613 | runtimeTransformations:
614 | runtimeMatchResult.transformations.length > 0
615 | ? JSON.stringify(runtimeMatchResult.transformations)
616 | : null,
617 | createdBy: auditUser,
618 | updatedBy: auditUser,
619 | })
620 | .onConflictDoNothing()
621 |
622 | const verificationResult = await db
623 | .select({ id: verifiedContractsTable.id })
624 | .from(verifiedContractsTable)
625 | .where(eq(verifiedContractsTable.deploymentId, deploymentId))
626 | .limit(1)
627 |
628 | const verificationId =
629 | verificationResult.at(0)?.id?.toString() ?? globalThis.crypto.randomUUID()
630 |
631 | return context.json({ verificationId }, 202)
632 | } catch (error) {
633 | console.error(error)
634 | return sourcifyError(
635 | context,
636 | 500,
637 | 'internal_error',
638 | 'An unexpected error occurred',
639 | )
640 | }
641 | })
642 |
643 | // GET /v2/verify/:verificationId - Check verification job status
644 | verifyRoute.get('/:verificationId', async (context) => {
645 | try {
646 | const { verificationId } = context.req.param()
647 |
648 | const db = drizzle(context.env.CONTRACTS_DB)
649 |
650 | const result = await db
651 | .select({
652 | matchId: verifiedContractsTable.id,
653 | verifiedAt: verifiedContractsTable.createdAt,
654 | runtimeMatch: verifiedContractsTable.runtimeMatch,
655 | creationMatch: verifiedContractsTable.creationMatch,
656 | runtimeMetadataMatch: verifiedContractsTable.runtimeMetadataMatch,
657 | chainId: contractDeploymentsTable.chainId,
658 | address: contractDeploymentsTable.address,
659 | contractName: compiledContractsTable.name,
660 | })
661 | .from(verifiedContractsTable)
662 | .innerJoin(
663 | contractDeploymentsTable,
664 | eq(verifiedContractsTable.deploymentId, contractDeploymentsTable.id),
665 | )
666 | .innerJoin(
667 | compiledContractsTable,
668 | eq(verifiedContractsTable.compilationId, compiledContractsTable.id),
669 | )
670 | .where(eq(verifiedContractsTable.id, Number(verificationId)))
671 | .limit(1)
672 |
673 | if (result.length === 0 || !result[0]) {
674 | return context.json(
675 | {
676 | customCode: 'not_found',
677 | message: `No verification job found for ID ${verificationId}`,
678 | errorId: globalThis.crypto.randomUUID(),
679 | },
680 | 404,
681 | )
682 | }
683 |
684 | const v = result[0]
685 | const runtimeMatchStatus = v.runtimeMetadataMatch ? 'exact_match' : 'match'
686 | const creationMatchStatus = v.creationMatch ? 'exact_match' : 'match'
687 |
688 | // Foundry expects this format for completed jobs
689 | return context.json({
690 | isJobCompleted: true,
691 | contract: {
692 | match: runtimeMatchStatus,
693 | creationMatch: creationMatchStatus,
694 | runtimeMatch: runtimeMatchStatus,
695 | chainId: v.chainId,
696 | address: Hex.fromBytes(new Uint8Array(v.address as ArrayBuffer)),
697 | name: v.contractName,
698 | verifiedAt: v.verifiedAt,
699 | },
700 | })
701 | } catch (error) {
702 | console.error(error)
703 | return context.json(
704 | {
705 | customCode: 'internal_error',
706 | message: 'An unexpected error occurred',
707 | errorId: globalThis.crypto.randomUUID(),
708 | },
709 | 500,
710 | )
711 | }
712 | })
713 |
714 | export { verifyRoute }
715 |
--------------------------------------------------------------------------------
/apps/contract-verification/src/bytecode-matching.ts:
--------------------------------------------------------------------------------
1 | import * as CBOR from 'cbor-x'
2 | import { Hex } from 'ox'
3 | import semver from 'semver'
4 | import {
5 | decodeAbiParameters,
6 | encodeAbiParameters,
7 | keccak256,
8 | toBytes,
9 | } from 'viem'
10 |
11 | // ============================================================================
12 | // Types
13 | // ============================================================================
14 |
15 | export type TransformationType =
16 | | 'constructorArguments'
17 | | 'library'
18 | | 'immutable'
19 | | 'cborAuxdata'
20 | | 'callProtection'
21 |
22 | export interface Transformation {
23 | type: 'insert' | 'replace'
24 | reason: TransformationType
25 | offset: number
26 | id?: string
27 | }
28 |
29 | export interface TransformationValues {
30 | constructorArguments?: string
31 | callProtection?: string
32 | libraries?: Record<string, string>
33 | immutables?: Record<string, string>
34 | cborAuxdata?: Record<string, string>
35 | }
36 |
37 | export interface LinkReference {
38 | start: number
39 | length: number
40 | }
41 |
42 | export interface LinkReferences {
43 | [file: string]: {
44 | [library: string]: LinkReference[]
45 | }
46 | }
47 |
48 | export interface ImmutableReference {
49 | start: number
50 | length: number
51 | }
52 |
53 | export interface ImmutableReferences {
54 | [astId: string]: ImmutableReference[]
55 | }
56 |
57 | export interface CborAuxdataPosition {
58 | offset: number
59 | value: string
60 | }
61 |
62 | export interface CborAuxdataPositions {
63 | [id: string]: CborAuxdataPosition
64 | }
65 |
66 | export interface BytecodeMatchResult {
67 | match: 'exact_match' | 'match' | null
68 | transformations: Transformation[]
69 | transformationValues: TransformationValues
70 | libraryMap?: Record<string, string>
71 | message?: string
72 | }
73 |
74 | export interface SolidityDecodedAuxdata {
75 | ipfs?: string
76 | solcVersion?: string
77 | bzzr0?: string
78 | bzzr1?: string
79 | experimental?: boolean
80 | }
81 |
82 | export interface VyperDecodedAuxdata {
83 | integrity?: string
84 | runtimeSize?: number
85 | dataSizes?: number[]
86 | immutableSize?: number
87 | vyperVersion: string
88 | }
89 |
90 | export enum AuxdataStyle {
91 | SOLIDITY = 'solidity',
92 | VYPER = 'vyper',
93 | VYPER_LT_0_3_10 = 'vyper_lt_0_3_10',
94 | VYPER_LT_0_3_5 = 'vyper_lt_0_3_5',
95 | }
96 |
97 | // ============================================================================
98 | // CBOR Auxdata Utilities
99 | // ============================================================================
100 |
101 | /**
102 | * Determines the auxdata style for a Vyper compiler version.
103 | */
104 | export function getVyperAuxdataStyle(
105 | compilerVersion: string,
106 | ):
107 | | AuxdataStyle.VYPER
108 | | AuxdataStyle.VYPER_LT_0_3_10
109 | | AuxdataStyle.VYPER_LT_0_3_5 {
110 | const version = semver.valid(semver.coerce(compilerVersion))
111 | if (!version) {
112 | return AuxdataStyle.VYPER
113 | }
114 | if (semver.lt(version, '0.3.5')) {
115 | return AuxdataStyle.VYPER_LT_0_3_5
116 | }
117 | if (semver.lt(version, '0.3.10')) {
118 | return AuxdataStyle.VYPER_LT_0_3_10
119 | }
120 | return AuxdataStyle.VYPER
121 | }
122 |
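/*
 * Usage sketch for `getVyperAuxdataStyle` (example inputs; versions are coerced with
 * semver before comparison, so prefixed forms like 'v0.3.7' resolve the same way):
 *
 *   getVyperAuxdataStyle('0.3.4')  // -> AuxdataStyle.VYPER_LT_0_3_5
 *   getVyperAuxdataStyle('v0.3.7') // -> AuxdataStyle.VYPER_LT_0_3_10
 *   getVyperAuxdataStyle('0.4.0')  // -> AuxdataStyle.VYPER
 */
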
123 | /**
124 | * Splits bytecode into execution bytecode and CBOR auxdata.
125 | * Supports both Solidity and Vyper auxdata formats.
126 | * Format: <execution bytecode><cbor data><2 byte length>
127 | */
128 | export function splitAuxdata(
129 | bytecode: string,
130 | auxdataStyle: AuxdataStyle = AuxdataStyle.SOLIDITY,
131 | ): {
132 | executionBytecode: string
133 | auxdata: string | null
134 | cborLength: number
135 | cborLengthHex: string
136 | } {
137 | if (!bytecode || bytecode.length < 4) {
138 | return {
139 | executionBytecode: bytecode,
140 | auxdata: null,
141 | cborLength: 0,
142 | cborLengthHex: '',
143 | }
144 | }
145 |
146 | const code = bytecode.startsWith('0x') ? bytecode : `0x${bytecode}`
147 | const bytesLength = 4 // 2 bytes = 4 hex chars
148 |
149 | // Vyper < 0.3.5 has a fixed 11-byte auxdata (22 hex chars) with no length suffix
150 | if (auxdataStyle === AuxdataStyle.VYPER_LT_0_3_5) {
151 | const fixedAuxdataLength = 22
152 | if (code.length <= fixedAuxdataLength + 2) {
153 | return {
154 | executionBytecode: code,
155 | auxdata: null,
156 | cborLength: 0,
157 | cborLengthHex: '',
158 | }
159 | }
160 | const auxdata = code.slice(-fixedAuxdataLength)
161 | const executionBytecode = code.slice(0, -fixedAuxdataLength)
162 |
163 | // Validate it's CBOR encoded
164 | if (isCborEncoded(auxdata)) {
165 | return {
166 | executionBytecode,
167 | auxdata,
168 | cborLength: fixedAuxdataLength / 2,
169 | cborLengthHex: '',
170 | }
171 | }
172 | return {
173 | executionBytecode: code,
174 | auxdata: null,
175 | cborLength: 0,
176 | cborLengthHex: '',
177 | }
178 | }
179 |
180 | // All other formats have a 2-byte length suffix
181 | const cborLengthHex = code.slice(-bytesLength)
182 | const cborBytesLength = Number.parseInt(cborLengthHex, 16) * 2
183 |
184 | // Validate length
185 | if (
186 | cborBytesLength <= 0 ||
187 | code.length - bytesLength - cborBytesLength <= 0
188 | ) {
189 | return {
190 | executionBytecode: code,
191 | auxdata: null,
192 | cborLength: 0,
193 | cborLengthHex: '',
194 | }
195 | }
196 |
197 | let auxdata: string
198 | let executionBytecode: string
199 |
200 | switch (auxdataStyle) {
201 | case AuxdataStyle.VYPER:
202 | // Vyper >= 0.3.10: length bytes include themselves in the count
203 | auxdata = code.slice(
204 | code.length - cborBytesLength,
205 | code.length - bytesLength,
206 | )
207 | executionBytecode = code.slice(0, code.length - cborBytesLength)
208 | break
209 | default:
210 | // Solidity and Vyper < 0.3.10: length bytes don't include themselves
211 | auxdata = code.slice(
212 | code.length - bytesLength - cborBytesLength,
213 | code.length - bytesLength,
214 | )
215 | executionBytecode = code.slice(
216 | 0,
217 | code.length - bytesLength - cborBytesLength,
218 | )
219 | break
220 | }
221 |
222 | // Validate it's CBOR encoded
223 | if (isCborEncoded(auxdata)) {
224 | return {
225 | executionBytecode,
226 | auxdata,
227 | cborLength: cborBytesLength / 2,
228 | cborLengthHex,
229 | }
230 | }
231 |
232 | return {
233 | executionBytecode: code,
234 | auxdata: null,
235 | cborLength: 0,
236 | cborLengthHex: '',
237 | }
238 | }
239 |
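/*
 * Usage sketch for `splitAuxdata` on a solc-produced runtime bytecode whose tail is the
 * usual CBOR section `a2 64 'ipfs' 58 22 <34-byte hash> 64 'solc' 43 <3-byte version>`
 * followed by the 2-byte length suffix `0033` (layout illustrative, not a real contract):
 *
 *   const { executionBytecode, auxdata, cborLength } = splitAuxdata(runtimeBytecode)
 *   // executionBytecode -> everything before the CBOR section
 *   // auxdata           -> the CBOR hex ('a26469706673...'), without the length suffix
 *   // cborLength        -> 0x33 = 51 bytes for this layout
 */
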
240 | /**
241 | * Attempts to decode the auxdata to verify if it's CBOR-encoded.
242 | */
243 | function isCborEncoded(auxdata: string): boolean {
244 | try {
245 | CBOR.decode(Hex.toBytes(`0x${auxdata}`))
246 | return true
247 | } catch {
248 | return false
249 | }
250 | }
251 |
252 | /**
253 | * Decodes Vyper CBOR auxdata and returns parsed metadata.
254 | */
255 | export function decodeVyperAuxdata(
256 | bytecode: string,
257 | auxdataStyle: AuxdataStyle,
258 | ): VyperDecodedAuxdata {
259 | const { auxdata } = splitAuxdata(bytecode, auxdataStyle)
260 | if (!auxdata) {
261 | throw new Error('Auxdata is not in the bytecode')
262 | }
263 |
264 | const cborDecodedObject = CBOR.decode(Hex.toBytes(`0x${auxdata}`)) as unknown
265 |
266 | if (auxdataStyle === AuxdataStyle.VYPER) {
267 | // Vyper >= 0.3.10 stores auxdata as an array
268 | if (Array.isArray(cborDecodedObject)) {
269 | const lastElement = cborDecodedObject[cborDecodedObject.length - 1] as {
270 | vyper: number[]
271 | }
272 | const compilerVersion = lastElement.vyper.join('.')
273 |
274 | if (semver.gte(compilerVersion, '0.4.1')) {
275 | // >= 0.4.1: [integrity, runtimeSize, dataSizes, immutableSize, {vyper: [v]}]
276 | return {
277 | integrity: cborDecodedObject[0] as string,
278 | runtimeSize: cborDecodedObject[1] as number,
279 | dataSizes: cborDecodedObject[2] as number[],
280 | immutableSize: cborDecodedObject[3] as number,
281 | vyperVersion: compilerVersion,
282 | }
283 | }
284 | // >= 0.3.10: [runtimeSize, dataSizes, immutableSize, {vyper: [v]}]
285 | return {
286 | runtimeSize: cborDecodedObject[0] as number,
287 | dataSizes: cborDecodedObject[1] as number[],
288 | immutableSize: cborDecodedObject[2] as number,
289 | vyperVersion: compilerVersion,
290 | }
291 | }
292 | throw new Error('Invalid Vyper auxdata format for version >= 0.3.10')
293 | }
294 |
295 | // Vyper < 0.3.10: just {vyper: [0, 3, 8]}
296 | const decoded = cborDecodedObject as { vyper?: number[] } | null
297 | if (decoded?.vyper) {
298 | return {
299 | vyperVersion: decoded.vyper.join('.'),
300 | }
301 | }
302 |
303 | throw new Error('Invalid Vyper auxdata format')
304 | }
305 |
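/*
 * Usage sketch for `decodeVyperAuxdata`, assuming a creation bytecode produced by
 * Vyper >= 0.3.10 (field values illustrative):
 *
 *   const decoded = decodeVyperAuxdata(creationBytecode, AuxdataStyle.VYPER)
 *   // e.g. { runtimeSize: 1234, dataSizes: [0], immutableSize: 64, vyperVersion: '0.3.10' }
 *   // For >= 0.4.1 the decoded object additionally carries `integrity`.
 */
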
306 | /**
307 | * Computes immutable references for Vyper contracts.
308 | * Vyper appends immutables at the end of runtime bytecode (unlike Solidity which has fixed offsets).
309 | */
310 | export function getVyperImmutableReferences(
311 | compilerVersion: string,
312 | creationBytecode: string,
313 | runtimeBytecode: string,
314 | ): ImmutableReferences {
315 | const auxdataStyle = getVyperAuxdataStyle(compilerVersion)
316 |
317 | // Only Vyper >= 0.3.10 has immutable size in auxdata
318 | if (auxdataStyle !== AuxdataStyle.VYPER) {
319 | return {}
320 | }
321 |
322 | try {
323 | const decoded = decodeVyperAuxdata(creationBytecode, auxdataStyle)
324 | if (decoded.immutableSize && decoded.immutableSize > 0) {
325 | const runtimeLength = runtimeBytecode.startsWith('0x')
326 | ? (runtimeBytecode.length - 2) / 2
327 | : runtimeBytecode.length / 2
328 | return {
329 | '0': [
330 | {
331 | length: decoded.immutableSize,
332 | start: runtimeLength,
333 | },
334 | ],
335 | }
336 | }
337 | } catch {
338 | // Cannot decode auxdata, return empty
339 | }
340 |
341 | return {}
342 | }
343 |
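/*
 * Usage sketch for `getVyperImmutableReferences`: with an assumed immutableSize of
 * 64 bytes and a 2048-byte recompiled runtime, the single reference points just past
 * the end of the recompiled code, where Vyper appends immutable values at deploy time:
 *
 *   getVyperImmutableReferences('0.3.10', creationBytecode, runtimeBytecode)
 *   // -> { '0': [{ start: 2048, length: 64 }] }
 */
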
344 | /**
345 | * Check if bytecode has CBOR auxdata with a content hash.
346 | * We don't fully decode CBOR, just check for presence of IPFS/bzzr markers.
347 | */
348 | export function hasContentHash(bytecode: string): boolean {
349 | const { auxdata } = splitAuxdata(bytecode)
350 | if (!auxdata) return false
351 |
352 | // Look for IPFS marker: "ipfs" in CBOR = 64697066735822 (text string with length)
353 | // Look for bzzr0/bzzr1 markers
354 | const lower = auxdata.toLowerCase()
355 | return (
356 | lower.includes('6970667358') || // ipfs
357 | lower.includes('627a7a7230') || // bzzr0
358 | lower.includes('627a7a7231') // bzzr1
359 | )
360 | }
361 |
362 | // ============================================================================
363 | // Library Placeholder Handling
364 | // ============================================================================
365 |
366 | /**
367 | * Library placeholders in unlinked bytecode look like:
368 | * - Post v0.5.0: __$<keccak256(fqn).slice(0,34)>$__
369 | * - Pre v0.5.0: __<fqn padded to 36 chars>__
370 | * - Zeroed: 0x0000000000000000000000000000000000000000 (40 zeros)
371 | */
372 | export function extractLibrariesTransformation(
373 | recompiledBytecode: string,
374 | onchainBytecode: string,
375 | linkReferences: LinkReferences | undefined,
376 | ): {
377 | populatedBytecode: string
378 | transformations: Transformation[]
379 | transformationValues: TransformationValues
380 | libraryMap: Record<string, string>
381 | } {
382 | const transformations: Transformation[] = []
383 | const transformationValues: TransformationValues = {}
384 | const libraryMap: Record<string, string> = {}
385 |
386 | if (!linkReferences || Object.keys(linkReferences).length === 0) {
387 | return {
388 | populatedBytecode: recompiledBytecode,
389 | transformations,
390 | transformationValues,
391 | libraryMap,
392 | }
393 | }
394 |
395 | let populatedBytecode = recompiledBytecode
396 |
397 | for (const file of Object.keys(linkReferences)) {
398 | const fileRefs = linkReferences[file]
399 | if (!fileRefs) continue
400 |
401 | for (const lib of Object.keys(fileRefs)) {
402 | const libRefs = fileRefs[lib]
403 | if (!libRefs) continue
404 |
405 | const fqn = `${file}:${lib}` // Fully Qualified Name
406 |
407 | for (const ref of libRefs) {
408 | const { start, length } = ref
409 | const strStart = start * 2 + 2 // Each byte = 2 hex chars, +2 for 0x
410 | const strLength = length * 2
411 |
412 | const placeholder = populatedBytecode.slice(
413 | strStart,
414 | strStart + strLength,
415 | )
416 |
417 | // Calculate expected placeholders
418 | const fqnHash = keccak256(toBytes(fqn))
419 | const postV050Placeholder = `__$${fqnHash.slice(2, 36)}$__`
420 | const trimmedFQN = fqn.slice(0, 36)
421 | const preV050Placeholder = `__${trimmedFQN.padEnd(38, '_')}`
422 | const zeroedPlaceholder = '0'.repeat(40)
423 |
424 | // Validate placeholder matches expected format
425 | if (
426 | placeholder !== postV050Placeholder &&
427 | placeholder !== preV050Placeholder &&
428 | placeholder !== zeroedPlaceholder
429 | ) {
430 | throw new Error(
431 | `Library placeholder mismatch for ${fqn}: got ${placeholder}`,
432 | )
433 | }
434 |
435 | // Extract actual library address from onchain bytecode
436 | const actualAddress = onchainBytecode.slice(
437 | strStart,
438 | strStart + strLength,
439 | )
440 | libraryMap[fqn] = `0x${actualAddress}`
441 |
442 | // Replace placeholder with actual address
443 | populatedBytecode =
444 | populatedBytecode.slice(0, strStart) +
445 | actualAddress +
446 | populatedBytecode.slice(strStart + strLength)
447 |
448 | transformations.push({
449 | type: 'replace',
450 | reason: 'library',
451 | offset: start,
452 | id: fqn,
453 | })
454 |
455 | if (!transformationValues.libraries) {
456 | transformationValues.libraries = {}
457 | }
458 | transformationValues.libraries[fqn] = `0x${actualAddress}`
459 | }
460 | }
461 | }
462 |
463 | return {
464 | populatedBytecode,
465 | transformations,
466 | transformationValues,
467 | libraryMap,
468 | }
469 | }
470 |
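/*
 * Usage sketch for `extractLibrariesTransformation`, with an assumed link reference for
 * a library `contracts/Math.sol:Math` occupying 20 bytes at byte offset 1042:
 *
 *   const { populatedBytecode, libraryMap } = extractLibrariesTransformation(
 *     recompiledRuntimeBytecode, // still contains the __$...$__ placeholder
 *     onchainRuntimeBytecode,    // contains the deployed library address
 *     { 'contracts/Math.sol': { Math: [{ start: 1042, length: 20 }] } },
 *   )
 *   // libraryMap -> { 'contracts/Math.sol:Math': '0x<address read from onchain bytecode>' }
 */
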
471 | // ============================================================================
472 | // Immutable Variable Handling
473 | // ============================================================================
474 |
475 | /**
476 | * Immutable variables are replaced with zeros in compiled bytecode.
477 | * We need to extract their actual values from onchain bytecode and
478 | * replace the zeros with those values for matching.
479 | *
480 | * For Solidity: immutables are at fixed offsets, we replace zeros with actual values.
481 | * For Vyper: immutables are appended at the end of runtime bytecode, we insert them.
482 | */
483 | export function extractImmutablesTransformation(
484 | recompiledBytecode: string,
485 | onchainBytecode: string,
486 | immutableReferences: ImmutableReferences | undefined,
487 | auxdataStyle: AuxdataStyle = AuxdataStyle.SOLIDITY,
488 | ): {
489 | populatedBytecode: string
490 | transformations: Transformation[]
491 | transformationValues: TransformationValues
492 | } {
493 | const transformations: Transformation[] = []
494 | const transformationValues: TransformationValues = {}
495 |
496 | if (!immutableReferences || Object.keys(immutableReferences).length === 0) {
497 | return {
498 | populatedBytecode: recompiledBytecode,
499 | transformations,
500 | transformationValues,
501 | }
502 | }
503 |
504 | // Remove 0x prefix for manipulation
505 | let bytecodeNoPrefix = recompiledBytecode.startsWith('0x')
506 | ? recompiledBytecode.slice(2)
507 | : recompiledBytecode
508 | const onchainNoPrefix = onchainBytecode.startsWith('0x')
509 | ? onchainBytecode.slice(2)
510 | : onchainBytecode
511 |
512 | const isVyper =
513 | auxdataStyle === AuxdataStyle.VYPER ||
514 | auxdataStyle === AuxdataStyle.VYPER_LT_0_3_10 ||
515 | auxdataStyle === AuxdataStyle.VYPER_LT_0_3_5
516 |
517 | for (const astId of Object.keys(immutableReferences)) {
518 | const refs = immutableReferences[astId]
519 | if (!refs) continue
520 |
521 | for (const ref of refs) {
522 | const { start, length } = ref
523 | const strStart = start * 2
524 | const strLength = length * 2
525 |
526 | // Extract immutable value from onchain bytecode
527 | const immutableValue = onchainNoPrefix.slice(
528 | strStart,
529 | strStart + strLength,
530 | )
531 |
532 | if (isVyper) {
533 | // Vyper: immutables are appended at the end, insert them
534 | bytecodeNoPrefix = bytecodeNoPrefix + immutableValue
535 |
536 | transformations.push({
537 | type: 'insert',
538 | reason: 'immutable',
539 | offset: start,
540 | id: astId,
541 | })
542 | } else {
543 | // Solidity: immutables are at fixed offsets, replace zeros with actual value
544 | bytecodeNoPrefix =
545 | bytecodeNoPrefix.slice(0, strStart) +
546 | immutableValue +
547 | bytecodeNoPrefix.slice(strStart + strLength)
548 |
549 | transformations.push({
550 | type: 'replace',
551 | reason: 'immutable',
552 | offset: start,
553 | id: astId,
554 | })
555 | }
556 |
557 | if (!transformationValues.immutables) {
558 | transformationValues.immutables = {}
559 | }
560 | transformationValues.immutables[astId] = `0x${immutableValue}`
561 | }
562 | }
563 |
564 | return {
565 | populatedBytecode: `0x${bytecodeNoPrefix}`,
566 | transformations,
567 | transformationValues,
568 | }
569 | }
570 |
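/*
 * Usage sketch for `extractImmutablesTransformation` with a Solidity-style reference
 * (AST id '7', one 32-byte slot at byte offset 412 — values assumed for illustration):
 *
 *   const result = extractImmutablesTransformation(
 *     recompiledRuntimeBytecode,           // has zeros at the immutable positions
 *     onchainRuntimeBytecode,              // has the real values written by the constructor
 *     { '7': [{ start: 412, length: 32 }] },
 *     AuxdataStyle.SOLIDITY,
 *   )
 *   // result.transformationValues.immutables -> { '7': '0x<value copied from onchain>' }
 */
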
571 | // ============================================================================
572 | // Call Protection Handling (for Libraries)
573 | // ============================================================================
574 |
575 | /**
576 | * Libraries deployed as standalone contracts have "call protection"
577 | * at the start: PUSH20 followed by the library address (20 bytes of zeros in compiled).
578 | * Format: 0x73 + 20 bytes (address) at position 0
579 | */
580 | export function extractCallProtectionTransformation(
581 | recompiledBytecode: string,
582 | onchainBytecode: string,
583 | ): {
584 | populatedBytecode: string
585 | transformations: Transformation[]
586 | transformationValues: TransformationValues
587 | } {
588 | const transformations: Transformation[] = []
589 | const transformationValues: TransformationValues = {}
590 |
591 | // PUSH20 opcode (0x73) followed by 20 zero bytes
592 | const callProtectionPlaceholder = `0x73${'00'.repeat(20)}`
593 |
594 | if (recompiledBytecode.toLowerCase().startsWith(callProtectionPlaceholder)) {
595 | // Extract actual address from onchain bytecode
596 | const actualProtection = onchainBytecode.slice(
597 | 0,
598 | callProtectionPlaceholder.length,
599 | )
600 | const actualAddress = actualProtection.slice(4) // Remove 0x73
601 |
602 | transformations.push({
603 | type: 'replace',
604 | reason: 'callProtection',
605 | offset: 1, // After the PUSH20 opcode
606 | })
607 | transformationValues.callProtection = `0x${actualAddress}`
608 |
609 | const populatedBytecode =
610 | actualProtection +
611 | recompiledBytecode.slice(callProtectionPlaceholder.length)
612 |
613 | return { populatedBytecode, transformations, transformationValues }
614 | }
615 |
616 | return {
617 | populatedBytecode: recompiledBytecode,
618 | transformations,
619 | transformationValues,
620 | }
621 | }
622 |
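/*
 * Usage sketch for `extractCallProtectionTransformation`. For a library compiled by solc,
 * the recompiled runtime starts with PUSH20 (0x73) and 20 zero bytes; onchain, those
 * zeros are the library's own deployed address:
 *
 *   const { transformationValues } = extractCallProtectionTransformation(
 *     recompiledLibraryRuntime, // '0x73' + 40 zero hex chars + ...
 *     onchainLibraryRuntime,    // '0x73' + deployed address + ...
 *   )
 *   // transformationValues.callProtection -> '0x<deployed library address>'
 */
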
623 | // ============================================================================
624 | // Constructor Arguments Handling
625 | // ============================================================================
626 |
627 | /**
628 | * Constructor arguments are ABI-encoded and appended to creation bytecode.
629 | * To match, we extract them from the difference in bytecode lengths.
630 | */
631 | export function extractConstructorArgumentsTransformation(
632 | recompiledCreationBytecode: string,
633 | onchainCreationBytecode: string,
634 | abi: Array<{
635 | type: string
636 | inputs?: Array<{ type: string; name?: string }>
637 | }>,
638 | ): {
639 | populatedBytecode: string
640 | transformations: Transformation[]
641 | transformationValues: TransformationValues
642 | constructorArguments: string | null
643 | } {
644 | const transformations: Transformation[] = []
645 | const transformationValues: TransformationValues = {}
646 |
647 | // If lengths are equal, no constructor arguments
648 | if (onchainCreationBytecode.length === recompiledCreationBytecode.length) {
649 | return {
650 | populatedBytecode: recompiledCreationBytecode,
651 | transformations,
652 | transformationValues,
653 | constructorArguments: null,
654 | }
655 | }
656 |
657 | // Extract potential constructor arguments
658 | const argsHex = onchainCreationBytecode.slice(
659 | recompiledCreationBytecode.length,
660 | )
661 | const constructorArguments = `0x${argsHex}` as `0x${string}`
662 |
663 | // Find constructor in ABI
664 | const constructorAbi = abi.find((item) => item.type === 'constructor')
665 | if (!constructorAbi?.inputs || constructorAbi.inputs.length === 0) {
666 | // No constructor params expected but we have extra bytes
667 | // This could be a mismatch or special case
668 | return {
669 | populatedBytecode: recompiledCreationBytecode,
670 | transformations,
671 | transformationValues,
672 | constructorArguments,
673 | }
674 | }
675 |
676 | // Validate by decoding and re-encoding
677 | try {
678 | const paramTypes = constructorAbi.inputs.map((i) => ({
679 | type: i.type,
680 | name: i.name,
681 | }))
682 | const decoded = decodeAbiParameters(paramTypes, constructorArguments)
683 | const reencoded = encodeAbiParameters(paramTypes, decoded as unknown[])
684 |
685 | if (reencoded.toLowerCase() !== constructorArguments.toLowerCase()) {
686 | throw new Error('Constructor arguments mismatch after re-encoding')
687 | }
688 |
689 | transformations.push({
690 | type: 'insert',
691 | reason: 'constructorArguments',
692 | offset: (recompiledCreationBytecode.length - 2) / 2, // Offset in bytes
693 | })
694 | transformationValues.constructorArguments = constructorArguments
695 |
696 | return {
697 | populatedBytecode: recompiledCreationBytecode,
698 | transformations,
699 | transformationValues,
700 | constructorArguments,
701 | }
702 | } catch {
703 | // Failed to decode/validate constructor arguments
704 | return {
705 | populatedBytecode: recompiledCreationBytecode,
706 | transformations,
707 | transformationValues,
708 | constructorArguments,
709 | }
710 | }
711 | }
712 |
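/*
 * Usage sketch for `extractConstructorArgumentsTransformation`, assuming a constructor
 * taking (address, uint256). The onchain creation input is the recompiled creation
 * bytecode plus 64 bytes of ABI-encoded arguments:
 *
 *   const { constructorArguments } = extractConstructorArgumentsTransformation(
 *     recompiledCreationBytecode,
 *     onchainCreationBytecode, // = recompiled bytecode + encoded args
 *     [{ type: 'constructor', inputs: [{ type: 'address' }, { type: 'uint256' }] }],
 *   )
 *   // constructorArguments -> '0x' + 128 hex chars (decoded and re-encoded to validate)
 */
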
713 | // ============================================================================
714 | // CBOR Auxdata Transformation
715 | // ============================================================================
716 |
717 | /**
718 | * Replace CBOR auxdata sections in recompiled bytecode with values from onchain.
719 | * This allows partial matching when only metadata differs.
720 | */
721 | export function extractAuxdataTransformation(
722 | recompiledBytecode: string,
723 | onchainBytecode: string,
724 | cborAuxdataPositions: CborAuxdataPositions | undefined,
725 | ): {
726 | populatedBytecode: string
727 | transformations: Transformation[]
728 | transformationValues: TransformationValues
729 | } {
730 | const transformations: Transformation[] = []
731 | const transformationValues: TransformationValues = {}
732 |
733 | if (!cborAuxdataPositions || Object.keys(cborAuxdataPositions).length === 0) {
734 | return {
735 | populatedBytecode: recompiledBytecode,
736 | transformations,
737 | transformationValues,
738 | }
739 | }
740 |
741 | let populatedBytecode = recompiledBytecode
742 |
743 | for (const [id, auxdata] of Object.entries(cborAuxdataPositions)) {
744 | const { offset, value } = auxdata
745 | const strStart = offset * 2 + 2 // +2 for 0x
746 | const strEnd = strStart + value.length - 2 // -2 because value includes 0x
747 |
748 | // Get corresponding section from onchain bytecode
749 | const onchainAuxdata = onchainBytecode.slice(strStart, strEnd)
750 |
751 | if (onchainAuxdata.length === 0) {
752 | // Onchain bytecode might have auxdata disabled (has 0xff terminator)
753 | // Remove the auxdata section entirely
754 | populatedBytecode =
755 | populatedBytecode.slice(0, strStart - 2) +
756 | populatedBytecode.slice(strEnd)
757 | } else {
758 | // Replace with onchain auxdata
759 | populatedBytecode =
760 | populatedBytecode.slice(0, strStart) +
761 | onchainAuxdata +
762 | populatedBytecode.slice(strEnd)
763 | }
764 |
765 | transformations.push({
766 | type: 'replace',
767 | reason: 'cborAuxdata',
768 | offset,
769 | id,
770 | })
771 |
772 | if (!transformationValues.cborAuxdata) {
773 | transformationValues.cborAuxdata = {}
774 | }
775 | transformationValues.cborAuxdata[id] = `0x${onchainAuxdata}`
776 | }
777 |
778 | return {
779 | populatedBytecode,
780 | transformations,
781 | transformationValues,
782 | }
783 | }
784 |
785 | // ============================================================================
786 | // Main Bytecode Matching
787 | // ============================================================================
788 |
789 | export interface MatchBytecodeOptions {
790 | onchainBytecode: string
791 | recompiledBytecode: string
792 | isCreation: boolean
793 | linkReferences?: LinkReferences
794 | immutableReferences?: ImmutableReferences
795 | cborAuxdataPositions?: CborAuxdataPositions
796 | auxdataStyle?: AuxdataStyle
797 | abi?: Array<{
798 | type: string
799 | inputs?: Array<{ type: string; name?: string }>
800 | }>
801 | }
802 |
803 | /**
804 | * Main function to match recompiled bytecode against onchain bytecode.
805 | * Handles libraries, immutables, call protection, constructor args, and metadata.
806 | */
807 | export function matchBytecode(
808 | options: MatchBytecodeOptions,
809 | ): BytecodeMatchResult {
810 | const {
811 | onchainBytecode,
812 | recompiledBytecode,
813 | isCreation,
814 | linkReferences,
815 | immutableReferences,
816 | cborAuxdataPositions,
817 | auxdataStyle = AuxdataStyle.SOLIDITY,
818 | abi,
819 | } = options
820 |
821 | const allTransformations: Transformation[] = []
822 | const allTransformationValues: TransformationValues = {}
823 | let populatedBytecode = recompiledBytecode
824 | let libraryMap: Record<string, string> = {}
825 |
826 | // 1. Handle call protection (runtime only, for libraries)
827 | if (!isCreation) {
828 | const callProtectionResult = extractCallProtectionTransformation(
829 | populatedBytecode,
830 | onchainBytecode,
831 | )
832 | populatedBytecode = callProtectionResult.populatedBytecode
833 | allTransformations.push(...callProtectionResult.transformations)
834 | Object.assign(
835 | allTransformationValues,
836 | callProtectionResult.transformationValues,
837 | )
838 | }
839 |
840 | // 2. Handle immutables (runtime only)
841 | if (!isCreation && immutableReferences) {
842 | const immutablesResult = extractImmutablesTransformation(
843 | populatedBytecode,
844 | onchainBytecode,
845 | immutableReferences,
846 | auxdataStyle,
847 | )
848 | populatedBytecode = immutablesResult.populatedBytecode
849 | allTransformations.push(...immutablesResult.transformations)
850 | Object.assign(
851 | allTransformationValues,
852 | immutablesResult.transformationValues,
853 | )
854 | }
855 |
856 | // 3. Handle library placeholders (both creation and runtime)
857 | if (linkReferences) {
858 | const librariesResult = extractLibrariesTransformation(
859 | populatedBytecode,
860 | onchainBytecode,
861 | linkReferences,
862 | )
863 | populatedBytecode = librariesResult.populatedBytecode
864 | allTransformations.push(...librariesResult.transformations)
865 | Object.assign(allTransformationValues, librariesResult.transformationValues)
866 | libraryMap = librariesResult.libraryMap
867 | }
868 |
869 | // 4. Check for direct match
870 | const doBytecodesMatch = isCreation
871 | ? onchainBytecode.toLowerCase().startsWith(populatedBytecode.toLowerCase())
872 | : populatedBytecode.toLowerCase() === onchainBytecode.toLowerCase()
873 |
874 | if (doBytecodesMatch) {
875 | // Check if this is a "perfect" match (has valid content hash) or "partial" (no hash)
876 | const isPerfect = hasContentHash(recompiledBytecode)
877 |
878 | // For creation bytecode, also extract constructor arguments
879 | if (isCreation && abi) {
880 | const constructorResult = extractConstructorArgumentsTransformation(
881 | populatedBytecode,
882 | onchainBytecode,
883 | abi,
884 | )
885 | allTransformations.push(...constructorResult.transformations)
886 | Object.assign(
887 | allTransformationValues,
888 | constructorResult.transformationValues,
889 | )
890 | }
891 |
892 | return {
893 | match: isPerfect ? 'exact_match' : 'match',
894 | transformations: allTransformations,
895 | transformationValues: allTransformationValues,
896 | libraryMap: Object.keys(libraryMap).length > 0 ? libraryMap : undefined,
897 | }
898 | }
899 |
900 | // 5. Try partial match by replacing CBOR auxdata
901 | if (cborAuxdataPositions && Object.keys(cborAuxdataPositions).length > 0) {
902 | const auxdataResult = extractAuxdataTransformation(
903 | populatedBytecode,
904 | onchainBytecode,
905 | cborAuxdataPositions,
906 | )
907 | const populatedWithAuxdata = auxdataResult.populatedBytecode
908 |
909 | const doPopulatedMatch = isCreation
910 | ? onchainBytecode
911 | .toLowerCase()
912 | .startsWith(populatedWithAuxdata.toLowerCase())
913 | : populatedWithAuxdata.toLowerCase() === onchainBytecode.toLowerCase()
914 |
915 | if (doPopulatedMatch) {
916 | allTransformations.push(...auxdataResult.transformations)
917 | Object.assign(allTransformationValues, auxdataResult.transformationValues)
918 |
919 | // For creation bytecode, extract constructor arguments
920 | if (isCreation && abi) {
921 | const constructorResult = extractConstructorArgumentsTransformation(
922 | populatedWithAuxdata,
923 | onchainBytecode,
924 | abi,
925 | )
926 | allTransformations.push(...constructorResult.transformations)
927 | Object.assign(
928 | allTransformationValues,
929 | constructorResult.transformationValues,
930 | )
931 | }
932 |
933 | return {
934 | match: 'match',
935 | transformations: allTransformations,
936 | transformationValues: allTransformationValues,
937 | libraryMap: Object.keys(libraryMap).length > 0 ? libraryMap : undefined,
938 | }
939 | }
940 | }
941 |
942 | // 6. No match - try one more thing: strip metadata and compare
943 | const { executionBytecode: onchainExec } = splitAuxdata(
944 | onchainBytecode,
945 | auxdataStyle,
946 | )
947 | const { executionBytecode: recompiledExec } = splitAuxdata(
948 | populatedBytecode,
949 | auxdataStyle,
950 | )
951 |
952 | if (
953 | onchainExec &&
954 | recompiledExec &&
955 | onchainExec.toLowerCase() === recompiledExec.toLowerCase()
956 | ) {
957 | return {
958 | match: 'match',
959 | transformations: allTransformations,
960 | transformationValues: allTransformationValues,
961 | libraryMap: Object.keys(libraryMap).length > 0 ? libraryMap : undefined,
962 | message: 'Matched after stripping metadata',
963 | }
964 | }
965 |
966 | // No match
967 | return {
968 | match: null,
969 | transformations: allTransformations,
970 | transformationValues: allTransformationValues,
971 | libraryMap: Object.keys(libraryMap).length > 0 ? libraryMap : undefined,
972 | message: 'Bytecodes do not match',
973 | }
974 | }
975 |
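/*
 * Usage sketch for `matchBytecode` on runtime bytecode, mirroring how route.verify.ts
 * calls it (the compiler-output fields are assumed to come from solc std-json output):
 *
 *   const result = matchBytecode({
 *     onchainBytecode,
 *     recompiledBytecode: compiledDeployedBytecode,
 *     isCreation: false,
 *     linkReferences: output.evm.deployedBytecode.linkReferences,
 *     immutableReferences: output.evm.deployedBytecode.immutableReferences,
 *     auxdataStyle: AuxdataStyle.SOLIDITY,
 *     abi: output.abi,
 *   })
 *   // result.match is 'exact_match', 'match', or null; transformations and
 *   // transformationValues record every library, immutable, call-protection, or
 *   // auxdata substitution that was applied to reach the match.
 */
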
976 | // ============================================================================
977 | // Simplified Matching for Quick Verification
978 | // ============================================================================
979 |
980 | /**
981 | * Simplified matching that doesn't require compiler output details.
982 | * Uses heuristic metadata stripping for partial matching.
983 | */
984 | export function matchBytecodeSimple(
985 | onchainBytecode: string,
986 | compiledBytecode: string,
987 | ): { match: 'exact_match' | 'match' | null; message?: string } {
988 | const onchain = onchainBytecode.toLowerCase()
989 | const compiled = compiledBytecode.toLowerCase()
990 |
991 | // Exact match
992 | if (onchain === compiled) {
993 | return { match: 'exact_match' }
994 | }
995 |
996 | // Try stripping CBOR metadata
997 | const { executionBytecode: onchainExec, auxdata: onchainAux } =
998 | splitAuxdata(onchainBytecode)
999 | const { executionBytecode: compiledExec, auxdata: compiledAux } =
1000 | splitAuxdata(compiledBytecode)
1001 |
1002 | // Both have auxdata and execution matches
1003 | if (
1004 | onchainAux &&
1005 | compiledAux &&
1006 | onchainExec.toLowerCase() === compiledExec.toLowerCase()
1007 | ) {
1008 | return { match: 'match', message: 'Matched with different metadata' }
1009 | }
1010 |
1011 | // Only compiled has auxdata (onchain might have it stripped)
1012 | if (
1013 | compiledAux &&
1014 | !onchainAux &&
1015 | onchain.startsWith(compiledExec.toLowerCase())
1016 | ) {
1017 | return { match: 'match', message: 'Onchain bytecode has no metadata' }
1018 | }
1019 |
1020 | return { match: null, message: 'No match found' }
1021 | }
1022 |
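/*
 * Usage sketch for `matchBytecodeSimple`, the heuristic used when full compiler
 * artifacts (link/immutable references) are not available. Possible results:
 *
 *   matchBytecodeSimple(onchainBytecode, compiledBytecode)
 *   //   { match: 'exact_match' }
 *   //   { match: 'match', message: 'Matched with different metadata' }
 *   //   { match: 'match', message: 'Onchain bytecode has no metadata' }
 *   //   { match: null, message: 'No match found' }
 */
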
--------------------------------------------------------------------------------