Created
October 29, 2024 07:21
-
-
Save cassc/efbf60f4bb5b15bc4d9e5922d1d6667f to your computer and use it in GitHub Desktop.
Get transaction stats by foundry
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import json | |
import subprocess | |
from web3 import Web3 | |
import os | |
import plotly.express as px | |
import plotly.io as pio | |
import pandas as pd | |
import concurrent.futures | |
# Worker count: one per CPU core (threads are used because the work is
# RPC/subprocess bound; see process_blocks).
num_processes = os.cpu_count()
# Three 20-block samples around anchor block 19036989: ~60k blocks after it,
# ~2.16M blocks before it, and ~100 blocks before it.
block_numbers = list(range(19036989 + 60000, 19036989 + 60000 + 20)) + list(range(19036989-2160000, 19036989-2160000 + 20)) + list(range(19036989 - 100, 19036989-100 + 20))
# Ethereum JSON-RPC endpoint; raises KeyError at import time if unset.
rpc_endpoint = os.environ['ETH_RPC_ALCHEMY_ENDPOINT']
# When True, `--no-rate-limit` is appended to the `cast run` command (see run_tx).
no_rate_limit = True
web3 = Web3(Web3.HTTPProvider(rpc_endpoint))
# Per-block stats JSON files and the scatter-matrix HTML are written here.
output_dir = f'{os.environ["HOME"]}/tmp/stats'
os.makedirs(output_dir, exist_ok=True)
def transactions_in_block(block_number):
    """Return the transaction hashes contained in the given block."""
    return web3.eth.get_block(block_number)['transactions']
def process_trace(stats, data):
    """Aggregate per-transaction EVM metrics from a `cast run -j` trace.

    Appends one dict of metrics to *stats* (mutated in place).

    Parameters:
        stats: list the resulting metrics dict is appended to.
        data: parsed JSON trace; expects data['arena'][i]['trace']['steps'],
              where each step carries 'op', 'depth', 'stack', 'memory'
              (a '0x'-prefixed hex string) and optionally 'contract'.
    """
    # Flatten the steps of every call frame ("arena") into one sequence.
    all_steps = [step
                 for arena in data['arena']
                 for step in arena['trace']['steps']]
    max_stack_size = 0
    max_memory_size = 0
    max_depth = 0
    op_stats = {}
    accessed_addresses = set()
    for step in all_steps:
        op = step['op']
        max_depth = max(max_depth, step['depth'])
        max_stack_size = max(max_stack_size, len(step['stack']))
        # Memory is a '0x...'-prefixed hex string: two hex chars per byte.
        max_memory_size = max(max_memory_size, (len(step['memory']) - 2) / 2)
        op_stats[op] = op_stats.get(op, 0) + 1
        # Bug fix: only record an address when the step actually has one —
        # the previous `add(step.get('contract'))` counted a missing key
        # (None) as an extra accessed address.
        contract = step.get('contract')
        if contract is not None:
            accessed_addresses.add(contract)
    stats.append(dict(max_stack_size=max_stack_size,
                      max_memory_size=max_memory_size,
                      op_stats=op_stats,
                      max_depth=max_depth,
                      num_accessed_addresses=len(accessed_addresses)))
#cast run 0x142a06071c8717c268e0401fe7e97e66cbebacb922e72d625227433bbf356520 -r $ETH_RPC_ALCHEMY_ENDPOINT --decode-internal -j
def run_tx(stats, txhash):
    """Trace *txhash* with `cast run -j` and append its metrics to *stats*.

    txhash may be a hex string or a bytes-like object exposing .hex().
    The trace is taken from the first stdout line that parses as JSON; if
    none does, the transaction is skipped with a message.
    """
    txhash = txhash if isinstance(txhash, str) else f'0x{txhash.hex()}'
    print('Loading traces for:', txhash)
    cmd = ['cast', 'run', txhash, '-r', rpc_endpoint, '--decode-internal', '-j', '--with-state-changes']
    if no_rate_limit:
        cmd.append('--no-rate-limit')
    # subprocess.run replaces the original Popen/communicate pair: the old
    # code unpacked a `stderr` that was always None (stderr was never piped)
    # and called a redundant proc.wait() after communicate(). stderr still
    # passes through to the console as before.
    proc = subprocess.run(cmd, stdout=subprocess.PIPE, text=True)
    print('Trace loaded for:', txhash)
    data = None
    for line in proc.stdout.splitlines():
        try:
            data = json.loads(line)
            break
        except json.JSONDecodeError:
            continue
    if not data:
        print('No trace for transaction:', txhash)
        return
    process_trace(stats, data)
def gen_out_json_filename(block_number):
    """Path of the per-block stats JSON file inside output_dir."""
    filename = 'stats-block-' + str(block_number) + '.json'
    return output_dir + '/' + filename
def run_block(block_number):
    """Compute and persist trace statistics for one block.

    Skips the block entirely when its output file already exists, which
    makes interrupted runs resumable.
    """
    stats_path = gen_out_json_filename(block_number)
    if os.path.exists(stats_path):
        print(f'Stats for block {block_number} already exists at {stats_path}')
        return
    tx_hashes = transactions_in_block(block_number)
    print(f'Processing block {block_number} with {len(tx_hashes)} transactions')
    stats = []
    for tx_hash in tx_hashes:
        run_tx(stats, tx_hash)
    # File layout: {block_number: [per-transaction metric dicts]}.
    with open(stats_path, 'w') as fh:
        json.dump({block_number: stats}, fh)
    print(f'Stats for block {block_number} written to {stats_path}')
def read_stats(stats_file_name):
    """Load one stats JSON file; the name is relative to output_dir."""
    path = f'{output_dir}/{stats_file_name}'
    with open(path, 'r') as fh:
        return json.load(fh)
def plot_stats():
    """Render a scatter matrix of all collected per-transaction metrics
    and write it to output_dir as an HTML file."""
    names = [n for n in os.listdir(output_dir)
             if n.startswith('stats-block-') and n.endswith('.json')]
    per_block = []
    for name in names:
        loaded = read_stats(name)
        if loaded:
            # Each file holds {block_number: [tx_stats, ...]}.
            per_block.append(next(iter(loaded.values())))
    rows = [row for block in per_block for row in block]
    df = pd.DataFrame(rows)
    fig = px.scatter_matrix(
        df,
        dimensions=["max_stack_size", "max_memory_size", "max_depth", "num_accessed_addresses"],
        title="Scatter Matrix of Metrics",
        labels={"max_stack_size": "Max Stack Size", "max_memory_size": "Max Memory Size",
                "max_depth": "Max Depth", "num_accessed_addresses": "Num Accessed Addresses"})
    fig.update_layout(width=1900, height=1050)
    pio.write_html(fig, file=f"{output_dir}/scatter_matrix.html", auto_open=False)
def print_stats():
    """Print median, mean and max summaries of every collected metric,
    with op_stats expanded into one 'op_XX' column per opcode."""
    names = [n for n in os.listdir(output_dir)
             if n.startswith('stats-block-') and n.endswith('.json')]
    per_block = []
    for name in names:
        loaded = read_stats(name)
        if loaded:
            # Each file holds {block_number: [tx_stats, ...]}.
            per_block.append(next(iter(loaded.values())))
    rows = []
    for block in per_block:
        for item in block:
            flat = item.copy()
            del flat["op_stats"]
            # JSON round-tripping turned opcode keys into strings; restore
            # ints and spread counts into per-opcode columns like 'op_54'.
            for op, count in item["op_stats"].items():
                flat[f"op_{int(op):02X}"] = count
            rows.append(flat)
    df = pd.DataFrame(rows)
    with pd.option_context('display.max_rows', None, 'display.max_columns', None):
        for label, series in (('Median of all stats', df.median()),
                              ('Mean of all stats', df.mean()),
                              ('Max of all stats', df.max())):
            print(label)
            print(series)
            print()
def process_blocks(block_numbers):
    """Process every block concurrently, logging (not raising) per-block errors.

    Threads rather than processes: each worker mostly waits on the RPC
    endpoint and on the `cast` subprocess, so the GIL is not a bottleneck.
    """
    with concurrent.futures.ThreadPoolExecutor(max_workers=num_processes) as pool:
        pending = {pool.submit(run_block, number): number for number in block_numbers}
        for done in concurrent.futures.as_completed(pending):
            number = pending[done]
            try:
                done.result()  # re-raises any exception run_block hit
            except Exception as exc:
                print(f'Error processing block {number}:', exc)
# Script entry point: collect stats for all sampled blocks, then emit the
# scatter-matrix plot and the textual summaries.
if __name__ == '__main__':
    process_blocks(block_numbers)
    plot_stats()
    print_stats()
Author
cassc
commented
Oct 29, 2024
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment