Skip to content

Commit 2a27638

Browse files
committed
Add local hive test runner script
1 parent f4ec98f commit 2a27638

File tree

1 file changed

+188
-0
lines changed

1 file changed

+188
-0
lines changed

scripts/run-hive-local.py

Lines changed: 188 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,188 @@
1+
#!/usr/bin/env python3
2+
"""Local hive test runner - parses scenarios from .github/workflows/hive.yml
3+
4+
Usage:
5+
./scripts/run-hive-local.py # List available scenarios
6+
./scripts/run-hive-local.py --all # Run all scenarios
7+
./scripts/run-hive-local.py smoke/genesis # Run specific scenario
8+
./scripts/run-hive-local.py --skip-build smoke/genesis # Skip docker build
9+
"""
10+
11+
import argparse
12+
import os
13+
import subprocess
14+
import sys
15+
import time
16+
from pathlib import Path
17+
18+
import yaml
19+
20+
# Repository root (this script lives in <repo>/scripts/).
BERA_RETH_DIR = Path(__file__).parent.resolve().parent
# CI workflow file that defines the scenario matrix this script mirrors locally.
HIVE_WORKFLOW = BERA_RETH_DIR / ".github/workflows/hive.yml"
# Hive checkout location; override via the HIVE_DIR environment variable.
HIVE_DIR = Path(os.environ.get("HIVE_DIR", Path.home() / "workspace/hive"))
23+
24+
25+
def load_scenarios():
    """Read the scenario matrix out of the hive CI workflow YAML.

    Returns the list under jobs.test.strategy.matrix.scenario, so the
    local runner always matches what CI executes.
    """
    workflow = yaml.safe_load(HIVE_WORKFLOW.read_text())
    return workflow["jobs"]["test"]["strategy"]["matrix"]["scenario"]
29+
30+
31+
def get_filter(scenario):
    """Build hive's --sim.limit filter string for one scenario entry.

    Combines the optional "limit" prefix with the optional "include"
    test names (joined by "|") into the <limit>/<name|name> syntax.
    Returns an empty string when the scenario carries neither key.
    """
    prefix = scenario.get("limit", "")
    names = scenario.get("include", [])
    joined = "|".join(names) if names else ""

    if prefix:
        return f"{prefix}/{joined}" if joined else prefix
    return f"/{joined}" if joined else ""
43+
44+
45+
def list_scenarios(scenarios):
    """Print every scenario from hive.yml, then a usage hint."""
    print("Available scenarios (from hive.yml):")
    for entry in scenarios:
        line = f"  {entry['sim']}"
        entry_limit = entry.get("limit", "")
        if entry_limit:
            line = f"{line}\t{entry_limit}"
        print(line)
    print()
    print("Usage: run-hive-local.py [--skip-build] [--all | <sim> [limit]]")
53+
54+
55+
def find_scenario(scenarios, sim, limit_arg):
    """Locate the matrix entry matching sim (and limit, when given).

    When limit_arg is falsy, only an entry WITHOUT a limit matches, so a
    bare sim name never silently selects a limited variant. Returns the
    matching entry dict, or None when nothing matches.
    """
    for entry in scenarios:
        if entry["sim"] != sim:
            continue
        entry_limit = entry.get("limit", "")
        limit_ok = (entry_limit == limit_arg) if limit_arg else not entry_limit
        if limit_ok:
            return entry
    return None
66+
67+
68+
def run_scenario(sim, filter_str):
    """Run one hive simulator against bera-reth and validate the results.

    Launches hive (from HIVE_DIR), then feeds the newest result JSON
    through .github/assets/hive/parse.py so failures listed in
    expected_failures.yaml do not count as errors.

    Returns True when all failures were expected (or the run produced no
    results without crashing), False on a crash or unexpected failures.
    """
    print()
    print(f"==> Running: {sim}" + (f" (filter: {filter_str})" if filter_str else ""))

    # Fix: run hive via cwd= and an absolute binary path instead of
    # os.chdir(HIVE_DIR), which mutated the working directory for the
    # whole process and every later step of the script.
    args = [
        str(HIVE_DIR / "hive"),
        "--sim",
        sim,
        "--client",
        "bera-reth",
        "--sim.parallelism",
        "8",
    ]
    if filter_str:
        args.extend(["--sim.limit", filter_str])

    start_time = time.time()

    # The hive process returns a non-zero exit code when tests fail, even on
    # expected failures, so we must parse the JSON to check if failures are
    # expected rather than trusting the exit code alone.
    result = subprocess.run(args, cwd=HIVE_DIR)

    # Find JSON result files created after we started.
    logs_dir = HIVE_DIR / "workspace/logs"
    json_files = [
        f
        for f in logs_dir.glob("*.json")
        if f.name != "hive.json" and f.stat().st_mtime > start_time
    ]

    # If hive failed and produced no results at all, hive itself crashed.
    if result.returncode != 0 and not json_files:
        print(f"Hive crashed with exit code {result.returncode}")
        return False

    if not json_files:
        print("No JSON results found")
        return True

    # Get the newest json file (note: don't run this script in parallel!)
    json_file = max(json_files, key=lambda p: p.stat().st_mtime)
    print(f"Validating: {json_file.name}")

    hive_assets = BERA_RETH_DIR / ".github/assets/hive"
    result = subprocess.run(
        [
            "python3",
            str(hive_assets / "parse.py"),
            str(json_file),
            "--exclusion",
            str(hive_assets / "expected_failures.yaml"),
            "--ignored",
            str(hive_assets / "ignored_tests.yaml"),
        ]
    )
    return result.returncode == 0
113+
114+
115+
def main():
    """CLI entry point: parse args, check prerequisites, run scenarios.

    Returns a process exit code: 0 when everything passed (or scenarios
    were only listed), 1 on missing hive, unknown scenario, or
    unexpected test failures.
    """
    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument("--skip-build", action="store_true", help="Skip Docker build")
    parser.add_argument("--all", action="store_true", help="Run all scenarios")
    parser.add_argument("sim", nargs="?", help="Simulator to run")
    parser.add_argument("limit", nargs="?", help="Limit filter")
    opts = parser.parse_args()

    scenarios = load_scenarios()

    # No scenario selected: just list what's available and exit cleanly.
    if not (opts.sim or opts.all):
        list_scenarios(scenarios)
        return 0

    # Check prerequisites
    print("==> Checking prerequisites...")
    if not (HIVE_DIR / "hive.go").exists():
        print(f"Error: Hive not found at {HIVE_DIR}")
        print("Set HIVE_DIR or clone hive there")
        return 1

    # Build hive if the binary does not exist yet.
    if not (HIVE_DIR / "hive").exists():
        print("==> Building hive...")
        subprocess.run(["go", "build", "-o", "hive", "."], cwd=HIVE_DIR, check=True)

    # Build the bera-reth Docker image unless explicitly skipped.
    if not opts.skip_build:
        print("==> Building bera-reth Docker image...")
        docker_cmd = [
            "docker",
            "build",
            "-t",
            "ghcr.io/berachain/bera-reth:nightly",
            "-f",
            str(BERA_RETH_DIR / ".github/assets/hive/Dockerfile"),
            "--build-arg",
            "CARGO_BIN=bera-reth",
            "--build-arg",
            "BUILD_PROFILE=hivetests",
            str(BERA_RETH_DIR),
        ]
        subprocess.run(docker_cmd, check=True)

    # Run the selected scenario(s), tracking any unexpected failure.
    failed = False
    if opts.all:
        print("==> Running all scenarios...")
        for entry in scenarios:
            if not run_scenario(entry["sim"], get_filter(entry)):
                failed = True
    else:
        scenario = find_scenario(scenarios, opts.sim, opts.limit)
        if not scenario:
            print(f"Error: Scenario not found: {opts.sim} {opts.limit or ''}")
            return 1
        if not run_scenario(opts.sim, get_filter(scenario)):
            failed = True

    print()
    if failed:
        print("==> FAILED: Some scenarios had unexpected failures")
        return 1
    print("==> All scenarios passed!")
    return 0
185+
186+
187+
# Script entry point: propagate main()'s return value as the exit status.
if __name__ == "__main__":
    raise SystemExit(main())

0 commit comments

Comments (0)