Init Push

This commit is contained in:
2026-04-19 16:14:05 +08:00
commit 5b6bd1ac23
54 changed files with 80296 additions and 0 deletions

View File

@@ -0,0 +1,249 @@
#!/usr/bin/env python3
"""Validate SLWChipVerify's one-click flow on popular open-source GitHub CPU projects."""
from __future__ import annotations
import argparse
import json
import shutil
import subprocess
import sys
from dataclasses import dataclass
from datetime import datetime, timezone
from pathlib import Path
from typing import Any
@dataclass
class ProjectCase:
    """Repository information and simulation parameters for one project to verify."""
    # Human-readable project identifier; also used as the clone/output directory name.
    name: str
    # Git clone URL of the repository.
    repo: str
    # GitHub star count (informational; copied verbatim into the report).
    stars: int
    # Subdirectory inside the clone that contains the RTL sources.
    rtl_subdir: str
    # Name of the top-level module passed to one_click_verify.py via --top.
    top: str
    # Number of simulation cycles to run.
    cycles: int
    # Clock period for the generated testbench (units defined by one_click_verify.py — TODO confirm).
    period: int
    # Number of cycles to hold reset asserted.
    reset_cycles: int
def _run(cmd: list[str], cwd: Path | None = None) -> subprocess.CompletedProcess[str]:
"""执行外部命令并收集标准输出/标准错误。"""
return subprocess.run(
cmd,
cwd=str(cwd) if cwd is not None else None,
text=True,
capture_output=True,
check=False,
)
def _load_cases(path: Path) -> list[ProjectCase]:
    """Load the project case list from a JSON file.

    The file must contain a JSON array of objects; every field is coerced
    to its expected type (str/int) so a loosely-typed JSON still loads.
    """
    entries: list[dict[str, Any]] = json.loads(path.read_text(encoding="utf-8"))
    return [
        ProjectCase(
            name=str(entry["name"]),
            repo=str(entry["repo"]),
            stars=int(entry["stars"]),
            rtl_subdir=str(entry["rtl_subdir"]),
            top=str(entry["top"]),
            cycles=int(entry["cycles"]),
            period=int(entry["period"]),
            reset_cycles=int(entry["reset_cycles"]),
        )
        for entry in entries
    ]
def _clone_or_update(case: ProjectCase, clone_dir: Path) -> tuple[bool, str]:
    """Clone the repository, or shallow-fetch and hard-reset an existing clone.

    Returns (True, "cloned"/"updated") on success, or (False, diagnostic)
    with the failing git command's output on error.
    """

    def failure(proc: subprocess.CompletedProcess[str]) -> tuple[bool, str]:
        # Prefer stderr for the diagnostic; fall back to stdout when empty.
        return False, proc.stderr.strip() or proc.stdout.strip()

    if (clone_dir / ".git").exists():
        fetch = _run(["git", "-C", str(clone_dir), "fetch", "--depth", "1", "origin"])
        if fetch.returncode != 0:
            return failure(fetch)
        reset = _run(["git", "-C", str(clone_dir), "reset", "--hard", "origin/HEAD"])
        if reset.returncode != 0:
            return failure(reset)
        return True, "updated"

    # A leftover non-git directory would make `git clone` refuse; remove it first.
    if clone_dir.exists():
        shutil.rmtree(clone_dir)
    clone_dir.parent.mkdir(parents=True, exist_ok=True)
    clone = _run(["git", "clone", "--depth", "1", case.repo, str(clone_dir)])
    if clone.returncode != 0:
        return failure(clone)
    return True, "cloned"
def _collect_artifacts(out_dir: Path, run_log: Path) -> dict[str, str]:
    """Gather the artifact paths one_click_verify.py is expected to emit.

    Missing artifacts are recorded as empty strings so the summary JSON
    always has a stable schema.
    """
    sim_log = out_dir / "sim_output.log"
    sim_binary = out_dir / "auto_sim.out"
    return {
        "tb": str(next(iter(sorted(out_dir.glob("tb_*_auto.v"))), "")),
        "vcd": str(next(iter(sorted(out_dir.glob("*_auto.vcd"))), "")),
        "sim_log": str(sim_log) if sim_log.exists() else "",
        "sim_binary": str(sim_binary) if sim_binary.exists() else "",
        "run_log": str(run_log),
    }


def main() -> int:
    """Run one-click verification for every listed project and write a JSON summary.

    Returns:
        0 when every case passes, 1 when at least one case fails, and 2 when
        the one_click_verify.py tool cannot be found under --repo-root.
    """
    script_dir = Path(__file__).resolve().parent
    # Default repo root is three levels up from this script; fall back to the
    # script dir when the path is shallower than expected (avoids IndexError —
    # --repo-root can always override).
    ancestors = script_dir.parents
    default_repo_root = ancestors[2] if len(ancestors) > 2 else script_dir
    parser = argparse.ArgumentParser(description="运行 SLWChipVerify GitHub CPU 验证示例")
    parser.add_argument(
        "--repo-root",
        default=str(default_repo_root),
        help="Path to Verilog-Learn repository root",
    )
    parser.add_argument(
        "--projects",
        default=str(script_dir / "projects.json"),
        help="Path to project case JSON",
    )
    parser.add_argument(
        "--workspace",
        default="/tmp/slwchipverify_github_cpu_example",
        help="Temporary clone workspace",
    )
    parser.add_argument(
        "--results-dir",
        default=str(script_dir / "results"),
        help="Directory to store per-case outputs and summary",
    )
    parser.add_argument(
        "--python",
        default=sys.executable,
        help="Python executable used to run one_click_verify.py",
    )
    args = parser.parse_args()

    repo_root = Path(args.repo_root).expanduser().resolve()
    one_click = repo_root / "slwchipverify" / "one_click_verify.py"
    if not one_click.exists():
        print(
            f"[SLWChipVerify][github-example] ERROR: one_click script not found: {one_click}",
            file=sys.stderr,
        )
        return 2

    project_file = Path(args.projects).expanduser().resolve()
    cases = _load_cases(project_file)
    workspace = Path(args.workspace).expanduser().resolve()
    clones_dir = workspace / "clones"
    results_dir = Path(args.results_dir).expanduser().resolve()
    results_dir.mkdir(parents=True, exist_ok=True)

    summary: dict[str, Any] = {
        "timestamp_utc": datetime.now(timezone.utc).isoformat(),
        "repo_root": str(repo_root),
        "workspace": str(workspace),
        "projects_file": str(project_file),
        "tool": str(one_click),
        "results": [],
    }
    pass_count = 0
    fail_count = 0

    for case in cases:
        # Bug fix: was "\\n" (literal backslash-n printed); now a real newline.
        print(f"\n=== Case: {case.name} ===")
        clone_dir = clones_dir / case.name
        ok, clone_msg = _clone_or_update(case, clone_dir)
        if not ok:
            fail_count += 1
            summary["results"].append(
                {
                    "name": case.name,
                    "repo": case.repo,
                    "stars": case.stars,
                    "status": "fail",
                    "stage": "clone",
                    "error": clone_msg,
                }
            )
            print(f"[SLWChipVerify][github-example] FAIL clone: {clone_msg}")
            continue

        target_dir = (clone_dir / case.rtl_subdir).resolve()
        out_dir = results_dir / case.name
        # Start each case from a clean output directory so stale artifacts
        # from a previous run cannot masquerade as fresh results.
        if out_dir.exists():
            shutil.rmtree(out_dir)
        out_dir.mkdir(parents=True, exist_ok=True)

        cmd = [
            args.python,
            str(one_click),
            "--dir",
            str(target_dir),
            "--top",
            case.top,
            "--out",
            str(out_dir),
            "--cycles",
            str(case.cycles),
            "--period",
            str(case.period),
            "--reset-cycles",
            str(case.reset_cycles),
        ]
        run = _run(cmd)

        # Persist the exact command and its full output for post-mortem debugging.
        run_log = out_dir / "run.log"
        run_log.write_text(
            "COMMAND:\n"
            + " ".join(cmd)
            + "\n\nSTDOUT:\n"
            + run.stdout
            + "\n\nSTDERR:\n"
            + run.stderr,
            encoding="utf-8",
        )
        artifacts = _collect_artifacts(out_dir, run_log)

        status = "pass" if run.returncode == 0 else "fail"
        if status == "pass":
            pass_count += 1
            print("[SLWChipVerify][github-example] PASS")
        else:
            fail_count += 1
            print(f"[SLWChipVerify][github-example] FAIL (exit={run.returncode})")

        result = {
            "name": case.name,
            "repo": case.repo,
            "stars": case.stars,
            "status": status,
            "clone_status": clone_msg,
            "rtl_dir": str(target_dir),
            "top": case.top,
            "return_code": run.returncode,
            "artifacts": artifacts,
        }
        if status == "fail":
            # Keep only the tail so the summary JSON stays a manageable size.
            result["error_tail"] = (run.stderr or run.stdout)[-2000:]
        summary["results"].append(result)

    summary["pass_count"] = pass_count
    summary["fail_count"] = fail_count
    summary["total"] = len(cases)
    summary_path = results_dir / "github_cpu_validation_summary.json"
    summary_path.write_text(json.dumps(summary, ensure_ascii=False, indent=2), encoding="utf-8")
    # Bug fix: was "\\n" (literal backslash-n printed); now a real newline.
    print("\n=== Summary ===")
    print(f"Passed: {pass_count}")
    print(f"Failed: {fail_count}")
    print(f"Summary: {summary_path}")
    return 0 if fail_count == 0 else 1
if __name__ == "__main__":
    # Propagate main()'s exit status (0/1/2) to the invoking shell.
    raise SystemExit(main())