1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109
| """Phase 0 — Pre-flight check.""" import json, os, sys, time
PASS = "\033[32m✓\033[0m" FAIL = "\033[31m✗\033[0m" WARN = "\033[33m!\033[0m"
def _p(tag, msg): print(f" {tag} {msg}")
def check_imports():
    """Verify the third-party modules the pipeline needs are importable.

    Prints one status line per module and returns True only when every
    module imports cleanly.
    """
    print("[1/5] Python dependencies")
    failures = 0
    for module_name in ("openai", "requests", "dotenv"):
        try:
            __import__(module_name)
        except ImportError as exc:
            _p(FAIL, f"import {module_name}: {exc}")
            failures += 1
        else:
            _p(PASS, f"import {module_name}")
    return failures == 0
def check_env():
    """Check that required environment variables hold real (non-placeholder) values.

    Loads .env via python-dotenv when available; a missing dotenv package
    is tolerated silently. Returns True only when every key is set.
    """
    print("[2/5] Environment variables (.env)")
    try:
        from dotenv import load_dotenv
        load_dotenv()
    except ImportError:
        pass  # best-effort: rely on the ambient environment instead
    all_set = True
    for key in ("LLM_API_KEY", "EDGAR_USER_AGENT"):
        value = os.environ.get(key, "")
        # Treat obvious template leftovers the same as an unset variable.
        looks_placeholder = (not value) or ("REPLACE" in value) or ("example.com" in value)
        if looks_placeholder:
            _p(FAIL, f"{key} unset or placeholder")
            all_set = False
        else:
            _p(PASS, f"{key} set")
    return all_set
def check_edgar():
    """Probe SEC EDGAR reachability using the configured User-Agent.

    Returns True when the submissions endpoint answers HTTP 200 with a
    JSON payload containing "filings"; returns False on any HTTP error,
    network failure, or undecodable body.
    """
    print("[3/5] SEC EDGAR reachability")
    import requests
    ua = os.environ.get("EDGAR_USER_AGENT", "")
    url = "https://data.sec.gov/submissions/CIK0000320193.json"
    try:
        r = requests.get(url, headers={"User-Agent": ua}, timeout=15)
        if r.status_code == 200 and "filings" in r.json():
            _p(PASS, f"EDGAR 200 ({len(r.content)//1024} KB)")
            return True
        _p(FAIL, f"EDGAR HTTP {r.status_code} — check EDGAR_USER_AGENT")
    except (requests.RequestException, ValueError) as e:
        # BUGFIX: a pre-flight check must report failure, not crash the
        # runner. Connection/timeout errors land here as RequestException;
        # a non-JSON body makes r.json() raise a ValueError subclass.
        _p(FAIL, f"EDGAR request failed: {e}")
    return False
def check_llm():
    """Smoke-test each distinct configured model with a one-token chat call.

    Roles (MODEL_GEN / MODEL_REASON / MODEL_JUDGE) may share a model; each
    distinct model is called once, attributed to the first role naming it.
    Returns True only when every call succeeds.
    """
    print("[4/5] LLM endpoint smoke calls")
    import openai
    client = openai.OpenAI(
        api_key=os.environ.get("LLM_API_KEY", ""),
        base_url=os.environ.get("LLM_BASE_URL", "https://api.deepseek.com/v1"),
    )
    role_defaults = (
        ("MODEL_GEN", "deepseek-chat"),
        ("MODEL_REASON", "deepseek-reasoner"),
        ("MODEL_JUDGE", "deepseek-chat"),
    )
    # Deduplicate model names, keeping the first role that mapped to each.
    distinct = {}
    for role, default in role_defaults:
        name = os.environ.get(role, default)
        if name not in distinct:
            distinct[name] = role
    all_ok = True
    for name, role in distinct.items():
        try:
            started = time.time()
            client.chat.completions.create(
                model=name,
                messages=[{"role": "user", "content": "Reply with: pong"}],
                max_tokens=5,
                temperature=0.0,
            )
            elapsed = time.time() - started
            _p(PASS, f"{role}={name} ({elapsed:.1f}s)")
        except Exception as e:
            # Broad catch is deliberate: any SDK/auth/network error means
            # this model is unusable for the run.
            _p(FAIL, f"{role}={name}: {e}")
            all_ok = False
    return all_ok
def check_json_mode():
    """Confirm the generation model honors response_format=json_object.

    Returns True when the model responds and its content parses as JSON;
    False on any API error or unparsable content.
    """
    print("[5/5] response_format=json_object support")
    import openai
    client = openai.OpenAI(
        api_key=os.environ.get("LLM_API_KEY", ""),
        base_url=os.environ.get("LLM_BASE_URL", "https://api.deepseek.com/v1"),
    )
    model = os.environ.get("MODEL_GEN", "deepseek-chat")
    messages = [
        {"role": "system", "content": "Return strict JSON only."},
        {"role": "user", "content": 'Return JSON: {"ok": true}'},
    ]
    try:
        response = client.chat.completions.create(
            model=model,
            messages=messages,
            response_format={"type": "json_object"},
            max_tokens=20,
            temperature=0.0,
        )
        # Parsing IS the assertion: invalid JSON raises and is reported
        # as a failure by the except branch below.
        json.loads(response.choices[0].message.content)
        _p(PASS, f"{model} JSON mode works")
        return True
    except Exception as e:
        _p(FAIL, f"{model} JSON mode failed: {e}")
        return False
def main():
    """Run all five pre-flight checks, print a summary, and return the exit code.

    Returns 0 when every check passes, 1 otherwise.
    """
    checks = (
        ("deps", check_imports),
        ("env", check_env),
        ("edgar", check_edgar),
        ("llm", check_llm),
        ("json_mode", check_json_mode),
    )
    results = [(name, fn()) for name, fn in checks]
    # Booleans sum as 0/1, so this counts the passing checks.
    passed = sum(ok for _, ok in results)
    for name, ok in results:
        print(f" {PASS if ok else FAIL} {name}")
    print(f"\n{passed}/{len(results)} checks passed.")
    return 0 if passed == len(results) else 1
if __name__ == "__main__": sys.exit(main())
|