updated ai

This commit is contained in:
david rice
2026-04-08 14:19:31 +01:00
parent d0e23c4e01
commit 15dc295ae1
6 changed files with 175 additions and 59 deletions

View File

@@ -12,13 +12,17 @@ Usage:
import argparse
import sys
from datetime import datetime
from pathlib import Path
import anthropic
import requests
from csv_preprocessor import analyze_file, analyze_lp_file, group_captures, ChannelMetrics, LPMetrics
DATA_DIR = Path(__file__).parent / "data"
DATA_DIR = Path(__file__).parent / "data"
ANALYSIS_LOG = Path(__file__).parent / "analysis_log.txt"
DISPLAY_URL = "http://192.168.45.8:5000/display"
CLAUDE_MODEL = "claude-opus-4-6"
SYSTEM_PROMPT = (
@@ -90,6 +94,62 @@ def build_prompt(all_summaries: list[str]) -> str:
# Main
# ---------------------------------------------------------------------------
def run_analysis(last: int = 10) -> None:
    """
    Analyse the most recent `last` captures with Claude and publish the report.

    Called by mgmt_worker after each file transfer. The Claude report is
    printed to the console, appended to ANALYSIS_LOG, and POSTed
    (best-effort) to DISPLAY_URL.

    Parameters
    ----------
    last : int
        Number of most-recent capture groups to analyse (default 10).
    """
    groups = group_captures(DATA_DIR)
    if not groups:
        print("[ANALYSIS] No captures found.")
        return
    keys = sorted(groups.keys())[-last:]
    print(f"\n[ANALYSIS] Processing {len(keys)} most-recent capture(s)...")
    all_summaries: list[str] = []
    # Each key is a (timestamp, capture-number) pair; summarise each capture.
    # NOTE: use distinct loop names (cap_ts/cap_num) so `ts` below can hold
    # the log timestamp without reusing a stale loop variable.
    for cap_ts, cap_num in keys:
        summary_text, _ = process_capture(cap_ts, cap_num, groups[(cap_ts, cap_num)])
        all_summaries.append(summary_text)
    prompt = build_prompt(all_summaries)
    print(f"[ANALYSIS] Sending {len(prompt):,} chars to {CLAUDE_MODEL}...")
    client = anthropic.Anthropic()
    message = client.messages.create(
        model = CLAUDE_MODEL,
        max_tokens = 1024,
        system = SYSTEM_PROMPT,
        messages = [{"role": "user", "content": prompt}],
    )
    analysis = message.content[0].text
    token_line = f"Tokens: {message.usage.input_tokens} in / {message.usage.output_tokens} out"
    # ── Console ───────────────────────────────────────────────────────────
    separator = "=" * 60
    print(f"\n{separator}")
    print("CLAUDE ANALYSIS")
    print(separator)
    print(analysis)
    print(f"({token_line})")
    print(separator + "\n")
    # ── Append to log file ────────────────────────────────────────────────
    ts = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    with open(ANALYSIS_LOG, "a", encoding="utf-8") as f:
        # BUGFIX: the capture-number range previously had no separator
        # (e.g. "captures 00050010"); write it as "0005-0010".
        f.write(f"\n{separator}\n{ts} — captures {keys[0][1]:04d}-{keys[-1][1]:04d}\n{separator}\n")
        f.write(analysis)
        f.write(f"\n({token_line})\n")
    print(f"[ANALYSIS] Report appended to {ANALYSIS_LOG}")
    # ── Send to display ───────────────────────────────────────────────────
    # Best-effort: a display outage must never break the analysis pipeline.
    try:
        requests.post(DISPLAY_URL, json={"text": analysis}, timeout=5)
        print("[ANALYSIS] Report sent to display.")
    except Exception as e:
        print(f"[ANALYSIS] Display send failed: {e}")
def main() -> None:
parser = argparse.ArgumentParser(description="Analyse MIPI CSV captures with Claude")
parser.add_argument("--last", type=int, default=None, metavar="N",
@@ -147,14 +207,30 @@ def main() -> None:
system = SYSTEM_PROMPT,
messages = [{"role": "user", "content": prompt}],
)
analysis = message.content[0].text
analysis = message.content[0].text
token_line = f"Tokens: {message.usage.input_tokens} in / {message.usage.output_tokens} out"
separator = "=" * 60
ts = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
print("=" * 60)
print("CLAUDE ANALYSIS")
print("=" * 60)
# Console
print(f"\n{separator}\nCLAUDE ANALYSIS\n{separator}")
print(analysis)
print()
print(f"(Tokens used: {message.usage.input_tokens} in / {message.usage.output_tokens} out)")
print(f"({token_line})")
print(separator)
# Log file
with open(ANALYSIS_LOG, "a", encoding="utf-8") as f:
f.write(f"\n{separator}\n{ts}\n{separator}\n")
f.write(analysis)
f.write(f"\n({token_line})\n")
print(f"\nReport appended to {ANALYSIS_LOG}")
# Display
try:
requests.post(DISPLAY_URL, json={"text": analysis}, timeout=5)
print("Report sent to display.")
except Exception as e:
print(f"Display send failed: {e}")
if __name__ == "__main__":