This commit is contained in:
david rice
2026-04-09 10:29:53 +01:00
parent be7658b54d
commit 82e6efbcad
7 changed files with 488 additions and 71 deletions

View File

@@ -16,6 +16,7 @@ File naming convention: YYYYMMDD_HHMMSS_{sig|proto|lp}_{NNNN}_{clk|dat}.csv
"""
import csv
import json
import re
import numpy as np
from dataclasses import dataclass, field
@@ -42,10 +43,14 @@ LP11_HIGH_V = 0.8 # V — single-ended voltage above this → LP-11 (bot
LP_LOW_V = 0.25           # V — single-ended voltage below this → LP-00 or LP-01 pin low
# Note: probe loading can shift LP-low from true 0 V to ~100 mV; 0.25 V clears that offset
# The rolling-std gate (HS_OSC_STD_V) prevents HS minima near 0 V being called LP-low.
LP11_SPEC_MIN_V = 1.0     # V — LP-11 minimum voltage spec
LP11_SPEC_MAX_V = 1.45    # V — LP-11 maximum voltage spec
LP_LOW_DUR_MIN_NS = 50.0  # ns — minimum LP-low duration per D-PHY spec (LP-01 + LP-00 combined)
HS_OSC_STD_V = 0.045      # V — rolling-std threshold above which a region is classified as HS

# Flicker detection threshold
# LP-low plateau below this → SoT sequence too brief for receiver to detect → flicker risk
FLICKER_LP_LOW_MAX_NS = 50.0  # ns
@dataclass
@@ -420,6 +425,46 @@ def analyze_1v8_file(path: Path) -> "V1V8Metrics":
)
@dataclass
class RegDump:
    """DSI controller register snapshot read from device via memtool."""
    timestamp:   str   # capture timestamp, YYYYMMDD_HHMMSS
    capture_num: int   # sequential capture number within the run
    commands:    list  # memtool command strings that were run on the device
    registers:   list  # [{"address": "0x...", "value": "0x...", "name": "..."}, ...]
    errors:      list  # any device-side errors reported during the dump

    def summary(self) -> str:
        """Render a human-readable multi-line report of this register snapshot."""
        out = [f"Capture {self.capture_num:04d} {self.timestamp} [reg/dsi_phy]"]
        out.extend(f"  WARNING: {e}" for e in self.errors)
        if not self.registers:
            # Nothing was read back — report that and stop.
            out.append("  No registers captured")
            return "\n".join(out)
        out.append(f"  Commands : {'; '.join(self.commands)}")
        for reg in self.registers:
            label = f" ({reg['name']})" if reg.get("name") else ""
            out.append(f"  {reg['address']} : {reg['value']}{label}")
        return "\n".join(out)
def analyze_reg_file(path: Path) -> "RegDump":
    """Read a register JSON file saved by mipi_test._fetch_registers().

    path : file named YYYYMMDD_HHMMSS_reg_NNNN.json.
    Returns a RegDump; raises ValueError when the filename does not follow
    the register-dump naming convention.
    """
    # fullmatch (not match) so trailing junk after ".json" is rejected too.
    m = re.fullmatch(r"(\d{8}_\d{6})_reg_(\d+)\.json", path.name, re.IGNORECASE)
    if not m:
        raise ValueError(f"Filename does not match register pattern: {path.name}")
    timestamp, cap_str = m.groups()
    data = json.loads(path.read_text())
    # Normalize JSON null to [] for every list field (the original did this only
    # for "errors"), so downstream iteration never trips on NoneType.
    return RegDump(
        timestamp   = timestamp,
        capture_num = int(cap_str),
        commands    = data.get("commands") or [],
        registers   = data.get("registers") or [],
        errors      = data.get("errors") or [],
    )
def group_captures(data_dir: Path) -> dict[tuple[str, int], dict[str, Path]]:
    """
    Scan data_dir and group CSV files by (timestamp, capture_number).

    Example key: ("20260408_111448", 1)
    Example value: {"sig_clk": Path(...), "sig_dat": ..., "proto_clk": ..., "proto_dat": ...}
    """
    csv_re = re.compile(
        r"(\d{8}_\d{6})_(sig|proto|lp|pwr)_(\d+)_(clk|dat|1v8)\.csv", re.IGNORECASE
    )
    reg_re = re.compile(
        r"(\d{8}_\d{6})_reg_(\d+)\.json", re.IGNORECASE
    )
    grouped: dict[tuple[str, int], dict[str, Path]] = {}

    # CSV waveform/protocol files: keyed by "{filetype}_{channel}".
    for path in sorted(data_dir.glob("*.csv")):
        hit = csv_re.match(path.name)
        if hit is None:
            continue
        ts, ftype, num, chan = hit.groups()
        grouped.setdefault((ts, int(num)), {})[f"{ftype}_{chan}"] = path

    # Register-dump JSON files: stored under the fixed key "reg".
    for path in sorted(data_dir.glob("*.json")):
        hit = reg_re.match(path.name)
        if hit is None:
            continue
        ts, num = hit.groups()
        grouped.setdefault((ts, int(num)), {})["reg"] = path

    return grouped
@@ -470,6 +528,11 @@ class LPMetrics:
lp_transition_valid: bool # LP-11 → LP-low → HS sequence present
# Flicker detection
# A capture is flagged when the LP-low plateau is absent or shorter than
# FLICKER_LP_LOW_MAX_NS. Normal captures show ~340 ns; flicker shows ~0 ns.
flicker_suspect: bool = False
warnings: list = field(default_factory=list)
def summary(self) -> str:
@@ -501,6 +564,8 @@ class LPMetrics:
+ (f" avg {self.hs_burst_dur_ns:.0f} ns" if self.hs_burst_dur_ns else ""))
if self.hs_amplitude_mv is not None:
lines.append(f" HS amplitude : {self.hs_amplitude_mv:.0f} mV (single-ended p-p/2)")
if self.flicker_suspect:
lines.append(f" *** FLICKER SUSPECT: LP-low plateau absent or < {FLICKER_LP_LOW_MAX_NS:.0f} ns ***")
for w in self.warnings:
lines.append(f" WARNING: {w}")
return "\n".join(lines)
@@ -648,6 +713,15 @@ def analyze_lp_file(path: Path) -> "LPMetrics":
if n_hs_bursts == 0:
warnings.append("No HS bursts detected after LP transition")
# Flicker suspect: LP→HS sequence detected but LP-low plateau is absent or too short.
# Normal captures show ~340 ns; the confirmed flicker capture showed 0 ns.
# Only flag DAT lane (CLK is continuous HS — LP states not expected).
flicker_suspect = (
channel == "dat"
and lp_transition_valid
and (lp_low_duration_ns is None or lp_low_duration_ns < FLICKER_LP_LOW_MAX_NS)
)
return LPMetrics(
timestamp = timestamp,
capture_num = capture_num,
@@ -663,6 +737,7 @@ def analyze_lp_file(path: Path) -> "LPMetrics":
hs_burst_dur_ns = hs_burst_dur_ns,
hs_amplitude_mv = hs_amplitude_mv,
lp_transition_valid = lp_transition_valid,
flicker_suspect = flicker_suspect,
warnings = warnings,
)