First commit of Claude's rework: Django backend + vanilla JS frontend
apps/tools/__init__.py (new file, 0 lines)
apps/tools/admin.py (new file, 45 lines)
@@ -0,0 +1,45 @@
from django.contrib import admin
from django.utils.translation import gettext_lazy as _

from .models import ChronographAnalysis, Shot, ShotGroup


class ShotInline(admin.TabularInline):
    model = Shot
    extra = 0
    readonly_fields = ('shot_number',)
    fields = ('shot_number', 'velocity_fps', 'notes')


class ShotGroupInline(admin.TabularInline):
    model = ShotGroup
    extra = 0
    show_change_link = True
    fields = ('label', 'distance_m', 'order', 'ammo_batch', 'notes')
    raw_id_fields = ('ammo_batch',)


@admin.register(ChronographAnalysis)
class ChronographAnalysisAdmin(admin.ModelAdmin):
    list_display = ('name', 'user', 'date', 'created_at')
    search_fields = ('name', 'user__email', 'notes')
    readonly_fields = ('created_at', 'updated_at')
    raw_id_fields = ('user',)
    inlines = [ShotGroupInline]


@admin.register(ShotGroup)
class ShotGroupAdmin(admin.ModelAdmin):
    list_display = ('label', 'analysis', 'distance_m', 'order', 'ammo_batch')
    search_fields = ('label', 'analysis__name')
    raw_id_fields = ('analysis', 'ammo_batch')
    inlines = [ShotInline]


@admin.register(Shot)
class ShotAdmin(admin.ModelAdmin):
    list_display = ('shot_number', 'group', 'velocity_fps', 'notes')
    search_fields = ('group__label', 'group__analysis__name')
    readonly_fields = ('shot_number',)
    raw_id_fields = ('group',)
    ordering = ('group', 'shot_number')
apps/tools/analyzer/__init__.py (new file, 0 lines)
apps/tools/analyzer/charts.py (new file, 82 lines)
@@ -0,0 +1,82 @@
import io
import base64
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import pandas as pd


def render_group_charts(groups: list, y_min: float, y_max: float) -> list:
    padding_fraction = 0.05
    y_range = y_max - y_min
    if y_range == 0:
        y_pad = 1.0
    else:
        y_pad = y_range * padding_fraction

    charts = []
    for i, g in enumerate(groups):
        fig, ax = plt.subplots(figsize=(9, 4))

        x = g["time"]
        y = g["speed"]

        ax.plot(x, y, marker="o", linewidth=1.5, markersize=5, color="#1f77b4")

        ax.set_ylim(y_min - y_pad, y_max + y_pad)

        ax.xaxis.set_major_formatter(mdates.DateFormatter("%H:%M:%S"))
        fig.autofmt_xdate(rotation=30)

        ax.set_title(f"Group {i + 1} — {len(g)} shot(s)")
        ax.set_xlabel("Time of Day")
        ax.set_ylabel("Speed")
        ax.grid(True, alpha=0.3)
        fig.tight_layout()

        buf = io.BytesIO()
        fig.savefig(buf, format="png", dpi=100)
        plt.close(fig)
        buf.seek(0)
        charts.append(base64.b64encode(buf.read()).decode("utf-8"))

    return charts


def render_overview_chart(group_stats: list) -> str:
    """Dual-axis line chart: avg speed and avg std dev per group."""
    indices = [s["group_index"] for s in group_stats]
    speeds = [s["mean_speed"] for s in group_stats]
    stds = [s["std_speed"] if s["std_speed"] is not None else 0.0 for s in group_stats]

    fig, ax1 = plt.subplots(figsize=(7, 3))

    color_speed = "#1f77b4"
    color_std = "#d62728"

    ax1.plot(indices, speeds, marker="o", linewidth=1.8, markersize=5,
             color=color_speed, label="Avg speed")
    ax1.set_xlabel("Group")
    ax1.set_ylabel("Avg speed", color=color_speed)
    ax1.tick_params(axis="y", labelcolor=color_speed)
    ax1.set_xticks(indices)

    ax2 = ax1.twinx()
    ax2.plot(indices, stds, marker="s", linewidth=1.8, markersize=5,
             color=color_std, linestyle="--", label="Avg std dev")
    ax2.set_ylabel("Avg std dev", color=color_std)
    ax2.tick_params(axis="y", labelcolor=color_std)

    lines1, labels1 = ax1.get_legend_handles_labels()
    lines2, labels2 = ax2.get_legend_handles_labels()
    ax1.legend(lines1 + lines2, labels1 + labels2, fontsize=8, loc="upper right")

    ax1.grid(True, alpha=0.3)
    fig.tight_layout()

    buf = io.BytesIO()
    fig.savefig(buf, format="png", dpi=100)
    plt.close(fig)
    buf.seek(0)
    return base64.b64encode(buf.read()).decode("utf-8")
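
Usage sketch (illustrative, not part of this commit): render_group_charts returns one base64-encoded PNG per group. The toy DataFrame below mimics the parser output (columns "speed" and "time"); the output file name group_1.png is arbitrary.

    import base64
    from datetime import datetime, timedelta

    import pandas as pd

    from apps.tools.analyzer.charts import render_group_charts

    # Five shots, one second apart, in the column layout the parser produces.
    times = [datetime(2026, 3, 24, 10, 0, 0) + timedelta(seconds=i) for i in range(5)]
    group = pd.DataFrame({"speed": [2650.0, 2648.0, 2652.0, 2647.0, 2651.0], "time": times})

    # One base64-encoded PNG per group; decode and write to disk for inspection.
    png_b64 = render_group_charts([group], y_min=2640.0, y_max=2660.0)[0]
    with open("group_1.png", "wb") as fh:
        fh.write(base64.b64decode(png_b64))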
apps/tools/analyzer/grouper.py (new file, 60 lines)
@@ -0,0 +1,60 @@
from datetime import timedelta
import pandas as pd

OUTLIER_FACTOR = 5


def detect_groups(df: pd.DataFrame, outlier_factor: float = OUTLIER_FACTOR,
                  manual_splits: list | None = None,
                  forced_splits: list | None = None) -> list:
    """Split shots into groups.

    forced_splits: when provided, ONLY these split positions are used — auto-detection
    is bypassed entirely. Use this for user-defined groupings from the visual editor.

    manual_splits: added on top of auto-detected splits (when forced_splits is None).
    Both sets of split positions are merged and deduplicated.
    """
    if len(df) <= 1:
        return [df]

    def _build_groups(all_splits):
        if not all_splits:
            return [df]
        groups = []
        prev = 0
        for pos in all_splits:
            group = df.iloc[prev:pos]
            if len(group) > 0:
                groups.append(group.reset_index(drop=True))
            prev = pos
        last = df.iloc[prev:]
        if len(last) > 0:
            groups.append(last.reset_index(drop=True))
        return groups

    # Forced mode: user controls exact split positions, no auto-detection
    if forced_splits is not None:
        valid = sorted(s for s in forced_splits if 0 < s < len(df))
        return _build_groups(valid)

    times = df["time"]
    diffs = times.diff().dropna()

    if diffs.empty:
        return [df]

    median_gap = diffs.median()

    # Auto-detect splits based on time gaps
    auto_splits: set[int] = set()
    if median_gap != timedelta(0):
        threshold = outlier_factor * median_gap
        for idx, gap in diffs.items():
            if gap > threshold:
                pos = df.index.get_loc(idx)
                auto_splits.add(pos)

    # Merge with manual splits (filtered to the valid range)
    extra = {s for s in manual_splits if 0 < s < len(df)} if manual_splits else set()
    return _build_groups(sorted(auto_splits | extra))
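
Usage sketch (illustrative, not part of this commit): auto-detection splits wherever a time gap exceeds outlier_factor times the median gap, while forced_splits bypasses detection entirely. The five-shot DataFrame below is made up for the example.

    from datetime import datetime, timedelta

    import pandas as pd

    from apps.tools.analyzer.grouper import detect_groups

    # Three shots a few seconds apart, then a two-minute pause, then two more shots.
    offsets = (0, 3, 6, 120, 123)
    times = [datetime(2026, 3, 24, 10, 0, 0) + timedelta(seconds=s) for s in offsets]
    df = pd.DataFrame({"speed": [2650.0, 2648.0, 2652.0, 2710.0, 2712.0], "time": times})

    auto = detect_groups(df)                       # 114 s gap > 5x median gap -> 2 groups (3 + 2 shots)
    forced = detect_groups(df, forced_splits=[2])  # auto-detection bypassed -> groups of 2 and 3 shots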
apps/tools/analyzer/parser.py (new file, 107 lines)
@@ -0,0 +1,107 @@
import csv
import io
import pandas as pd
from datetime import datetime, timedelta

CANONICAL_COLS = ["idx", "speed", "std_dev", "energy", "power_factor", "time"]
TIME_FORMATS = ["%H:%M:%S.%f", "%H:%M:%S", "%H:%M:%S,%f"]


def parse_csv(stream) -> pd.DataFrame:
    raw = stream.read()
    if isinstance(raw, bytes):
        raw = raw.decode("utf-8-sig")
    # Strip BOM characters that may appear anywhere in the file
    raw = raw.replace("\ufeff", "")

    data_rows = []
    for line in raw.splitlines():
        fields = _split_line(line)
        if len(fields) >= 6 and _is_index(fields[0]) and _is_time(fields[5]):
            data_rows.append(fields[:6])

    if len(data_rows) < 2:
        raise ValueError(
            "Could not find valid data rows in the CSV. "
            "Expected rows with: integer index, 4 numeric values, and a time (HH:MM:SS)."
        )

    df = pd.DataFrame(data_rows, columns=CANONICAL_COLS)

    for col in ("speed", "std_dev", "energy", "power_factor"):
        df[col] = _parse_numeric(df[col])

    df["time"] = _parse_time_column(df["time"])
    df = df.sort_values("time").reset_index(drop=True)
    return df[["speed", "std_dev", "energy", "power_factor", "time"]]


def _split_line(line: str) -> list:
    """Parse one CSV line, respecting quoted fields."""
    for row in csv.reader([line], quotechar='"', doublequote=True, skipinitialspace=True):
        return [f.strip() for f in row]
    return []


def _is_index(val: str) -> bool:
    """True if the value is a non-negative integer (auto-increment row index)."""
    try:
        return int(val.strip()) >= 0
    except (ValueError, AttributeError):
        return False


def _is_time(val: str) -> bool:
    """True if the value parses as HH:MM:SS or HH:MM:SS.fff."""
    cleaned = val.strip()
    for fmt in TIME_FORMATS:
        try:
            datetime.strptime(cleaned, fmt)
            return True
        except ValueError:
            continue
    return False


def _parse_numeric(col: pd.Series) -> pd.Series:
    """Parse a numeric column, accepting both '.' and ',' as decimal separator."""
    result = pd.to_numeric(col, errors="coerce")
    if result.isna().any():
        result = pd.to_numeric(
            col.astype(str).str.replace(",", ".", regex=False),
            errors="coerce",
        )
    if result.isna().any():
        bad = col[result.isna()].tolist()
        raise ValueError(f"Non-numeric values in column: {bad}")
    return result


def _parse_time_column(col: pd.Series) -> pd.Series:
    today = datetime.today().date()
    cleaned = col.astype(str).str.strip()

    parsed = None
    for fmt in TIME_FORMATS:
        candidate = pd.to_datetime(cleaned, format=fmt, errors="coerce")
        if candidate.notna().all():
            parsed = candidate
            break

    if parsed is None:
        candidate = pd.to_datetime(cleaned, errors="coerce")
        if candidate.notna().all():
            parsed = candidate

    if parsed is None:
        raise ValueError(
            "Could not parse time column. Expected format: HH:MM:SS or HH:MM:SS.fff"
        )

    parsed = parsed.apply(lambda t: datetime.combine(today, t.time()))

    times = parsed.tolist()
    for i in range(1, len(times)):
        if times[i] < times[i - 1]:
            times[i] += timedelta(days=1)
    return pd.Series(times, index=col.index)
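
Usage sketch (illustrative, not part of this commit): parse_csv only keeps rows whose first field is an integer index and whose sixth field is a time, so export headers and footers are skipped automatically. The sample rows below are invented.

    import io

    from apps.tools.analyzer.parser import parse_csv

    sample = io.StringIO(
        "Some export header that is ignored\n"
        "1,2650.3,4.2,1750.1,180.2,10:15:01\n"
        "2,2648.7,3.9,1748.0,180.1,10:15:05\n"
        "3,2652.1,4.0,1752.4,180.3,10:15:09\n"
    )
    df = parse_csv(sample)
    print(df[["speed", "time"]])   # rows sorted by time, numeric columns parsed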
apps/tools/analyzer/pdf_report.py (new file, 95 lines)
@@ -0,0 +1,95 @@
import base64
import io
from datetime import datetime

from fpdf import FPDF

_COL_LABEL = 80
_COL_VALUE = 50
_ROW_H = 7


def generate_pdf(overall: dict, group_stats: list, charts: list, overview_chart: str) -> bytes:
    pdf = FPDF()
    pdf.set_auto_page_break(auto=True, margin=15)

    pdf.add_page()
    _title_block(pdf)
    _overall_section(pdf, overall, overview_chart)

    for stat, chart_b64 in zip(group_stats, charts):
        _group_section(pdf, stat, chart_b64)

    return bytes(pdf.output())


# ---------------------------------------------------------------------------

def _title_block(pdf: FPDF):
    pdf.set_font("Helvetica", "B", 18)
    pdf.cell(0, 12, "Ballistic Analysis Report", new_x="LMARGIN", new_y="NEXT", align="C")
    pdf.set_font("Helvetica", "", 9)
    pdf.cell(
        0, 5,
        f"Generated: {datetime.now().strftime('%Y-%m-%d %H:%M')}",
        new_x="LMARGIN", new_y="NEXT", align="C",
    )
    pdf.ln(8)


def _overall_section(pdf: FPDF, overall: dict, overview_chart: str):
    _section_heading(pdf, "Overall Statistics")
    rows = [
        ("Total shots", str(overall["count"])),
        ("Min speed", f"{overall['min_speed']:.4f}"),
        ("Max speed", f"{overall['max_speed']:.4f}"),
        ("Mean speed", f"{overall['mean_speed']:.4f}"),
        ("Std dev (speed)", f"{overall['std_speed']:.4f}" if overall["std_speed"] is not None else "n/a"),
    ]
    _table(pdf, rows)
    img_bytes = base64.b64decode(overview_chart)
    pdf.image(io.BytesIO(img_bytes), x=pdf.l_margin, w=min(140, pdf.epw))
    pdf.ln(4)


def _group_section(pdf: FPDF, stat: dict, chart_b64: str):
    pdf.ln(4)
    heading = (
        f"Group {stat['group_index']} - "
        f"{stat['time_start']} to {stat['time_end']} "
        f"({stat['count']} shot(s))"
    )
    _section_heading(pdf, heading)

    rows = [
        ("Min speed", f"{stat['min_speed']:.4f}"),
        ("Max speed", f"{stat['max_speed']:.4f}"),
        ("Mean speed", f"{stat['mean_speed']:.4f}"),
        ("Std dev (speed)", f"{stat['std_speed']:.4f}" if stat["std_speed"] is not None else "n/a"),
    ]
    _table(pdf, rows)

    img_bytes = base64.b64decode(chart_b64)
    # Check remaining page space; add new page if chart won't fit
    if pdf.get_y() + 75 > pdf.page_break_trigger:
        pdf.add_page()
    pdf.image(io.BytesIO(img_bytes), x=pdf.l_margin, w=pdf.epw)
    pdf.ln(4)


def _section_heading(pdf: FPDF, text: str):
    pdf.set_font("Helvetica", "B", 12)
    pdf.set_fill_color(230, 236, 255)
    pdf.cell(0, 8, text, new_x="LMARGIN", new_y="NEXT", fill=True)
    pdf.ln(2)


def _table(pdf: FPDF, rows: list):
    for i, (label, value) in enumerate(rows):
        fill = i % 2 == 0
        pdf.set_fill_color(248, 249, 252) if fill else pdf.set_fill_color(255, 255, 255)
        pdf.set_font("Helvetica", "", 10)
        pdf.cell(_COL_LABEL, _ROW_H, label, border=0, fill=fill)
        pdf.set_font("Helvetica", "B", 10)
        pdf.cell(_COL_VALUE, _ROW_H, value, border=0, fill=fill, new_x="LMARGIN", new_y="NEXT")
    pdf.ln(3)
apps/tools/analyzer/stats.py (new file, 30 lines)
@@ -0,0 +1,30 @@
import pandas as pd


def compute_overall_stats(df: pd.DataFrame) -> dict:
    s = df["speed"]
    return {
        "min_speed": s.min(),
        "max_speed": s.max(),
        "mean_speed": s.mean(),
        "std_speed": s.std(ddof=1),
        "count": len(df),
    }


def compute_group_stats(groups: list) -> list:
    result = []
    for i, g in enumerate(groups):
        s = g["speed"]
        std = s.std(ddof=1) if len(g) > 1 else None
        result.append({
            "group_index": i + 1,
            "count": len(g),
            "min_speed": s.min(),
            "max_speed": s.max(),
            "mean_speed": s.mean(),
            "std_speed": std,
            "time_start": g["time"].min().strftime("%H:%M:%S"),
            "time_end": g["time"].max().strftime("%H:%M:%S"),
        })
    return result
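
Usage sketch (illustrative, not part of this commit): compute_group_stats consumes the per-group DataFrames from detect_groups; a single-shot group reports std_speed as None because the sample standard deviation (ddof=1) is undefined for one value.

    from datetime import datetime, timedelta

    import pandas as pd

    from apps.tools.analyzer.stats import compute_group_stats

    base = datetime(2026, 3, 24, 10, 0, 0)
    g1 = pd.DataFrame({"speed": [2650.0, 2648.0, 2652.0],
                       "time": [base + timedelta(seconds=i) for i in range(3)]})
    g2 = pd.DataFrame({"speed": [2710.0], "time": [base + timedelta(minutes=5)]})

    for stat in compute_group_stats([g1, g2]):
        print(stat["group_index"], stat["count"], stat["mean_speed"], stat["std_speed"])
    # 1 3 2650.0 2.0
    # 2 1 2710.0 None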
apps/tools/apps.py (new file, 6 lines)
@@ -0,0 +1,6 @@
from django.apps import AppConfig


class ToolsConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'apps.tools'
apps/tools/migrations/0001_initial.py (new file, 93 lines)
@@ -0,0 +1,93 @@
# Generated by Django 4.2.16 on 2026-03-24 12:28

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='ChronographAnalysis',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=150, verbose_name='name')),
                ('date', models.DateField(verbose_name='date')),
                ('notes', models.TextField(blank=True, verbose_name='notes')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='updated at')),
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='chronograph_analyses', to=settings.AUTH_USER_MODEL, verbose_name='user')),
            ],
            options={
                'verbose_name': 'chronograph analysis',
                'verbose_name_plural': 'chronograph analyses',
                'ordering': ['-date', '-created_at'],
            },
        ),
        migrations.CreateModel(
            name='ResultPicture',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.ImageField(upload_to='result_pictures/', verbose_name='image')),
                ('description', models.CharField(blank=True, max_length=255, verbose_name='description')),
                ('group_size_mm', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True, verbose_name='group size (mm)')),
                ('group_size_moa', models.DecimalField(blank=True, decimal_places=3, max_digits=7, null=True, verbose_name='group size (MOA)')),
                ('elevation_offset_mm', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True, verbose_name='elevation offset (mm)')),
                ('elevation_offset_moa', models.DecimalField(blank=True, decimal_places=3, max_digits=7, null=True, verbose_name='elevation offset (MOA)')),
                ('windage_offset_mm', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True, verbose_name='windage offset (mm)')),
                ('windage_offset_moa', models.DecimalField(blank=True, decimal_places=3, max_digits=7, null=True, verbose_name='windage offset (MOA)')),
                ('mean_radius_mm', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True, verbose_name='mean radius (mm)')),
                ('mean_radius_moa', models.DecimalField(blank=True, decimal_places=3, max_digits=7, null=True, verbose_name='mean radius (MOA)')),
                ('uploaded_at', models.DateTimeField(auto_now_add=True, verbose_name='uploaded at')),
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='result_pictures', to=settings.AUTH_USER_MODEL, verbose_name='user')),
            ],
            options={
                'verbose_name': 'result picture',
                'verbose_name_plural': 'result pictures',
                'ordering': ['-uploaded_at'],
            },
        ),
        migrations.CreateModel(
            name='ShotGroup',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('label', models.CharField(max_length=100, verbose_name='label')),
                ('distance_m', models.DecimalField(blank=True, decimal_places=1, max_digits=7, null=True, verbose_name='distance (m)')),
                ('order', models.PositiveSmallIntegerField(default=0, verbose_name='order')),
                ('notes', models.TextField(blank=True, verbose_name='notes')),
                ('analysis', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='shot_groups', to='tools.chronographanalysis', verbose_name='analysis')),
                ('result_picture', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='shot_group', to='tools.resultpicture', verbose_name='result picture')),
            ],
            options={
                'verbose_name': 'shot group',
                'verbose_name_plural': 'shot groups',
                'ordering': ['order', 'id'],
            },
        ),
        migrations.CreateModel(
            name='Shot',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('shot_number', models.PositiveSmallIntegerField(editable=False, verbose_name='shot number')),
                ('velocity_fps', models.DecimalField(decimal_places=1, max_digits=6, verbose_name='velocity (fps)')),
                ('notes', models.CharField(blank=True, max_length=255, verbose_name='notes')),
                ('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='shots', to='tools.shotgroup', verbose_name='group')),
            ],
            options={
                'verbose_name': 'shot',
                'verbose_name_plural': 'shots',
                'ordering': ['shot_number'],
            },
        ),
        migrations.AddConstraint(
            model_name='shot',
            constraint=models.UniqueConstraint(fields=('group', 'shot_number'), name='unique_shot_number_per_group'),
        ),
    ]
apps/tools/migrations/0002_shotgroup_ammo_batch.py (new file, 20 lines)
@@ -0,0 +1,20 @@
# Generated by Django 4.2.16 on 2026-03-24 13:42

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('gears', '0003_ammo_brass_bullet_powder_primer_alter_bipod_options_and_more'),
        ('tools', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='shotgroup',
            name='ammo_batch',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='shot_groups', to='gears.reloadedammobatch', verbose_name='reloaded ammo batch'),
        ),
    ]
apps/tools/migrations/0003_remove_shotgroup_result_picture_delete_resultpicture.py (new file, 20 lines)
@@ -0,0 +1,20 @@
# Generated by Django 4.2.16 on 2026-03-25 10:15

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('tools', '0002_shotgroup_ammo_batch'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='shotgroup',
            name='result_picture',
        ),
        migrations.DeleteModel(
            name='ResultPicture',
        ),
    ]
apps/tools/migrations/0004_shotgroup_nullable_analysis_user_ammo.py (new file, 32 lines)
@@ -0,0 +1,32 @@
# Generated by Django 4.2.16 on 2026-03-30 13:12

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('gears', '0011_rig_ballistic_fields'),
        ('tools', '0003_remove_shotgroup_result_picture_delete_resultpicture'),
    ]

    operations = [
        migrations.AddField(
            model_name='shotgroup',
            name='ammo',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='shot_groups', to='gears.ammo', verbose_name='factory ammo'),
        ),
        migrations.AddField(
            model_name='shotgroup',
            name='user',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='shot_groups', to=settings.AUTH_USER_MODEL, verbose_name='user'),
        ),
        migrations.AlterField(
            model_name='shotgroup',
            name='analysis',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='shot_groups', to='tools.chronographanalysis', verbose_name='analysis'),
        ),
    ]
apps/tools/migrations/0005_chronographanalysis_is_public.py (new file, 16 lines)
@@ -0,0 +1,16 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('tools', '0004_shotgroup_nullable_analysis_user_ammo'),
    ]

    operations = [
        migrations.AddField(
            model_name='chronographanalysis',
            name='is_public',
            field=models.BooleanField(default=False, verbose_name='public'),
        ),
    ]
apps/tools/migrations/__init__.py (new file, 0 lines)
apps/tools/models.py (new file, 157 lines)
@@ -0,0 +1,157 @@
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import gettext_lazy as _


# ── ChronographAnalysis ───────────────────────────────────────────────────────

class ChronographAnalysis(models.Model):
    """
    A velocity recording session composed of one or more ShotGroups.
    Can be used anonymously or by an authenticated user.
    """
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        null=True, blank=True,
        on_delete=models.SET_NULL,
        related_name='chronograph_analyses',
        verbose_name=_('user'),
    )
    name = models.CharField(_('name'), max_length=150)
    date = models.DateField(_('date'))
    notes = models.TextField(_('notes'), blank=True)
    is_public = models.BooleanField(_('public'), default=False)
    created_at = models.DateTimeField(_('created at'), auto_now_add=True)
    updated_at = models.DateTimeField(_('updated at'), auto_now=True)

    class Meta:
        verbose_name = _('chronograph analysis')
        verbose_name_plural = _('chronograph analyses')
        ordering = ['-date', '-created_at']

    def __str__(self):
        owner = self.user.email if self.user_id else _('anonymous')
        return f"{self.name} ({owner})"

    def clean(self):
        if not self.name or not self.name.strip():
            raise ValidationError({'name': _('Name may not be blank.')})


# ── ShotGroup ─────────────────────────────────────────────────────────────────

class ShotGroup(models.Model):
    """
    A named group of shots. Can be nested under a ChronographAnalysis
    or exist as a standalone group (analysis=None).
    """
    # Optional link to a chronograph session; SET_NULL keeps the group alive
    # when an analysis is deleted.
    analysis = models.ForeignKey(
        ChronographAnalysis,
        null=True, blank=True,
        on_delete=models.SET_NULL,
        related_name='shot_groups',
        verbose_name=_('analysis'),
    )
    # Owner for standalone groups (groups nested under an analysis inherit
    # ownership via analysis.user).
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        null=True, blank=True,
        on_delete=models.SET_NULL,
        related_name='shot_groups',
        verbose_name=_('user'),
    )
    label = models.CharField(_('label'), max_length=100)
    distance_m = models.DecimalField(
        _('distance (m)'),
        max_digits=7, decimal_places=1,
        null=True, blank=True,
    )
    order = models.PositiveSmallIntegerField(_('order'), default=0)
    notes = models.TextField(_('notes'), blank=True)
    # Intentional cross-app FKs: tools → gears (string refs avoid circular imports)
    ammo_batch = models.ForeignKey(
        'gears.ReloadedAmmoBatch',
        null=True, blank=True,
        on_delete=models.SET_NULL,
        related_name='shot_groups',
        verbose_name=_('reloaded ammo batch'),
    )
    ammo = models.ForeignKey(
        'gears.Ammo',
        null=True, blank=True,
        on_delete=models.SET_NULL,
        related_name='shot_groups',
        verbose_name=_('factory ammo'),
    )

    class Meta:
        verbose_name = _('shot group')
        verbose_name_plural = _('shot groups')
        ordering = ['order', 'id']

    def __str__(self):
        prefix = self.analysis.name if self.analysis_id else _('Standalone')
        return f"{prefix} / {self.label}"

    def clean(self):
        if self.distance_m is not None and self.distance_m <= 0:
            raise ValidationError(
                {'distance_m': _('Distance must be a positive value.')}
            )


# ── Shot ──────────────────────────────────────────────────────────────────────

class Shot(models.Model):
    """A single bullet velocity reading within a ShotGroup."""
    group = models.ForeignKey(
        ShotGroup,
        on_delete=models.CASCADE,
        related_name='shots',
        verbose_name=_('group'),
    )
    shot_number = models.PositiveSmallIntegerField(
        _('shot number'),
        editable=False,
    )
    velocity_fps = models.DecimalField(
        _('velocity (fps)'),
        max_digits=6, decimal_places=1,
    )
    notes = models.CharField(_('notes'), max_length=255, blank=True)

    class Meta:
        verbose_name = _('shot')
        verbose_name_plural = _('shots')
        ordering = ['shot_number']
        constraints = [
            models.UniqueConstraint(
                fields=['group', 'shot_number'],
                name='unique_shot_number_per_group',
            )
        ]

    def __str__(self):
        return f"Shot #{self.shot_number} — {self.velocity_fps} fps"

    def clean(self):
        if self.velocity_fps is not None and self.velocity_fps <= 0:
            raise ValidationError(
                {'velocity_fps': _('Velocity must be a positive value.')}
            )

    def save(self, *args, **kwargs):
        if not self.pk:
            last = (
                Shot.objects
                .filter(group=self.group)
                .order_by('-shot_number')
                .values_list('shot_number', flat=True)
                .first()
            )
            self.shot_number = (last or 0) + 1
        super().save(*args, **kwargs)
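
Usage sketch (illustrative, not part of this commit; assumes an existing ShotGroup instance named group, e.g. in a Django shell): Shot.save() assigns shot_number per group on first save, so callers never set it themselves.

    from apps.tools.models import Shot

    # `group` is a hypothetical, already-saved ShotGroup.
    first = Shot.objects.create(group=group, velocity_fps=2650.4)
    second = Shot.objects.create(group=group, velocity_fps=2648.9)
    assert (first.shot_number, second.shot_number) == (1, 2)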
apps/tools/permissions.py (new file, 27 lines)
@@ -0,0 +1,27 @@
from rest_framework.permissions import BasePermission, SAFE_METHODS


class IsOwnerOrUnclaimed(BasePermission):
    """
    Permission for resources with an optional `user` FK.

    - POST (create): open to anyone — viewset sets user=None for anonymous callers.
    - GET list: viewset filters to own records (or returns empty for anonymous).
    - GET detail: open to anyone with the ID.
    - PATCH/PUT/DELETE:
        * unclaimed (user=None) → anyone may mutate.
        * claimed (user set) → owner only.

    NOTE: The global DRF default is IsAuthenticated; this class must be
    explicitly declared on every viewset in the tools app.
    """

    def has_permission(self, request, view):
        return True  # object-level and queryset filtering handle the rest

    def has_object_permission(self, request, view, obj):
        if request.method in SAFE_METHODS:
            return True
        if obj.user is None:
            return True
        return request.user.is_authenticated and obj.user == request.user
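
A minimal sketch of the object-level rule (illustrative, not part of this commit; SimpleNamespace stand-ins replace real DRF requests and model instances): safe methods always pass, unclaimed objects are writable by anyone, claimed objects only by their owner.

    from types import SimpleNamespace

    from apps.tools.permissions import IsOwnerOrUnclaimed

    perm = IsOwnerOrUnclaimed()
    alice = SimpleNamespace(is_authenticated=True)
    bob = SimpleNamespace(is_authenticated=True)

    unclaimed = SimpleNamespace(user=None)
    claimed_by_alice = SimpleNamespace(user=alice)

    patch_as_bob = SimpleNamespace(method="PATCH", user=bob)
    get_as_bob = SimpleNamespace(method="GET", user=bob)

    assert perm.has_object_permission(get_as_bob, None, claimed_by_alice)        # read always allowed
    assert perm.has_object_permission(patch_as_bob, None, unclaimed)             # unclaimed: anyone may edit
    assert not perm.has_object_permission(patch_as_bob, None, claimed_by_alice)  # claimed: owner only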
apps/tools/serializers.py (new file, 176 lines)
@@ -0,0 +1,176 @@
import math

from rest_framework import serializers

from apps.common.serializer_helpers import ammo_detail, batch_detail
# Intentional cross-app import: tools depends on gears for ammo linking.
from apps.gears.models import Ammo, GearStatus, ReloadedAmmoBatch

from .models import ChronographAnalysis, Shot, ShotGroup


# ── Stats helper ──────────────────────────────────────────────────────────────

def _compute_stats(shot_qs):
    """
    Compute velocity statistics from a Shot queryset.

    Uses population standard deviation (divide by N) — consistent with how
    chronograph software typically reports SD for a complete string of fire.
    Returns None for all values when there are no shots.
    """
    fps_values = [float(s.velocity_fps) for s in shot_qs]
    count = len(fps_values)
    if count == 0:
        return {
            'count': 0,
            'avg_fps': None, 'avg_mps': None,
            'sd_fps': None,
            'es_fps': None,
            'min_fps': None, 'max_fps': None,
        }
    avg = sum(fps_values) / count
    min_fps = min(fps_values)
    max_fps = max(fps_values)
    variance = sum((v - avg) ** 2 for v in fps_values) / count
    return {
        'count': count,
        'avg_fps': round(avg, 1),
        'avg_mps': round(avg * 0.3048, 1),
        'sd_fps': round(math.sqrt(variance), 1),
        'es_fps': round(max_fps - min_fps, 1),
        'min_fps': round(min_fps, 1),
        'max_fps': round(max_fps, 1),
    }


# ── Shot ──────────────────────────────────────────────────────────────────────

class ShotSerializer(serializers.ModelSerializer):
    class Meta:
        model = Shot
        fields = ['id', 'shot_number', 'velocity_fps', 'notes']
        read_only_fields = ['shot_number']

    def validate(self, attrs):
        instance = Shot(**attrs)
        instance.clean()
        return attrs


# ── ShotGroup (nested under analysis) ────────────────────────────────────────

class ShotGroupSerializer(serializers.ModelSerializer):
    # Write: accept a ReloadedAmmoBatch PK (nullable)
    ammo_batch = serializers.PrimaryKeyRelatedField(
        queryset=ReloadedAmmoBatch.objects.all(),
        required=False, allow_null=True,
    )
    ammo = serializers.PrimaryKeyRelatedField(
        queryset=Ammo.objects.filter(status=GearStatus.VERIFIED),
        required=False, allow_null=True,
    )
    # Read: compact inline summaries
    ammo_batch_detail = serializers.SerializerMethodField()
    ammo_detail = serializers.SerializerMethodField()
    shots = ShotSerializer(many=True, read_only=True)
    stats = serializers.SerializerMethodField()

    class Meta:
        model = ShotGroup
        fields = [
            'id', 'label', 'distance_m', 'order', 'notes',
            'ammo_batch',         # write / read (PK)
            'ammo_batch_detail',  # read (inline)
            'ammo',               # write / read (PK)
            'ammo_detail',        # read (inline)
            'shots',
            'stats',
        ]

    def get_stats(self, obj):
        return _compute_stats(obj.shots.all())

    def get_ammo_batch_detail(self, obj):
        if not obj.ammo_batch_id:
            return None
        return batch_detail(obj.ammo_batch)

    def get_ammo_detail(self, obj):
        if not obj.ammo_id:
            return None
        return ammo_detail(obj.ammo)

    def validate(self, attrs):
        check_attrs = {k: v for k, v in attrs.items() if k not in ('ammo_batch', 'ammo')}
        instance = ShotGroup(**check_attrs)
        instance.clean()
        return attrs

    def create(self, validated_data):
        analysis = self.context.get('analysis')
        if analysis is not None:
            validated_data['analysis'] = analysis
        return ShotGroup.objects.create(**validated_data)


# ── ShotGroup (standalone, top-level /api/groups/) ───────────────────────────

class ShotGroupStandaloneSerializer(ShotGroupSerializer):
    """
    Extends ShotGroupSerializer with writable `analysis` field for
    standalone groups or groups linked to an analysis after creation.
    """
    analysis = serializers.PrimaryKeyRelatedField(
        queryset=ChronographAnalysis.objects.none(),  # narrowed in __init__
        required=False, allow_null=True,
    )
    ammo_batch = serializers.PrimaryKeyRelatedField(
        queryset=ReloadedAmmoBatch.objects.none(),  # narrowed in __init__
        required=False, allow_null=True,
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        request = self.context.get('request')
        if request and request.user.is_authenticated:
            self.fields['analysis'].queryset = ChronographAnalysis.objects.filter(
                user=request.user
            )
            self.fields['ammo_batch'].queryset = ReloadedAmmoBatch.objects.filter(
                recipe__user=request.user
            )

    class Meta(ShotGroupSerializer.Meta):
        fields = ShotGroupSerializer.Meta.fields + ['analysis']

    def create(self, validated_data):
        # user is injected by perform_create
        return ShotGroup.objects.create(**validated_data)


# ── ChronographAnalysis ───────────────────────────────────────────────────────

class ChronographAnalysisListSerializer(serializers.ModelSerializer):
    class Meta:
        model = ChronographAnalysis
        fields = ['id', 'user', 'name', 'date', 'is_public', 'created_at']
        read_only_fields = ['user', 'created_at']


class ChronographAnalysisDetailSerializer(serializers.ModelSerializer):
    shot_groups = ShotGroupSerializer(many=True, read_only=True)

    class Meta:
        model = ChronographAnalysis
        fields = [
            'id', 'user', 'name', 'date', 'notes', 'is_public',
            'shot_groups',
            'created_at', 'updated_at',
        ]
        read_only_fields = ['user', 'created_at', 'updated_at']

    def validate(self, attrs):
        instance = ChronographAnalysis(**attrs)
        instance.clean()
        return attrs
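
Worked check of _compute_stats (illustrative, not part of this commit): for shots at 2650, 2648 and 2652 fps, the population SD reported as sd_fps is about 1.6, while the sample SD (ddof=1) used in analyzer/stats.py would be 2.0.

    velocities = [2650.0, 2648.0, 2652.0]
    mean = sum(velocities) / len(velocities)                               # 2650.0
    pop_var = sum((v - mean) ** 2 for v in velocities) / len(velocities)  # 8 / 3
    print(round(pop_var ** 0.5, 1))                                        # 1.6 -> sd_fps in the API payload
    print(round((sum((v - mean) ** 2 for v in velocities) / 2) ** 0.5, 1)) # 2.0 -> sample SD (ddof=1)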
apps/tools/urls.py (new file, 12 lines)
@@ -0,0 +1,12 @@
from django.urls import include, path
from rest_framework.routers import DefaultRouter

from .views import ChronographAnalysisViewSet, ShotGroupViewSet

router = DefaultRouter()
router.register(r'tools/chronograph', ChronographAnalysisViewSet, basename='chronograph')
router.register(r'groups', ShotGroupViewSet, basename='shot-group')

urlpatterns = [
    path('', include(router.urls)),
]
apps/tools/views.py (new file, 319 lines)
@@ -0,0 +1,319 @@
import datetime

import pandas as pd
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from rest_framework import parsers, status, viewsets
from rest_framework.decorators import action
from rest_framework.response import Response

from .analyzer.charts import render_group_charts, render_overview_chart
from .analyzer.grouper import detect_groups
from .analyzer.parser import parse_csv
from .analyzer.pdf_report import generate_pdf
from .analyzer.stats import compute_group_stats, compute_overall_stats
from .models import ChronographAnalysis, Shot, ShotGroup
from .permissions import IsOwnerOrUnclaimed
from .serializers import (
    ChronographAnalysisDetailSerializer,
    ChronographAnalysisListSerializer,
    ShotGroupSerializer,
    ShotGroupStandaloneSerializer,
    ShotSerializer,
)


# ── Standalone ShotGroup ──────────────────────────────────────────────────────

class ShotGroupViewSet(viewsets.ModelViewSet):
    """
    Top-level CRUD for ShotGroups that may or may not belong to an analysis.

        GET/POST          /api/groups/
        GET/PATCH/DELETE  /api/groups/{id}/
    """
    permission_classes = [IsOwnerOrUnclaimed]
    serializer_class = ShotGroupStandaloneSerializer

    def get_queryset(self):
        from django.db.models import Q
        user = self.request.user
        if not user.is_authenticated:
            return ShotGroup.objects.none()
        return (
            ShotGroup.objects
            .filter(
                Q(analysis__user=user) |
                Q(analysis__isnull=True, user=user)
            )
            .select_related('analysis', 'ammo_batch__recipe', 'ammo_batch__powder', 'ammo')
            .prefetch_related('shots')
            .distinct()
        )

    def perform_create(self, serializer):
        serializer.save(user=self.request.user)


def _group_to_df(group):
    """Convert a ShotGroup ORM object to a DataFrame expected by the analyzer."""
    shots = list(group.shots.order_by('shot_number'))
    if not shots:
        return pd.DataFrame(columns=['speed', 'time'])
    rows = []
    base = datetime.datetime(2000, 1, 1, 0, 0, 0)
    for i, shot in enumerate(shots):
        rows.append({
            'speed': float(shot.velocity_fps),
            'time': base + datetime.timedelta(seconds=i),
        })
    return pd.DataFrame(rows)


# ── ChronographAnalysis ───────────────────────────────────────────────────────

class ChronographAnalysisViewSet(viewsets.ModelViewSet):
    """
    Manage chronograph analysis sessions and their nested shot groups / shots.

    Standard CRUD on the analysis itself, plus nested actions:
        GET/POST          .../groups/                              list / create shot groups
        GET/PATCH/DELETE  .../groups/{group_pk}/                   manage a single group
        GET/POST          .../groups/{group_pk}/shots/             list / add individual shots
        DELETE            .../groups/{group_pk}/shots/{shot_pk}/   remove a single shot
    """
    permission_classes = [IsOwnerOrUnclaimed]
    pagination_class = None  # sidebar needs the full list; pagination handled client-side

    def get_queryset(self):
        return (
            ChronographAnalysis.objects
            .prefetch_related('shot_groups__shots')
        )

    def get_serializer_class(self):
        if self.action == 'list':
            return ChronographAnalysisListSerializer
        return ChronographAnalysisDetailSerializer

    def perform_create(self, serializer):
        user = self.request.user if self.request.user.is_authenticated else None
        serializer.save(user=user)

    # ── Shot groups ───────────────────────────────────────────────────────────

    @action(detail=True, methods=['get', 'post'], url_path='groups')
    def groups(self, request, pk=None):
        analysis = self.get_object()

        if request.method == 'GET':
            qs = analysis.shot_groups.prefetch_related('shots').order_by('order', 'id')
            serializer = ShotGroupSerializer(qs, many=True, context={'request': request})
            return Response(serializer.data)

        serializer = ShotGroupSerializer(
            data=request.data,
            context={'request': request, 'analysis': analysis},
        )
        serializer.is_valid(raise_exception=True)
        group = serializer.save()
        return Response(
            ShotGroupSerializer(group, context={'request': request}).data,
            status=status.HTTP_201_CREATED,
        )

    @action(
        detail=True,
        methods=['get', 'patch', 'delete'],
        url_path=r'groups/(?P<group_pk>[^/.]+)',
    )
    def group_detail(self, request, pk=None, group_pk=None):
        analysis = self.get_object()
        group = get_object_or_404(ShotGroup, pk=group_pk, analysis=analysis)

        if request.method == 'DELETE':
            group.delete()
            return Response(status=status.HTTP_204_NO_CONTENT)

        if request.method == 'GET':
            serializer = ShotGroupSerializer(group, context={'request': request})
            return Response(serializer.data)

        # PATCH
        serializer = ShotGroupSerializer(
            group,
            data=request.data,
            partial=True,
            context={'request': request, 'analysis': analysis},
        )
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data)

    # ── Shots ─────────────────────────────────────────────────────────────────

    @action(
        detail=True,
        methods=['get', 'post'],
        url_path=r'groups/(?P<group_pk>[^/.]+)/shots',
    )
    def shots(self, request, pk=None, group_pk=None):
        analysis = self.get_object()
        group = get_object_or_404(ShotGroup, pk=group_pk, analysis=analysis)

        if request.method == 'GET':
            serializer = ShotSerializer(group.shots.all(), many=True, context={'request': request})
            return Response(serializer.data)

        serializer = ShotSerializer(data=request.data, context={'request': request})
        serializer.is_valid(raise_exception=True)
        shot = serializer.save(group=group)
        return Response(
            ShotSerializer(shot, context={'request': request}).data,
            status=status.HTTP_201_CREATED,
        )

    @action(
        detail=True,
        methods=['delete'],
        url_path=r'groups/(?P<group_pk>[^/.]+)/shots/(?P<shot_pk>[^/.]+)',
    )
    def shot_detail(self, request, pk=None, group_pk=None, shot_pk=None):
        analysis = self.get_object()
        group = get_object_or_404(ShotGroup, pk=group_pk, analysis=analysis)
        shot = get_object_or_404(Shot, pk=shot_pk, group=group)
        shot.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

    # ── CSV upload ─────────────────────────────────────────────────────────────

    @action(
        detail=False,
        methods=['post'],
        url_path='upload',
        parser_classes=[parsers.MultiPartParser],
        permission_classes=[IsOwnerOrUnclaimed],
    )
    def upload(self, request):
        """
        Parse a CSV file and create a ChronographAnalysis with auto-detected groups.
        Expected CSV columns: idx, speed, std_dev, energy, power_factor, time (HH:MM:SS)
        """
        file = request.FILES.get('file')
        if not file:
            return Response({'detail': 'No file provided.'}, status=status.HTTP_400_BAD_REQUEST)

        name = request.data.get('name', '').strip() or file.name.rsplit('.', 1)[0]
        notes = request.data.get('notes', '')
        date_str = request.data.get('date', '')
        velocity_unit = request.data.get('velocity_unit', 'fps')  # 'fps' or 'mps'
        chrono_type = request.data.get('chrono_type', 'garmin_xero_c1_pro')  # future: switch parser

        SUPPORTED_CHRONO_TYPES = {'garmin_xero_c1_pro'}
        if chrono_type not in SUPPORTED_CHRONO_TYPES:
            return Response(
                {'detail': f'Unsupported chronograph type: {chrono_type}'},
                status=status.HTTP_400_BAD_REQUEST,
            )

        try:
            df = parse_csv(file)
        except ValueError as exc:
            return Response({'detail': str(exc)}, status=status.HTTP_400_BAD_REQUEST)

        groups = detect_groups(df)

        # Determine session date
        if date_str:
            try:
                session_date = datetime.date.fromisoformat(date_str)
            except ValueError:
                session_date = datetime.date.today()
        else:
            try:
                session_date = df['time'].iloc[0].date()
            except Exception:
                session_date = datetime.date.today()

        analysis = ChronographAnalysis.objects.create(
            user=request.user if request.user.is_authenticated else None,
            name=name,
            date=session_date,
            notes=notes,
        )

        for i, gdf in enumerate(groups):
            group = ShotGroup.objects.create(
                analysis=analysis,
                label=f'Group {i + 1}',
                order=i,
            )
            for _, row in gdf.iterrows():
                speed = float(row['speed'])
                if velocity_unit == 'mps':
                    speed = speed / 0.3048  # convert m/s → fps
                Shot.objects.create(group=group, velocity_fps=speed)

        return Response(
            ChronographAnalysisListSerializer(analysis).data,
            status=status.HTTP_201_CREATED,
        )

    # ── Charts ─────────────────────────────────────────────────────────────────

    @action(detail=True, methods=['get'], url_path='charts')
    def charts(self, request, pk=None):
        """Return base64-encoded PNG charts for an analysis."""
        analysis = self.get_object()
        groups_qs = analysis.shot_groups.prefetch_related('shots').order_by('order', 'id')

        groups_dfs = [_group_to_df(g) for g in groups_qs]
        groups_dfs = [gdf for gdf in groups_dfs if not gdf.empty]

        if not groups_dfs:
            return Response({'overview': None, 'groups': []})

        group_stats = compute_group_stats(groups_dfs)

        all_speeds = [s for gs in group_stats for s in [gs.get('min_speed'), gs.get('max_speed')] if s is not None]
        y_min = min(all_speeds) if all_speeds else 0
        y_max = max(all_speeds) if all_speeds else 1000

        return Response({
            'overview': render_overview_chart(group_stats),
            'groups': render_group_charts(groups_dfs, y_min, y_max),
        })

    # ── PDF report ─────────────────────────────────────────────────────────────

    @action(detail=True, methods=['get'], url_path='report.pdf')
    def report_pdf(self, request, pk=None):
        """Generate and return a PDF analysis report."""
        analysis = self.get_object()
        groups_qs = analysis.shot_groups.prefetch_related('shots').order_by('order', 'id')

        groups_dfs = [_group_to_df(g) for g in groups_qs]
        groups_dfs = [gdf for gdf in groups_dfs if not gdf.empty]

        if not groups_dfs:
            return Response({'detail': 'No shot data available.'}, status=status.HTTP_404_NOT_FOUND)

        all_df = pd.concat(groups_dfs, ignore_index=True)
        overall_stats = compute_overall_stats(all_df)
        group_stats = compute_group_stats(groups_dfs)

        all_speeds = [s for gs in group_stats for s in [gs.get('min_speed'), gs.get('max_speed')] if s is not None]
        y_min = min(all_speeds) if all_speeds else 0
        y_max = max(all_speeds) if all_speeds else 1000

        group_charts = render_group_charts(groups_dfs, y_min, y_max)
        overview_chart = render_overview_chart(group_stats)

        pdf_bytes = generate_pdf(overall_stats, group_stats, group_charts, overview_chart)

        safe_name = analysis.name.replace(' ', '_').replace('/', '-')
        return HttpResponse(
            pdf_bytes,
            content_type='application/pdf',
            headers={'Content-Disposition': f'attachment; filename="{safe_name}.pdf"'},
        )
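
Client sketch for the upload action (illustrative, not part of this commit; the /api/ prefix is an assumption and depends on where apps/tools/urls.py is included in the project URLconf):

    import requests  # hypothetical client script, not part of the app

    with open("chrono_session.csv", "rb") as fh:
        resp = requests.post(
            "http://localhost:8000/api/tools/chronograph/upload/",
            files={"file": fh},
            data={"name": "Range day", "velocity_unit": "mps", "chrono_type": "garmin_xero_c1_pro"},
        )
    print(resp.status_code)   # 201 on success
    print(resp.json())        # list-serializer payload for the created analysis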