Update web/core/views.py

Joshua Laymon 2025-09-07 18:44:10 +00:00
parent 99434e29f5
commit 6a1d3beef9


@@ -121,7 +121,7 @@ def search_page(request):
     - AND across tokens, OR across the selected fields

     Special power term:
-      - 'invalidscripture' -> returns only entries whose scripture would trigger red pills (validation warnings)
+      - 'invalidscripture' -> entries whose Scripture would show red (invalid)
     """
     default_fields = {
         "subject": True,
@@ -147,17 +147,27 @@ def search_page(request):
     q = (request.GET.get("q") or "").strip()
     if q:
-        # ✨ Special command: list entries whose Scripture validation would fail (red pills)
+        # ===== SPECIAL POWER TERM =====
         if q.lower() == "invalidscripture":
-            qs = Entry.objects.exclude(scripture_raw="").only("id", "scripture_raw", "date_added")
+            # A simple server-side validity check that mirrors the front-end idea:
+            # each piece must look like "<book> <chapter[:verses...]>"
+            book_ch_re = re.compile(r"^.+?\s+\d{1,3}(?::\s*.+)?$")
             invalid_ids = []
-            # Reuse the same rules your front-end uses
-            for e in qs.iterator(chunk_size=1000):
-                _norm, warnings = normalize_scripture_field((e.scripture_raw or "").strip())
-                if warnings:
+            qs_all = Entry.objects.exclude(scripture_raw="").only("id", "scripture_raw", "date_added")
+            for e in qs_all.iterator(chunk_size=1000):
+                original = (e.scripture_raw or "").strip()
+                norm, warns = normalize_scripture_field(original)
+                # Split into pieces as the UI does
+                pieces = [p.strip() for p in original.split(";") if p.strip()]
+                # Invalid if:
+                #   - normalizer produced warnings (e.g., verses but no book), OR
+                #   - any piece fails "<book> <chapter[:verses...]>" quick check
+                any_bad_shape = any(not book_ch_re.match(p) for p in pieces)
+                if warns or any_bad_shape:
                     invalid_ids.append(e.id)
+            # Keep your standard sort order (newest first, then id desc)
             ids = list(
                 Entry.objects.filter(id__in=invalid_ids)
                 .order_by("-date_added", "-id")
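
A note on the quick shape check added above: the regex accepts any piece that looks like a book name, whitespace, and a 1-3 digit chapter, with optional ":verses" after it. A minimal standalone sketch of that behavior (the sample references below are illustrative, not taken from the codebase):

import re

# Same pattern as in the hunk above: "<book> <chapter[:verses...]>"
book_ch_re = re.compile(r"^.+?\s+\d{1,3}(?::\s*.+)?$")

samples = [
    "John 3:16",         # book + chapter + verses -> valid shape
    "1 Corinthians 13",  # book + chapter only -> valid shape
    "Genesis",           # no chapter number -> invalid shape
    "3:16",              # verses but no book -> invalid shape
]

for piece in samples:
    shape_ok = bool(book_ch_re.match(piece))
    print(f"{piece!r}: {'valid shape' if shape_ok else 'invalid shape'}")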
@@ -195,8 +205,8 @@ def search_page(request):
                     "result_count": 0,
                 },
             )
+        # ===== END SPECIAL TERM =====

+    # --- existing search flow ---
     tokens = terms(q)
     fields = [f for f, sel in selected.items() if sel] or ["subject"]
@@ -233,19 +243,15 @@ def search_page(request):
     request.session["result_ids"] = ids
     count = len(ids)

-    # Ensure highlighter data is available BEFORE navigating to entry_view
     request.session["last_search"] = {"q": q, "fields": fields}
-    request.session.modified = True  # be explicit so its flushed
+    request.session.modified = True

     if count:
         entry = Entry.objects.get(pk=ids[0])
         ctx = entry_context(entry, ids)
         ctx.update({"from_search": True})
-        # 🔽 ADD THIS
         if request.user.is_staff:
             ctx["tts_url"] = reverse("api_tts_for_entry", args=[entry.id])
         return render(request, "entry_view.html", ctx)

     total = Entry.objects.count()
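
Taken together, the new branch flags an entry when either the normalizer returns warnings or any semicolon-separated piece fails the shape check. A hypothetical helper (the name and signature are mine, not from the codebase) that factors out the same decision, assuming normalize_scripture_field returns a (normalized, warnings) pair as the diff shows:

import re

# Same "<book> <chapter[:verses...]>" quick check used in the view
BOOK_CH_RE = re.compile(r"^.+?\s+\d{1,3}(?::\s*.+)?$")

def is_invalid_scripture(raw, normalize):
    """Return True if this scripture string would show a red (invalid) pill.

    `normalize` should behave like normalize_scripture_field: take the raw
    string and return (normalized_text, warnings).
    """
    original = (raw or "").strip()
    if not original:
        # Empty values are excluded upstream by the queryset, not flagged here
        return False
    _norm, warnings = normalize(original)
    # Split on ";" the same way the UI does, dropping empty pieces
    pieces = [p.strip() for p in original.split(";") if p.strip()]
    any_bad_shape = any(not BOOK_CH_RE.match(p) for p in pieces)
    return bool(warnings) or any_bad_shape

With a helper like this, the loop in the view reduces to a single call per entry, e.g. "if is_invalid_scripture(e.scripture_raw, normalize_scripture_field): invalid_ids.append(e.id)", which keeps the red-pill rule in one place.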