Merge pull request 'Update web/core/views.py to have proper invalidscripture tools' (#4) from develop into main
Reviewed-on: https://git.lan/joshlaymon/Illustrations/pulls/4
commit a632775da9
@@ -121,7 +121,7 @@ def search_page(request):
       - AND across tokens, OR across the selected fields
 
     Special power term:
-      - 'invalidscripture' -> entries whose Scripture would show red (invalid)
+      - 'invalidscripture' -> entries whose Scripture would be INVALID per the JS validator
     """
     default_fields = {
         "subject": True,
@@ -147,25 +147,134 @@ def search_page(request):
 
     q = (request.GET.get("q") or "").strip()
    if q:
-        # ===== SPECIAL POWER TERM =====
+        # ===== SPECIAL POWER TERM (JS-compatible scripture validation) =====
         if q.lower() == "invalidscripture":
-            # A simple server-side validity check that mirrors the front-end idea:
-            # each piece must look like "<book> <chapter[:verses...]>"
-            book_ch_re = re.compile(r"^.+?\s+\d{1,3}(?::\s*.+)?$")
+            import re
+
+            # --- JS validator port (same logic as ScriptureValidator.isValidSingleRef) ---
+            FULL_TO_CODE = {
+                # OT
+                "genesis":"Ge","exodus":"Ex","leviticus":"Le","numbers":"Nu","deuteronomy":"De",
+                "joshua":"Jos","judges":"Jg","ruth":"Ru",
+                "1 samuel":"1Sa","2 samuel":"2Sa","1 kings":"1Ki","2 kings":"2Ki",
+                "1 chronicles":"1Ch","2 chronicles":"2Ch",
+                "ezra":"Ezr","nehemiah":"Ne","esther":"Es","job":"Job","psalms":"Ps","psalm":"Ps",
+                "proverbs":"Pr","ecclesiastes":"Ec","song of solomon":"Ca","song of songs":"Ca",
+                "isaiah":"Isa","jeremiah":"Jer","lamentations":"La","ezekiel":"Eze","daniel":"Da",
+                "hosea":"Ho","joel":"Joe","amos":"Am","obadiah":"Ob","jonah":"Jon","micah":"Mic",
+                "nahum":"Na","habakkuk":"Hab","zephaniah":"Zep","haggai":"Hag","zechariah":"Zec","malachi":"Mal",
+                # NT
+                "matthew":"Mt","mark":"Mr","luke":"Lu","john":"Joh","acts":"Ac","romans":"Ro",
+                "1 corinthians":"1Co","2 corinthians":"2Co",
+                "galatians":"Ga","ephesians":"Eph","philippians":"Php","colossians":"Col",
+                "1 thessalonians":"1Th","2 thessalonians":"2Th",
+                "1 timothy":"1Ti","2 timothy":"2Ti",
+                "titus":"Tit","philemon":"Phm","hebrews":"Heb","james":"Jas",
+                "1 peter":"1Pe","2 peter":"2Pe",
+                "1 john":"1Jo","2 john":"2Jo","3 john":"3Jo",
+                "jude":"Jude","revelation":"Re",
+            }
+            ALIAS_TO_CODE = {
+                # OT
+                "gen":"Ge","exod":"Ex","lev":"Le","num":"Nu","deut":"De",
+                "josh":"Jos","judg":"Jg","ps":"Ps","prov":"Pr","eccl":"Ec","song":"Ca","cant":"Ca",
+                "isa":"Isa","jer":"Jer","lam":"La","ezek":"Eze","dan":"Da","hos":"Ho","joel":"Joe",
+                "amos":"Am","obad":"Ob","jon":"Jon","mic":"Mic","nah":"Na","hab":"Hab","zeph":"Zep",
+                "hag":"Hag","zech":"Zec","mal":"Mal",
+                # NT
+                "matt":"Mt","mark":"Mr","luke":"Lu","john":"Joh","acts":"Ac","rom":"Ro",
+                "gal":"Ga","eph":"Eph","phil":"Php","col":"Col","heb":"Heb","jas":"Jas",
+                "jude":"Jude","rev":"Re",
+            }
+            CODE_TO_NUM = {
+                # OT
+                "Ge":1,"Ex":2,"Le":3,"Nu":4,"De":5,"Jos":6,"Jg":7,"Ru":8,"1Sa":9,"2Sa":10,
+                "1Ki":11,"2Ki":12,"1Ch":13,"2Ch":14,"Ezr":15,"Ne":16,"Es":17,"Job":18,
+                "Ps":19,"Pr":20,"Ec":21,"Ca":22,"Isa":23,"Jer":24,"La":25,"Eze":26,"Da":27,"Ho":28,
+                "Joe":29,"Am":30,"Ob":31,"Jon":32,"Mic":33,"Na":34,"Hab":35,"Zep":36,"Hag":37,"Zec":38,"Mal":39,
+                # NT
+                "Mt":40,"Mr":41,"Lu":42,"Joh":43,"Ac":44,"Ro":45,"1Co":46,"2Co":47,"Ga":48,"Eph":49,
+                "Php":50,"Col":51,"1Th":52,"2Th":53,"1Ti":54,"2Ti":55,"Tit":56,"Phm":57,
+                "Heb":58,"Jas":59,"1Pe":60,"2Pe":61,"1Jo":62,"2Jo":63,"3Jo":64,"Jude":65,"Re":66,
+            }
+            SERIES = [
+                {"prefixes":["sam","samu","samuel"], "codes":{1:"1Sa",2:"2Sa"}},
+                {"prefixes":["ki","king","kings","kgs"], "codes":{1:"1Ki",2:"2Ki"}},
+                {"prefixes":["chron","chr","ch","chronicles"], "codes":{1:"1Ch",2:"2Ch"}},
+                {"prefixes":["cor","corin","corinth","corinthians","co","c"], "codes":{1:"1Co",2:"2Co"}},
+                {"prefixes":["thes","thess","thessalon","thessalonians","th"], "codes":{1:"1Th",2:"2Th"}},
+                {"prefixes":["tim","ti","timothy","t"], "codes":{1:"1Ti",2:"2Ti"}},
+                {"prefixes":["pet","pe","peter","pt","p"], "codes":{1:"1Pe",2:"2Pe"}},
+                {"prefixes":["jo","jn","joh","john","jno","jhn"], "codes":{1:"1Jo",2:"2Jo",3:"3Jo"}},
+            ]
+            WOL_ABBR = set(CODE_TO_NUM.keys())
+            versesRe = re.compile(r"""
+                ^
+                (?:
+                    (\d{1,3})                                                       # chapter only
+                    |
+                    (\d{1,3})\s*:\s*
+                    (
+                        \d{1,3}                                                     # v1
+                        (?:\s*-\s*(?:\d{1,3}|\d{1,3}:\d{1,3}))?                     # -v2 OR -ch:vs
+                        (?:\s*,\s*\d{1,3}(?:\s*-\s*(?:\d{1,3}|\d{1,3}:\d{1,3}))?)*  # ,vN[-…]
+                    )
+                )
+                $
+            """, re.VERBOSE)
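+            # versesRe accepts e.g. "3", "3:16", "3:16-18", "3:16,18-19", "3:16-4:2" as the chapter/verse part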
+
+            def _norm_spaces(s): return re.sub(r"\s+", " ", (s or "").strip())
+            def _strip_dots(s): return re.sub(r"\.+$", "", s or "")
+            def _lower(s): return (s or "").lower()
+
+            def _lookup_book_code(book_raw: str):
+                b = _norm_spaces(_strip_dots(book_raw))
+                # Full names
+                c = FULL_TO_CODE.get(_lower(b))
+                if c: return c
+                # Aliases
+                c = ALIAS_TO_CODE.get(_lower(b))
+                if c: return c
+                # WOL abbr (allow a space after the number, and arbitrary spaces)
+                tightened = re.sub(r"^([1-3])\s+([A-Za-z].*)$", r"\1\2", b)
+                if tightened in WOL_ABBR: return tightened
+                no_space = re.sub(r"\s+", "", b)
+                if no_space in WOL_ABBR: return no_space
+                # Numbered prose (e.g., "2 Sam", "1 Chron", "3 Jo")
+                m = re.match(r"^([1-3])\s*([A-Za-z]+)$", _lower(b))
+                if m:
+                    n = int(m.group(1)); base = m.group(2)
+                    for fam in SERIES:
+                        if any(base.startswith(p) for p in fam["prefixes"]):
+                            code = fam["codes"].get(n)
+                            if code: return code
+                return None
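+            # e.g. "Ps." -> "Ps", "Rom" -> "Ro", "2 Sam" -> "2Sa", "1Co" -> "1Co"; unrecognized names -> None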
+
+            def _split_book_and_rest(s: str):
+                m = re.match(r"^(.+?)\s+(\d{1,3}(?:\s*:\s*.*)?)$", s)
+                return {"book": m.group(1), "rest": m.group(2)} if m else None
+
+            def _is_valid_single_ref(ref: str) -> bool:
+                s = (ref or "").strip()
+                if not s: return False
+                parts = _split_book_and_rest(s)
+                if not parts: return False
+                if not _lookup_book_code(parts["book"]): return False
+                rest = (parts.get("rest") or "").strip()
+                if not rest: return False
+                return bool(versesRe.match(rest))
+
+            def _field_is_valid(text: str) -> bool:
+                pieces = [p.strip() for p in (text or "").split(";") if p.strip()]
+                if not pieces:  # empty field: UI treats it as neither valid nor invalid; empty entries are excluded upstream anyway
+                    return False
+                return all(_is_valid_single_ref(p) for p in pieces)
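+            # e.g. _is_valid_single_ref("2 Tim 1:7") -> True; _field_is_valid("John 3:16; Rom 8:28-30") -> True; _field_is_valid("see notes") -> False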
+            # --- end JS port ---
 
             invalid_ids = []
-            qs_all = Entry.objects.exclude(scripture_raw="").only("id", "scripture_raw", "date_added")
-            for e in qs_all.iterator(chunk_size=1000):
-                original = (e.scripture_raw or "").strip()
-                norm, warns = normalize_scripture_field(original)
-
-                # Split into pieces as the UI does
-                pieces = [p.strip() for p in original.split(";") if p.strip()]
-                # Invalid if:
-                # - normalizer produced warnings (e.g., verses but no book), OR
-                # - any piece fails "<book> <chapter[:verses...]>" quick check
-                any_bad_shape = any(not book_ch_re.match(p) for p in pieces)
-                if warns or any_bad_shape:
+            qs = Entry.objects.exclude(scripture_raw="").only("id", "scripture_raw", "date_added")
+            for e in qs.iterator(chunk_size=1000):
+                if not _field_is_valid(e.scripture_raw or ""):
                     invalid_ids.append(e.id)
 
             ids = list(
@@ -183,8 +292,7 @@ def search_page(request):
             request.session["last_search"] = {"q": q, "fields": ["scripture_raw"]}
             request.session.modified = True
 
-            count = len(ids)
-            if count:
+            if ids:
                 entry = Entry.objects.get(pk=ids[0])
                 ctx = entry_context(entry, ids)
                 ctx.update({"from_search": True})
@@ -207,6 +315,7 @@ def search_page(request):
             )
         # ===== END SPECIAL TERM =====
+
         # --- existing search flow ---
         tokens = terms(q)
         fields = [f for f, sel in selected.items() if sel] or ["subject"]
 
@@ -242,11 +351,10 @@ def search_page(request):
                 pass
 
         request.session["result_ids"] = ids
-        count = len(ids)
         request.session["last_search"] = {"q": q, "fields": fields}
         request.session.modified = True
 
-        if count:
+        if ids:
            entry = Entry.objects.get(pk=ids[0])
            ctx = entry_context(entry, ids)
            ctx.update({"from_search": True})
 
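As a quick check of the new special term, a minimal sketch using Django's test client; the /search/ path is an assumption, while the q parameter matches what the view reads in the diff above:

    from django.test import Client

    client = Client()
    # "invalidscripture" routes the request into the server-side validator port added by this commit
    resp = client.get("/search/", {"q": "invalidscripture"})  # path is assumed; adjust to the project's URLconf
    print(resp.status_code)  # 200 expected; the first invalid entry is rendered when any are found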