UX improvements: mobile bottom sheet, cuisine taxonomy, search enhancements
- Add BottomSheet component for Google Maps-style restaurant detail on mobile (3-snap drag: 40%/55%/92%, velocity-based close, backdrop overlay) - Mobile map mode now full-screen with bottom sheet overlay for details - Collapsible filter panel on mobile with active filter badge count - Standardized cuisine taxonomy (46 categories: 한식|국밥, 일식|스시 etc.) with LLM remap endpoint and admin UI button - Enhanced search: keyword search now includes foods_mentioned + video title - Search results include channels array for frontend filtering - Channel filter moved to frontend filteredRestaurants (not API-level) - LLM extraction prompt updated for pipe-delimited region + cuisine taxonomy - Vector rebuild endpoint with rich JSON chunks per restaurant - Geolocation-based auto region selection on page load - Desktop filters split into two clean rows Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -9,11 +9,14 @@ import random
|
||||
import time
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
from fastapi import APIRouter, Query
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from fastapi.responses import StreamingResponse
|
||||
|
||||
from api.deps import get_admin_user
|
||||
|
||||
from core.db import conn
|
||||
from core.pipeline import process_pending
|
||||
from core import cache
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
@@ -23,11 +26,9 @@ _executor = ThreadPoolExecutor(max_workers=4)
|
||||
@router.get("")
|
||||
def list_videos(
|
||||
status: str | None = None,
|
||||
limit: int = Query(50, le=500),
|
||||
offset: int = Query(0, ge=0),
|
||||
):
|
||||
conditions = []
|
||||
params: dict = {"lim": limit, "off": offset}
|
||||
params: dict = {}
|
||||
if status:
|
||||
conditions.append("v.status = :st")
|
||||
params["st"] = status
|
||||
@@ -44,7 +45,6 @@ def list_videos(
|
||||
JOIN channels c ON c.id = v.channel_id
|
||||
{where}
|
||||
ORDER BY v.published_at DESC NULLS LAST
|
||||
OFFSET :off ROWS FETCH NEXT :lim ROWS ONLY
|
||||
"""
|
||||
with conn() as c:
|
||||
cur = c.cursor()
|
||||
@@ -100,7 +100,7 @@ def bulk_extract_pending_count():
|
||||
|
||||
|
||||
@router.post("/bulk-extract")
|
||||
def bulk_extract():
|
||||
def bulk_extract(_admin: dict = Depends(get_admin_user)):
|
||||
"""Process all unextracted videos with random delays. Streams SSE progress."""
|
||||
from core.pipeline import process_video_extract
|
||||
|
||||
@@ -131,6 +131,8 @@ def bulk_extract():
|
||||
logger.error("Bulk extract error for %s: %s", v["video_id"], e)
|
||||
yield f"data: {_json.dumps({'type': 'error', 'index': i, 'title': v['title'], 'message': str(e)})}\n\n"
|
||||
|
||||
if total_restaurants > 0:
|
||||
cache.flush()
|
||||
yield f"data: {_json.dumps({'type': 'complete', 'total': total, 'total_restaurants': total_restaurants})}\n\n"
|
||||
|
||||
return StreamingResponse(generate(), media_type="text/event-stream")
|
||||
@@ -159,7 +161,7 @@ def bulk_transcript_pending_count():
|
||||
|
||||
|
||||
@router.post("/bulk-transcript")
|
||||
def bulk_transcript():
|
||||
def bulk_transcript(_admin: dict = Depends(get_admin_user)):
|
||||
"""Fetch transcripts for all videos missing them. Streams SSE progress."""
|
||||
from core.youtube import get_transcript
|
||||
|
||||
@@ -196,11 +198,133 @@ def bulk_transcript():
|
||||
logger.error("Bulk transcript error for %s: %s", v["video_id"], e)
|
||||
yield f"data: {_json.dumps({'type': 'error', 'index': i, 'title': v['title'], 'message': str(e)})}\n\n"
|
||||
|
||||
if success > 0:
|
||||
cache.flush()
|
||||
yield f"data: {_json.dumps({'type': 'complete', 'total': total, 'success': success})}\n\n"
|
||||
|
||||
return StreamingResponse(generate(), media_type="text/event-stream")
|
||||
|
||||
|
||||
@router.post("/remap-cuisine")
def remap_cuisine(_admin: dict = Depends(get_admin_user)):
    """Remap all restaurant cuisine_type values using the LLM. Streams SSE progress.

    Two-pass strategy: pass 1 sends every restaurant to the LLM in batches of
    ``BATCH``; any item the LLM skipped or answered with an invalid category is
    retried in pass 2 with smaller batches for accuracy. Items still missed
    after pass 2 are left unchanged. Flushes the cache at the end.
    """
    from core.cuisine import build_remap_prompt, CUISINE_TYPES, VALID_PREFIXES
    from core.extractor import _llm, _parse_json
    from core.db import conn as db_conn

    BATCH = 20  # restaurants per LLM call (smaller for better accuracy)

    def _apply_batch(batch: list[dict], valid_set: set[str]) -> tuple[int, list[dict]]:
        """Run LLM on a batch. Returns (updated_count, missed_items)."""
        prompt = build_remap_prompt(batch)
        raw = _llm(prompt, max_tokens=4096)
        result = _parse_json(raw)
        if not isinstance(result, list):
            result = []

        # Index LLM answers by restaurant id; ignore malformed entries.
        result_map = {}
        for item in result:
            rid = item.get("id")
            new_type = item.get("cuisine_type")
            if rid and new_type:
                result_map[rid] = new_type

        updated = 0
        missed = []
        # One connection/cursor for the whole batch — the previous version
        # opened a new DB connection per updated row, which is needlessly
        # expensive and commits row-by-row instead of per batch.
        with db_conn() as c:
            cur = c.cursor()
            for r in batch:
                rid = r["id"]
                new_type = result_map.get(rid)
                if not new_type:
                    missed.append(r)
                    continue
                # Accept if exact match or valid prefix
                if new_type not in valid_set and not new_type.startswith(VALID_PREFIXES):
                    missed.append(r)
                    continue
                cur.execute(
                    "UPDATE restaurants SET cuisine_type = :ct WHERE id = :id",
                    {"ct": new_type, "id": rid},
                )
                updated += 1

        return updated, missed

    def generate():
        # Only restaurants that are still linked to at least one video.
        sql = """
            SELECT r.id, r.name, r.cuisine_type,
                   (SELECT LISTAGG(vr.foods_mentioned, '|') WITHIN GROUP (ORDER BY vr.id)
                    FROM video_restaurants vr WHERE vr.restaurant_id = r.id) AS foods
            FROM restaurants r
            WHERE EXISTS (SELECT 1 FROM video_restaurants vr2 WHERE vr2.restaurant_id = r.id)
            ORDER BY r.name
        """
        with db_conn() as c:
            cur = c.cursor()
            cur.execute(sql)
            rows = []
            for row in cur.fetchall():
                # LISTAGG result may come back as a LOB handle; materialize it.
                foods_raw = row[3].read() if hasattr(row[3], "read") else row[3]
                rows.append({"id": row[0], "name": row[1], "cuisine_type": row[2], "foods_mentioned": foods_raw})

        total = len(rows)
        yield f"data: {_json.dumps({'type': 'start', 'total': total})}\n\n"

        valid_set = set(CUISINE_TYPES)
        updated = 0
        all_missed: list[dict] = []

        # Pass 1: process all in batches
        for i in range(0, total, BATCH):
            batch = rows[i : i + BATCH]
            yield f"data: {_json.dumps({'type': 'processing', 'current': min(i + BATCH, total), 'total': total, 'pass': 1})}\n\n"
            try:
                cnt, missed = _apply_batch(batch, valid_set)
                updated += cnt
                all_missed.extend(missed)
                yield f"data: {_json.dumps({'type': 'batch_done', 'current': min(i + BATCH, total), 'total': total, 'updated': updated, 'missed': len(all_missed)})}\n\n"
            except Exception as e:
                logger.error("Remap batch error at %d: %s", i, e, exc_info=True)
                # Whole batch failed (LLM/parse/DB error) — queue it for pass 2.
                all_missed.extend(batch)
                yield f"data: {_json.dumps({'type': 'error', 'message': str(e), 'current': i})}\n\n"

        # Pass 2: retry missed items (smaller batches for accuracy)
        if all_missed:
            yield f"data: {_json.dumps({'type': 'retry', 'missed': len(all_missed)})}\n\n"
            RETRY_BATCH = 10
            for i in range(0, len(all_missed), RETRY_BATCH):
                batch = all_missed[i : i + RETRY_BATCH]
                try:
                    cnt, _ = _apply_batch(batch, valid_set)
                    updated += cnt
                    yield f"data: {_json.dumps({'type': 'batch_done', 'current': min(i + RETRY_BATCH, len(all_missed)), 'total': len(all_missed), 'updated': updated, 'pass': 2})}\n\n"
                except Exception as e:
                    # Best-effort retry: log and move on; items stay unmapped.
                    logger.error("Remap retry error at %d: %s", i, e, exc_info=True)

        cache.flush()
        yield f"data: {_json.dumps({'type': 'complete', 'total': total, 'updated': updated})}\n\n"

    return StreamingResponse(generate(), media_type="text/event-stream")
|
||||
|
||||
|
||||
@router.post("/rebuild-vectors")
def rebuild_vectors(_admin: dict = Depends(get_admin_user)):
    """Rebuild all restaurant vector embeddings. Streams SSE progress."""
    from core import vector

    def _sse(payload: dict) -> str:
        # Serialize one server-sent-event data frame.
        return f"data: {_json.dumps(payload)}\n\n"

    def _stream():
        yield _sse({'type': 'start'})
        try:
            for progress in vector.rebuild_all_vectors():
                # Promote the reported status to the event type; the full
                # progress payload rides along (its own keys win on clash).
                event = {'type': progress.get('status', 'progress')}
                event.update(progress)
                yield _sse(event)
            cache.flush()
        except Exception as e:
            logger.error("Rebuild vectors error: %s", e, exc_info=True)
            yield _sse({'type': 'error', 'message': str(e)})

    return StreamingResponse(_stream(), media_type="text/event-stream")
|
||||
|
||||
|
||||
@router.get("/extract/prompt")
|
||||
def get_extract_prompt():
|
||||
"""Get the current LLM extraction prompt template."""
|
||||
@@ -209,11 +333,14 @@ def get_extract_prompt():
|
||||
|
||||
|
||||
def _do_process(limit: int):
    """Run pending-video processing synchronously.

    Flushes the cache only when at least one restaurant was extracted, so
    cheap no-op runs don't invalidate cached responses.
    """
    result = process_pending(limit)
    if result > 0:
        cache.flush()
    return {"restaurants_extracted": result}
|
||||
|
||||
|
||||
@router.post("/process")
|
||||
async def trigger_processing(limit: int = Query(5, le=20)):
|
||||
async def trigger_processing(limit: int = Query(5, le=20), _admin: dict = Depends(get_admin_user)):
|
||||
"""Manually trigger processing of pending videos (non-blocking)."""
|
||||
loop = asyncio.get_event_loop()
|
||||
return await loop.run_in_executor(_executor, _do_process, limit)
|
||||
@@ -318,11 +445,12 @@ def _do_fetch_transcript(video_db_id: str, mode: str):
|
||||
{"txt": transcript, "vid": video_db_id},
|
||||
)
|
||||
|
||||
cache.flush()
|
||||
return {"ok": True, "length": len(transcript), "source": source}
|
||||
|
||||
|
||||
@router.post("/{video_db_id}/fetch-transcript")
|
||||
async def fetch_transcript(video_db_id: str, mode: str = Query("auto")):
|
||||
async def fetch_transcript(video_db_id: str, mode: str = Query("auto"), _admin: dict = Depends(get_admin_user)):
|
||||
"""Fetch and save transcript for a video (non-blocking)."""
|
||||
from fastapi import HTTPException
|
||||
|
||||
@@ -359,11 +487,12 @@ def _do_extract(video_db_id: str, custom_prompt: str | None):
|
||||
transcript,
|
||||
custom_prompt=custom_prompt,
|
||||
)
|
||||
cache.flush()
|
||||
return {"ok": True, "restaurants_extracted": count}
|
||||
|
||||
|
||||
@router.post("/{video_db_id}/extract")
|
||||
async def extract_restaurants_from_video(video_db_id: str, body: dict = None):
|
||||
async def extract_restaurants_from_video(video_db_id: str, body: dict = None, _admin: dict = Depends(get_admin_user)):
|
||||
"""Run LLM extraction on an existing transcript (non-blocking)."""
|
||||
from fastapi import HTTPException
|
||||
custom_prompt = body.get("prompt") if body else None
|
||||
@@ -375,7 +504,7 @@ async def extract_restaurants_from_video(video_db_id: str, body: dict = None):
|
||||
|
||||
|
||||
@router.post("/{video_db_id}/skip")
|
||||
def skip_video(video_db_id: str):
|
||||
def skip_video(video_db_id: str, _admin: dict = Depends(get_admin_user)):
|
||||
"""Mark a video as skipped."""
|
||||
from fastapi import HTTPException
|
||||
with conn() as c:
|
||||
@@ -386,11 +515,12 @@ def skip_video(video_db_id: str):
|
||||
)
|
||||
if cur.rowcount == 0:
|
||||
raise HTTPException(404, "Video not found")
|
||||
cache.flush()
|
||||
return {"ok": True}
|
||||
|
||||
|
||||
@router.delete("/{video_db_id}")
|
||||
def delete_video(video_db_id: str):
|
||||
def delete_video(video_db_id: str, _admin: dict = Depends(get_admin_user)):
|
||||
"""Delete a video and its related data."""
|
||||
from core.db import conn as get_conn
|
||||
with get_conn() as c:
|
||||
@@ -441,11 +571,12 @@ def delete_video(video_db_id: str):
|
||||
if cur.rowcount == 0:
|
||||
from fastapi import HTTPException
|
||||
raise HTTPException(404, "Video not found")
|
||||
cache.flush()
|
||||
return {"ok": True}
|
||||
|
||||
|
||||
@router.put("/{video_db_id}")
|
||||
def update_video(video_db_id: str, body: dict):
|
||||
def update_video(video_db_id: str, body: dict, _admin: dict = Depends(get_admin_user)):
|
||||
"""Update video title."""
|
||||
from fastapi import HTTPException
|
||||
title = body.get("title")
|
||||
@@ -459,11 +590,12 @@ def update_video(video_db_id: str, body: dict):
|
||||
)
|
||||
if cur.rowcount == 0:
|
||||
raise HTTPException(404, "Video not found")
|
||||
cache.flush()
|
||||
return {"ok": True}
|
||||
|
||||
|
||||
@router.delete("/{video_db_id}/restaurants/{restaurant_id}")
|
||||
def delete_video_restaurant(video_db_id: str, restaurant_id: str):
|
||||
def delete_video_restaurant(video_db_id: str, restaurant_id: str, _admin: dict = Depends(get_admin_user)):
|
||||
"""Delete a video-restaurant mapping. Also cleans up orphaned restaurant."""
|
||||
from fastapi import HTTPException
|
||||
with conn() as c:
|
||||
@@ -487,11 +619,12 @@ def delete_video_restaurant(video_db_id: str, restaurant_id: str):
|
||||
DELETE FROM restaurants WHERE id = :rid
|
||||
AND NOT EXISTS (SELECT 1 FROM video_restaurants WHERE restaurant_id = :rid)
|
||||
""", {"rid": restaurant_id})
|
||||
cache.flush()
|
||||
return {"ok": True}
|
||||
|
||||
|
||||
@router.post("/{video_db_id}/restaurants/manual")
|
||||
def add_manual_restaurant(video_db_id: str, body: dict):
|
||||
def add_manual_restaurant(video_db_id: str, body: dict, _admin: dict = Depends(get_admin_user)):
|
||||
"""Manually add a restaurant and link it to a video."""
|
||||
from fastapi import HTTPException
|
||||
from core import restaurant as rest_mod
|
||||
@@ -538,11 +671,12 @@ def add_manual_restaurant(video_db_id: str, body: dict):
|
||||
guests=guests if isinstance(guests, list) else [],
|
||||
)
|
||||
|
||||
cache.flush()
|
||||
return {"ok": True, "restaurant_id": rid, "link_id": link_id}
|
||||
|
||||
|
||||
@router.put("/{video_db_id}/restaurants/{restaurant_id}")
|
||||
def update_video_restaurant(video_db_id: str, restaurant_id: str, body: dict):
|
||||
def update_video_restaurant(video_db_id: str, restaurant_id: str, body: dict, _admin: dict = Depends(get_admin_user)):
|
||||
"""Update restaurant info linked to a video.
|
||||
|
||||
If name changed, re-geocode and remap to a new restaurant record.
|
||||
@@ -552,6 +686,9 @@ def update_video_restaurant(video_db_id: str, restaurant_id: str, body: dict):
|
||||
|
||||
# Check if name changed — need to remap
|
||||
new_name = body.get("name", "").strip() if "name" in body else None
|
||||
name_changed = False
|
||||
active_rid = restaurant_id
|
||||
|
||||
if new_name:
|
||||
with conn() as c:
|
||||
cur = c.cursor()
|
||||
@@ -560,101 +697,126 @@ def update_video_restaurant(video_db_id: str, restaurant_id: str, body: dict):
|
||||
old_name = row[0] if row else ""
|
||||
|
||||
if old_name != new_name:
|
||||
# Name changed: geocode new restaurant, remap
|
||||
name_changed = True
|
||||
from core import restaurant as rest_mod
|
||||
from core.geocoding import geocode_restaurant
|
||||
|
||||
address = body.get("address", "").strip() or body.get("region", "").strip() or ""
|
||||
address = (body.get("address") or "").strip() or (body.get("region") or "").strip() or ""
|
||||
geo = geocode_restaurant(new_name, address)
|
||||
if not geo:
|
||||
raise HTTPException(400, f"'{new_name}' 위치를 찾을 수 없습니다.")
|
||||
|
||||
new_rid = rest_mod.upsert(
|
||||
name=new_name,
|
||||
address=geo.get("formatted_address") or body.get("address"),
|
||||
region=body.get("region"),
|
||||
latitude=geo["latitude"],
|
||||
longitude=geo["longitude"],
|
||||
cuisine_type=body.get("cuisine_type"),
|
||||
price_range=body.get("price_range"),
|
||||
google_place_id=geo.get("google_place_id"),
|
||||
phone=geo.get("phone"),
|
||||
website=geo.get("website"),
|
||||
business_status=geo.get("business_status"),
|
||||
rating=geo.get("rating"),
|
||||
rating_count=geo.get("rating_count"),
|
||||
)
|
||||
|
||||
# Read existing mapping data, delete old, create new
|
||||
with conn() as c:
|
||||
cur = c.cursor()
|
||||
cur.execute(
|
||||
"SELECT foods_mentioned, evaluation, guests FROM video_restaurants WHERE video_id = :vid AND restaurant_id = :rid",
|
||||
{"vid": video_db_id, "rid": restaurant_id},
|
||||
)
|
||||
old_vr = cur.fetchone()
|
||||
|
||||
cur.execute(
|
||||
"DELETE FROM video_restaurants WHERE video_id = :vid AND restaurant_id = :rid",
|
||||
{"vid": video_db_id, "rid": restaurant_id},
|
||||
# Geocode failed — just rename in place without remapping
|
||||
with conn() as c:
|
||||
cur = c.cursor()
|
||||
cur.execute("UPDATE restaurants SET name = :name, updated_at = SYSTIMESTAMP WHERE id = :rid",
|
||||
{"name": new_name, "rid": restaurant_id})
|
||||
else:
|
||||
new_rid = rest_mod.upsert(
|
||||
name=new_name,
|
||||
address=geo.get("formatted_address") or body.get("address"),
|
||||
region=body.get("region"),
|
||||
latitude=geo["latitude"],
|
||||
longitude=geo["longitude"],
|
||||
cuisine_type=body.get("cuisine_type"),
|
||||
price_range=body.get("price_range"),
|
||||
google_place_id=geo.get("google_place_id"),
|
||||
phone=geo.get("phone"),
|
||||
website=geo.get("website"),
|
||||
business_status=geo.get("business_status"),
|
||||
rating=geo.get("rating"),
|
||||
rating_count=geo.get("rating_count"),
|
||||
)
|
||||
|
||||
# Build new mapping values from body or old data
|
||||
def _parse(val, default):
|
||||
if val is None:
|
||||
return default
|
||||
if hasattr(val, "read"):
|
||||
val = val.read()
|
||||
if isinstance(val, (list, dict)):
|
||||
return val
|
||||
try:
|
||||
return _json.loads(val)
|
||||
except Exception:
|
||||
return default
|
||||
# Read existing mapping data, delete old, create new
|
||||
with conn() as c:
|
||||
cur = c.cursor()
|
||||
cur.execute(
|
||||
"SELECT foods_mentioned, evaluation, guests FROM video_restaurants WHERE video_id = :vid AND restaurant_id = :rid",
|
||||
{"vid": video_db_id, "rid": restaurant_id},
|
||||
)
|
||||
old_vr = cur.fetchone()
|
||||
|
||||
old_foods = _parse(old_vr[0], []) if old_vr else []
|
||||
old_eval = _parse(old_vr[1], {}) if old_vr else {}
|
||||
old_guests = _parse(old_vr[2], []) if old_vr else []
|
||||
cur.execute(
|
||||
"DELETE FROM video_restaurants WHERE video_id = :vid AND restaurant_id = :rid",
|
||||
{"vid": video_db_id, "rid": restaurant_id},
|
||||
)
|
||||
|
||||
foods = body.get("foods_mentioned", old_foods)
|
||||
evaluation = body.get("evaluation", old_eval)
|
||||
guests = body.get("guests", old_guests)
|
||||
def _parse(val, default):
|
||||
if val is None:
|
||||
return default
|
||||
if hasattr(val, "read"):
|
||||
val = val.read()
|
||||
if isinstance(val, (list, dict)):
|
||||
return val
|
||||
try:
|
||||
return _json.loads(val)
|
||||
except Exception:
|
||||
return default
|
||||
|
||||
eval_text = evaluation.get("text", "") if isinstance(evaluation, dict) else str(evaluation or "")
|
||||
old_foods = _parse(old_vr[0], []) if old_vr else []
|
||||
old_eval = _parse(old_vr[1], {}) if old_vr else {}
|
||||
old_guests = _parse(old_vr[2], []) if old_vr else []
|
||||
|
||||
rest_mod.link_video_restaurant(
|
||||
video_db_id=video_db_id,
|
||||
restaurant_id=new_rid,
|
||||
foods=foods if isinstance(foods, list) else [],
|
||||
evaluation=eval_text or None,
|
||||
guests=guests if isinstance(guests, list) else [],
|
||||
)
|
||||
foods = body.get("foods_mentioned", old_foods)
|
||||
evaluation = body.get("evaluation", old_eval)
|
||||
guests = body.get("guests", old_guests)
|
||||
|
||||
return {"ok": True, "remapped": True, "new_restaurant_id": new_rid}
|
||||
eval_text = evaluation.get("text", "") if isinstance(evaluation, dict) else str(evaluation or "")
|
||||
|
||||
# No name change — update in place
|
||||
with conn() as c:
|
||||
cur = c.cursor()
|
||||
r_sets = []
|
||||
r_params: dict = {"rid": restaurant_id}
|
||||
for field in ("name", "address", "region", "cuisine_type", "price_range"):
|
||||
if field in body:
|
||||
r_sets.append(f"{field} = :{field}")
|
||||
r_params[field] = body[field]
|
||||
if r_sets:
|
||||
r_sets.append("updated_at = SYSTIMESTAMP")
|
||||
sql = f"UPDATE restaurants SET {', '.join(r_sets)} WHERE id = :rid"
|
||||
cur.execute(sql, r_params)
|
||||
rest_mod.link_video_restaurant(
|
||||
video_db_id=video_db_id,
|
||||
restaurant_id=new_rid,
|
||||
foods=foods if isinstance(foods, list) else [],
|
||||
evaluation=eval_text or None,
|
||||
guests=guests if isinstance(guests, list) else [],
|
||||
)
|
||||
|
||||
vr_params: dict = {"vid": video_db_id, "rid": restaurant_id}
|
||||
vr_sets = []
|
||||
for field in ("foods_mentioned", "evaluation", "guests"):
|
||||
if field in body:
|
||||
vr_sets.append(f"{field} = :{field}")
|
||||
val = body[field]
|
||||
vr_params[field] = _json.dumps(val, ensure_ascii=False) if isinstance(val, (list, dict)) else val
|
||||
if vr_sets:
|
||||
sql = f"UPDATE video_restaurants SET {', '.join(vr_sets)} WHERE video_id = :vid AND restaurant_id = :rid"
|
||||
cur.execute(sql, vr_params)
|
||||
active_rid = new_rid
|
||||
|
||||
return {"ok": True}
|
||||
# 기존 식당이 다른 영상 매핑이 없으면 고아 → 삭제
|
||||
if new_rid != restaurant_id:
|
||||
with conn() as c:
|
||||
cur = c.cursor()
|
||||
cur.execute(
|
||||
"SELECT COUNT(*) FROM video_restaurants WHERE restaurant_id = :rid",
|
||||
{"rid": restaurant_id},
|
||||
)
|
||||
remaining = cur.fetchone()[0]
|
||||
if remaining == 0:
|
||||
cur.execute("DELETE FROM restaurant_vectors WHERE restaurant_id = :rid", {"rid": restaurant_id})
|
||||
cur.execute("DELETE FROM user_reviews WHERE restaurant_id = :rid", {"rid": restaurant_id})
|
||||
cur.execute("DELETE FROM user_favorites WHERE restaurant_id = :rid", {"rid": restaurant_id})
|
||||
cur.execute("DELETE FROM restaurants WHERE id = :rid", {"rid": restaurant_id})
|
||||
|
||||
# Update remaining fields in place (skip name if already remapped)
|
||||
if not name_changed:
|
||||
with conn() as c:
|
||||
cur = c.cursor()
|
||||
r_sets = []
|
||||
r_params: dict = {"rid": active_rid}
|
||||
for field in ("address", "region", "cuisine_type", "price_range"):
|
||||
if field in body:
|
||||
r_sets.append(f"{field} = :{field}")
|
||||
r_params[field] = body[field]
|
||||
if r_sets:
|
||||
r_sets.append("updated_at = SYSTIMESTAMP")
|
||||
sql = f"UPDATE restaurants SET {', '.join(r_sets)} WHERE id = :rid"
|
||||
cur.execute(sql, r_params)
|
||||
|
||||
vr_params: dict = {"vid": video_db_id, "rid": active_rid}
|
||||
vr_sets = []
|
||||
for field in ("foods_mentioned", "evaluation", "guests"):
|
||||
if field in body:
|
||||
vr_sets.append(f"{field} = :{field}")
|
||||
val = body[field]
|
||||
vr_params[field] = _json.dumps(val, ensure_ascii=False) if isinstance(val, (list, dict)) else val
|
||||
if vr_sets:
|
||||
sql = f"UPDATE video_restaurants SET {', '.join(vr_sets)} WHERE video_id = :vid AND restaurant_id = :rid"
|
||||
cur.execute(sql, vr_params)
|
||||
|
||||
cache.flush()
|
||||
result: dict = {"ok": True}
|
||||
if name_changed:
|
||||
result["remapped"] = active_rid != restaurant_id
|
||||
if active_rid != restaurant_id:
|
||||
result["new_restaurant_id"] = active_rid
|
||||
return result
|
||||
|
||||
Reference in New Issue
Block a user