Backend enhancements: auth, channels, restaurants, daemon improvements

- Add admin auth dependency and role checks
- Expand channel and restaurant API routes
- Improve YouTube transcript fetching
- Enhance daemon worker with better error handling and scheduling

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
joungmin
2026-03-09 10:59:22 +09:00
parent d6afb62c18
commit 6c47d3c57d
9 changed files with 208 additions and 42 deletions

View File

@@ -72,12 +72,22 @@ def deactivate_channel_by_db_id(db_id: str) -> bool:
def get_active_channels() -> list[dict]:
    """Return all active channels together with per-channel video stats.

    Each returned dict has: ``id``, ``channel_id``, ``channel_name``,
    ``title_filter``, ``video_count`` (0 when the channel has no videos),
    and ``last_scanned_at`` (ISO-8601 string of the newest video's
    created_at, or None when no videos exist).
    """
    # Correlated subqueries keep this a single round-trip; acceptable for
    # the expected small number of channels.
    sql = """
        SELECT c.id, c.channel_id, c.channel_name, c.title_filter,
               (SELECT COUNT(*) FROM videos v WHERE v.channel_id = c.id) as video_count,
               (SELECT MAX(v.created_at) FROM videos v WHERE v.channel_id = c.id) as last_scanned_at
        FROM channels c
        WHERE c.is_active = 1
    """
    with conn() as c:
        cur = c.cursor()
        cur.execute(sql)
        return [
            {
                "id": r[0], "channel_id": r[1], "channel_name": r[2], "title_filter": r[3],
                "video_count": r[4] or 0,
                # NOTE(review): assumes the driver returns created_at as a
                # datetime (isoformat() would fail on a plain str) — confirm
                # against the conn() adapter configuration.
                "last_scanned_at": r[5].isoformat() if r[5] else None,
            }
            for r in cur.fetchall()
        ]
@@ -99,13 +109,48 @@ def get_latest_video_date(channel_db_id: str) -> str | None:
return None
def _parse_iso8601_duration(dur: str) -> int:
"""Parse ISO 8601 duration (e.g. PT1M30S, PT5M, PT1H2M) to seconds."""
import re
m = re.match(r"PT(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?", dur or "")
if not m:
return 0
h, mn, s = (int(x) if x else 0 for x in m.groups())
return h * 3600 + mn * 60 + s
def _filter_shorts(videos: list[dict]) -> list[dict]:
    """Filter out YouTube Shorts (<=60s) by checking video durations via API."""
    if not videos:
        return videos
    ids = [item["video_id"] for item in videos]
    resp = httpx.get(
        "https://www.googleapis.com/youtube/v3/videos",
        params={
            "key": _api_key(),
            "id": ",".join(ids),
            "part": "contentDetails",
        },
        timeout=30,
    )
    resp.raise_for_status()
    # Map each returned video id to its duration in seconds.
    duration_by_id = {
        entry["id"]: _parse_iso8601_duration(
            entry.get("contentDetails", {}).get("duration", "")
        )
        for entry in resp.json().get("items", [])
    }
    # Ids absent from the API response default to 0s and are dropped too.
    return [item for item in videos if duration_by_id.get(item["video_id"], 0) > 60]
def fetch_channel_videos_iter(
channel_id: str,
published_after: str | None = None,
exclude_shorts: bool = True,
):
"""Yield pages of videos from a YouTube channel via Data API v3.
Each yield is a list of dicts for one API page (up to 50).
If exclude_shorts is True, filters out videos <= 60 seconds.
"""
params: dict = {
"key": _api_key(),
@@ -127,7 +172,7 @@ def fetch_channel_videos_iter(
r = httpx.get(
"https://www.googleapis.com/youtube/v3/search",
params=params,
timeout=15,
timeout=30,
)
r.raise_for_status()
data = r.json()
@@ -143,6 +188,9 @@ def fetch_channel_videos_iter(
"url": f"https://www.youtube.com/watch?v={vid}",
})
if page_videos and exclude_shorts:
page_videos = _filter_shorts(page_videos)
if page_videos:
yield page_videos