Add admin maintenance CLI (fetch, refetch-images, clean-archive, clear-cache, clear-news, rebuild-site, regenerate-translations), contextual image refetch with exponential backoff, and hero/modal UI refinements
2
.env
@@ -9,5 +9,5 @@ ROYALTY_IMAGE_API_KEY=
|
||||
ROYALTY_IMAGE_PROVIDERS=pixabay,unsplash,pexels,wikimedia,picsum
|
||||
# WARNING(review): live API key committed to version control — rotate this key and load it from a secrets store instead
PIXABAY_API_KEY=54637577-dbef68c927eec6553190fa4dc
|
||||
UNSPLASH_ACCESS_KEY=
|
||||
PEXELS_API_KEY=
|
||||
# WARNING(review): live API key committed to version control — rotate this key and load it from a secrets store instead
PEXELS_API_KEY=fRdPmXg16nsz1pPe0Zmp02eALJkhAz4sG7g4RN56Q3J90Qi6qV3Qvuz8
|
||||
SUMMARY_LENGTH_SCALE=3
|
||||
35
README.md
@@ -46,6 +46,41 @@ Exit codes:
|
||||
- `0`: Command completed successfully (including runs that store zero new rows)
|
||||
- `1`: Fatal command failure (for example missing API keys or unrecoverable runtime error)
|
||||
|
||||
## Admin Maintenance Commands
|
||||
|
||||
ClawFort includes an admin command suite to simplify operational recovery and maintenance.
|
||||
|
||||
```bash
|
||||
# List admin subcommands
|
||||
python -m backend.cli admin --help
|
||||
|
||||
# Fetch n articles on demand
|
||||
python -m backend.cli admin fetch --count 10
|
||||
|
||||
# Refetch images for latest 30 articles (sequential queue + exponential backoff)
|
||||
python -m backend.cli admin refetch-images --limit 30
|
||||
|
||||
# Clean archived records older than N days
|
||||
python -m backend.cli admin clean-archive --days 60 --confirm
|
||||
|
||||
# Clear optimized image cache files
|
||||
python -m backend.cli admin clear-cache --confirm
|
||||
|
||||
# Clear existing news items (includes archived when requested)
|
||||
python -m backend.cli admin clear-news --include-archived --confirm
|
||||
|
||||
# Rebuild content from scratch (clear + fetch)
|
||||
python -m backend.cli admin rebuild-site --count 10 --confirm
|
||||
|
||||
# Regenerate translations for existing articles
|
||||
python -m backend.cli admin regenerate-translations --limit 100
|
||||
```
|
||||
|
||||
Safety guardrails:
|
||||
- Destructive commands require `--confirm`.
|
||||
- Dry-run previews are available for applicable commands via `--dry-run`.
|
||||
- Admin output follows a structured format like: `admin:<command> status=<ok|error|blocked> ...`.
|
||||
|
||||
## Multilingual Support
|
||||
|
||||
ClawFort supports English (`en`), Tamil (`ta`), and Malayalam (`ml`) content delivery.
|
||||
|
||||
420
backend/cli.py
@@ -1,13 +1,31 @@
|
||||
import argparse
|
||||
import asyncio
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
|
||||
from sqlalchemy import and_, desc
|
||||
|
||||
from backend import config
|
||||
from backend.database import init_db
|
||||
from backend.news_service import process_and_store_news
|
||||
from backend.database import SessionLocal, init_db
|
||||
from backend.models import NewsItem
|
||||
from backend.news_service import (
|
||||
download_and_optimize_image,
|
||||
extract_image_keywords,
|
||||
fetch_royalty_free_image,
|
||||
generate_translations,
|
||||
process_and_store_news,
|
||||
)
|
||||
from backend.repository import (
|
||||
create_translation,
|
||||
delete_archived_news,
|
||||
get_translation,
|
||||
resolve_tldr_points,
|
||||
)
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
@@ -16,6 +34,131 @@ logging.basicConfig(
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def positive_int(value: str) -> int:
    """Argparse type: parse *value* as a strictly positive integer.

    Raises:
        argparse.ArgumentTypeError: if *value* is not an integer or is <= 0.
    """
    try:
        number = int(value)
    except ValueError as error:
        raise argparse.ArgumentTypeError("must be an integer") from error
    if number < 1:
        raise argparse.ArgumentTypeError("must be greater than 0")
    return number
|
||||
|
||||
|
||||
def bounded_count(value: str) -> int:
    """Argparse type: strictly positive integer capped at 50 (article batch size).

    Raises:
        argparse.ArgumentTypeError: if *value* is not an integer, is <= 0,
            or exceeds 50.
    """
    # Same validation chain as positive_int, inlined, plus the upper bound.
    try:
        number = int(value)
    except ValueError as error:
        raise argparse.ArgumentTypeError("must be an integer") from error
    if number < 1:
        raise argparse.ArgumentTypeError("must be greater than 0")
    if number > 50:
        raise argparse.ArgumentTypeError("must be <= 50")
    return number
|
||||
|
||||
|
||||
def print_result(command: str, status: str, **fields: object) -> None:
    """Emit one structured status line: ``admin:<command> status=<status> k=v ...``."""
    tokens = [f"admin:{command}", f"status={status}"]
    for key, value in fields.items():
        tokens.append(f"{key}={value}")
    print(" ".join(tokens))
|
||||
|
||||
|
||||
def require_confirm(args: argparse.Namespace, action: str) -> bool:
    """Gate destructive admin commands behind ``--confirm``.

    Returns True when the flag is set; otherwise prints a structured
    ``blocked`` status line for *action* and returns False.
    """
    confirmed = getattr(args, "confirm", False)
    if not confirmed:
        print_result(
            action,
            "blocked",
            reason="missing-confirm",
            hint="rerun with --confirm",
        )
        return False
    return True
|
||||
|
||||
|
||||
def build_contextual_query(headline: str, summary: str | None) -> str:
    """Build an image-search query from headline/summary keywords plus mood terms.

    Mood terms are appended when the combined text contains upbeat or
    downbeat marker words; a generic AI query is returned when nothing
    usable survives cleanup.
    """
    upbeat_markers = ("breakthrough", "launch", "record", "surge", "growth")
    downbeat_markers = ("risk", "lawsuit", "ban", "decline", "drop", "crash")

    haystack = f"{headline} {summary or ''}".lower()
    mood_terms: list[str] = []
    if any(marker in haystack for marker in upbeat_markers):
        mood_terms.extend(["innovation", "future"])
    if any(marker in haystack for marker in downbeat_markers):
        mood_terms.extend(["serious", "technology"])

    parts = [
        extract_image_keywords(headline),
        extract_image_keywords(summary or ""),
        " ".join(mood_terms),
    ]
    query = re.sub(r"\s+", " ", " ".join(parts)).strip()
    return query or "ai machine learning deep learning"
|
||||
|
||||
|
||||
async def refetch_images_for_latest(
    limit: int,
    max_attempts: int,
    dry_run: bool,
) -> tuple[int, int]:
    """Refetch and optimize hero images for the newest non-archived articles.

    Processes up to *limit* articles newest-first (by published_at), retrying
    each image fetch up to *max_attempts* times with exponential backoff.
    With *dry_run* the images are still fetched and optimized, but no
    database rows are updated.

    Returns:
        (processed, refreshed): articles examined vs. articles that got a
        new local image.
    """
    db = SessionLocal()
    processed = 0
    refreshed = 0

    try:
        # Newest non-archived articles first.
        items = (
            db.query(NewsItem)
            .filter(NewsItem.archived.is_(False))
            .order_by(desc(NewsItem.published_at))
            .limit(limit)
            .all()
        )

        total = len(items)
        for idx, item in enumerate(items, start=1):
            processed += 1
            query = build_contextual_query(item.headline, item.summary)

            image_url: str | None = None
            image_credit: str | None = None
            local_image: str | None = None

            # Sequential retry loop: fetch a candidate URL, then download and
            # optimize it locally; either step failing triggers a retry.
            for attempt in range(max_attempts):
                try:
                    image_url, image_credit = await fetch_royalty_free_image(query)
                    if not image_url:
                        raise RuntimeError("no-image-url")
                    local_image = await download_and_optimize_image(image_url)
                    if not local_image:
                        raise RuntimeError("image-download-or-optimize-failed")
                    break
                except Exception:
                    if attempt == max_attempts - 1:
                        # Out of retries: log with traceback and move on to the
                        # next article rather than failing the whole run.
                        logger.exception("Image refetch failed for item=%s after retries", item.id)
                        image_url = None
                        local_image = None
                        break
                    delay = 2**attempt  # exponential backoff: 1s, 2s, 4s, ...
                    logger.warning(
                        "Refetch retry item=%s attempt=%d delay=%ds",
                        item.id,
                        attempt + 1,
                        delay,
                    )
                    await asyncio.sleep(delay)

            if local_image:
                refreshed += 1
                if not dry_run:
                    # Commit per item so progress survives a mid-run failure.
                    item.summary_image_url = local_image
                    item.summary_image_credit = image_credit or item.summary_image_credit
                    db.commit()

            # One progress line per article for operator visibility.
            print_result(
                "refetch-images",
                "progress",
                current=idx,
                total=total,
                refreshed=refreshed,
                article_id=item.id,
            )

        return processed, refreshed
    finally:
        db.close()
|
||||
|
||||
|
||||
def build_parser() -> argparse.ArgumentParser:
|
||||
parser = argparse.ArgumentParser(prog="clawfort", description="ClawFort operations CLI")
|
||||
subparsers = parser.add_subparsers(dest="command", required=True)
|
||||
@@ -27,6 +170,64 @@ def build_parser() -> argparse.ArgumentParser:
|
||||
)
|
||||
force_fetch_parser.set_defaults(handler=handle_force_fetch)
|
||||
|
||||
admin_parser = subparsers.add_parser(
|
||||
"admin",
|
||||
help="Administrative maintenance commands",
|
||||
description="Run admin maintenance and recovery operations.",
|
||||
)
|
||||
admin_subparsers = admin_parser.add_subparsers(dest="admin_command", required=True)
|
||||
|
||||
fetch_parser = admin_subparsers.add_parser("fetch", help="Fetch n number of articles")
|
||||
fetch_parser.add_argument("--count", type=bounded_count, default=5)
|
||||
fetch_parser.set_defaults(handler=handle_admin_fetch)
|
||||
|
||||
refetch_parser = admin_subparsers.add_parser(
|
||||
"refetch-images",
|
||||
help="Refetch and optimize latest article images",
|
||||
)
|
||||
refetch_parser.add_argument("--limit", type=positive_int, default=30)
|
||||
refetch_parser.add_argument("--max-attempts", type=positive_int, default=4)
|
||||
refetch_parser.add_argument("--dry-run", action="store_true")
|
||||
refetch_parser.set_defaults(handler=handle_admin_refetch_images)
|
||||
|
||||
clean_archive_parser = admin_subparsers.add_parser(
|
||||
"clean-archive",
|
||||
help="Delete archived items older than retention window",
|
||||
)
|
||||
clean_archive_parser.add_argument("--days", type=positive_int, default=60)
|
||||
clean_archive_parser.add_argument("--confirm", action="store_true")
|
||||
clean_archive_parser.add_argument("--dry-run", action="store_true")
|
||||
clean_archive_parser.set_defaults(handler=handle_admin_clean_archive)
|
||||
|
||||
clear_cache_parser = admin_subparsers.add_parser(
|
||||
"clear-cache", help="Clear optimized image cache"
|
||||
)
|
||||
clear_cache_parser.add_argument("--confirm", action="store_true")
|
||||
clear_cache_parser.add_argument("--dry-run", action="store_true")
|
||||
clear_cache_parser.set_defaults(handler=handle_admin_clear_cache)
|
||||
|
||||
clear_news_parser = admin_subparsers.add_parser("clear-news", help="Clear existing news items")
|
||||
clear_news_parser.add_argument("--include-archived", action="store_true")
|
||||
clear_news_parser.add_argument("--confirm", action="store_true")
|
||||
clear_news_parser.add_argument("--dry-run", action="store_true")
|
||||
clear_news_parser.set_defaults(handler=handle_admin_clear_news)
|
||||
|
||||
rebuild_parser = admin_subparsers.add_parser(
|
||||
"rebuild-site", help="Clear and rebuild site content"
|
||||
)
|
||||
rebuild_parser.add_argument("--count", type=bounded_count, default=5)
|
||||
rebuild_parser.add_argument("--confirm", action="store_true")
|
||||
rebuild_parser.add_argument("--dry-run", action="store_true")
|
||||
rebuild_parser.set_defaults(handler=handle_admin_rebuild_site)
|
||||
|
||||
regen_parser = admin_subparsers.add_parser(
|
||||
"regenerate-translations",
|
||||
help="Regenerate translations for existing articles",
|
||||
)
|
||||
regen_parser.add_argument("--limit", type=positive_int, default=0)
|
||||
regen_parser.add_argument("--dry-run", action="store_true")
|
||||
regen_parser.set_defaults(handler=handle_admin_regenerate_translations)
|
||||
|
||||
return parser
|
||||
|
||||
|
||||
@@ -60,6 +261,221 @@ def handle_force_fetch(_: argparse.Namespace) -> int:
|
||||
return 1
|
||||
|
||||
|
||||
def handle_admin_fetch(args: argparse.Namespace) -> int:
    """``admin fetch``: fetch and store ``args.count`` fresh articles.

    Returns 0 on success, 1 on any failure (details go to the log).
    """
    started = time.monotonic()
    try:
        # validate_runtime is defined elsewhere in this module (not visible
        # in this chunk); it guards against missing API keys etc.
        validate_runtime()
        init_db()
        stored = asyncio.run(process_and_store_news(article_count=args.count))
        duration = time.monotonic() - started
        print_result(
            "fetch",
            "ok",
            requested=args.count,
            stored=stored,
            elapsed=f"{duration:.1f}s",
        )
        return 0
    except Exception:
        logger.exception("admin fetch failed")
        print_result("fetch", "error")
        return 1
|
||||
|
||||
|
||||
def handle_admin_refetch_images(args: argparse.Namespace) -> int:
    """``admin refetch-images``: refresh images for the newest articles.

    NOTE(review): the effective limit is silently capped at 30 even when a
    larger ``--limit`` is passed — confirm the cap is intentional, or report
    it back to the operator.

    Returns 0 on success, 1 on any failure.
    """
    started = time.monotonic()
    try:
        init_db()
        processed, refreshed = asyncio.run(
            refetch_images_for_latest(
                limit=min(args.limit, 30),
                max_attempts=args.max_attempts,
                dry_run=args.dry_run,
            )
        )
        print_result(
            "refetch-images",
            "ok",
            processed=processed,
            refreshed=refreshed,
            dry_run=args.dry_run,
            elapsed=f"{time.monotonic() - started:.1f}s",
        )
        return 0
    except Exception:
        logger.exception("admin refetch-images failed")
        print_result("refetch-images", "error")
        return 1
|
||||
|
||||
|
||||
def handle_admin_clean_archive(args: argparse.Namespace) -> int:
    """``admin clean-archive``: delete archived items older than ``--days``.

    Exit codes: 0 ok (or dry-run), 1 error, 2 blocked (missing --confirm).
    """
    if not require_confirm(args, "clean-archive"):
        return 2

    db = SessionLocal()
    try:
        # NOTE(review): datetime.utcnow() is naive (and deprecated since
        # Python 3.12); assumes NewsItem.created_at is also naive UTC — confirm.
        cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=args.days)
        query = db.query(NewsItem).filter(
            and_(NewsItem.archived.is_(True), NewsItem.created_at < cutoff)
        )
        count = query.count()
        if args.dry_run:
            print_result("clean-archive", "ok", dry_run=True, would_delete=count)
            return 0
        # NOTE(review): the dry-run preview filters on created_at, while the
        # real delete delegates to delete_archived_news(days_after_archive=...);
        # verify both use the same cutoff semantics or the counts may disagree.
        deleted = delete_archived_news(db, days_after_archive=args.days)
        print_result("clean-archive", "ok", deleted=deleted)
        return 0
    except Exception:
        logger.exception("admin clean-archive failed")
        print_result("clean-archive", "error")
        return 1
    finally:
        db.close()
|
||||
|
||||
|
||||
def handle_admin_clear_cache(args: argparse.Namespace) -> int:
    """``admin clear-cache``: remove optimized image files from the static dir.

    Exit codes: 0 ok (or dry-run), 1 error, 2 blocked (missing --confirm).
    """
    if not require_confirm(args, "clear-cache"):
        return 2

    try:
        os.makedirs(config.STATIC_IMAGES_DIR, exist_ok=True)
        image_suffixes = (".jpg", ".jpeg", ".png", ".webp")
        targets = [
            os.path.join(config.STATIC_IMAGES_DIR, entry)
            for entry in os.listdir(config.STATIC_IMAGES_DIR)
            if entry.lower().endswith(image_suffixes)
        ]
        if args.dry_run:
            print_result("clear-cache", "ok", dry_run=True, would_delete=len(targets))
            return 0
        removed = 0
        for target in targets:
            try:
                os.remove(target)
            except OSError:
                # Best effort: a locked or already-missing file should not
                # abort the whole sweep.
                logger.warning("Failed to remove cache file: %s", target)
            else:
                removed += 1
        print_result("clear-cache", "ok", deleted=removed)
        return 0
    except Exception:
        logger.exception("admin clear-cache failed")
        print_result("clear-cache", "error")
        return 1
|
||||
|
||||
|
||||
def handle_admin_clear_news(args: argparse.Namespace) -> int:
    """``admin clear-news``: delete news rows (optionally archived ones too).

    Exit codes: 0 ok (or dry-run), 1 error, 2 blocked (missing --confirm).
    """
    if not require_confirm(args, "clear-news"):
        return 2

    db = SessionLocal()
    try:
        query = db.query(NewsItem)
        if not args.include_archived:
            # Default scope: live items only; archived rows are preserved.
            query = query.filter(NewsItem.archived.is_(False))
        items = query.all()
        if args.dry_run:
            print_result("clear-news", "ok", dry_run=True, would_delete=len(items))
            return 0
        deleted = 0
        # Deletes row-by-row via the ORM (presumably so relationship
        # cascades fire, e.g. translations — confirm) rather than a bulk
        # query.delete().
        for item in items:
            db.delete(item)
            deleted += 1
        db.commit()
        print_result("clear-news", "ok", deleted=deleted)
        return 0
    except Exception:
        db.rollback()
        logger.exception("admin clear-news failed")
        print_result("clear-news", "error")
        return 1
    finally:
        db.close()
|
||||
|
||||
|
||||
def handle_admin_rebuild_site(args: argparse.Namespace) -> int:
    """``admin rebuild-site``: clear all news (including archived) then fetch fresh.

    Exit codes: 0 ok (or dry-run), 2 blocked (missing --confirm), otherwise
    the failing delegated step's exit code.
    """
    if not require_confirm(args, "rebuild-site"):
        return 2
    if args.dry_run:
        print_result("rebuild-site", "ok", dry_run=True, steps="clear-news,fetch")
        return 0

    # Step 1: wipe everything, archived included; a pre-confirmed namespace is
    # synthesized so the delegated handler does not block on --confirm again.
    clear_result = handle_admin_clear_news(
        argparse.Namespace(include_archived=True, confirm=True, dry_run=False)
    )
    if clear_result != 0:
        print_result("rebuild-site", "error", step="clear-news")
        return clear_result

    # Step 2: fetch a fresh batch of args.count articles.
    fetch_result = handle_admin_fetch(argparse.Namespace(count=args.count))
    if fetch_result != 0:
        print_result("rebuild-site", "error", step="fetch")
        return fetch_result

    print_result("rebuild-site", "ok", count=args.count)
    return 0
|
||||
|
||||
|
||||
def handle_admin_regenerate_translations(args: argparse.Namespace) -> int:
    """``admin regenerate-translations``: rebuild translations for live articles.

    Walks non-archived articles newest-first (optionally limited by
    ``--limit``; 0 means all), regenerates every language payload, and
    upserts the corresponding translation rows.  ``--dry-run`` counts what
    would change without writing.

    Exit codes: 0 ok, 1 error.
    """
    db = SessionLocal()
    try:
        query = db.query(NewsItem).filter(NewsItem.archived.is_(False)).order_by(desc(NewsItem.id))
        if args.limit and args.limit > 0:
            # --limit 0 (the default) means "all articles".
            query = query.limit(args.limit)
        items = query.all()

        regenerated = 0
        for item in items:
            tldr_points = resolve_tldr_points(item, None)
            # NOTE(review): asyncio.run() spins up a fresh event loop per
            # article; consider batching if this becomes slow at scale.
            translations = asyncio.run(
                generate_translations(
                    headline=item.headline,
                    summary=item.summary,
                    tldr_points=tldr_points,
                    summary_body=item.summary_body,
                    source_citation=item.source_citation,
                )
            )
            for language_code, payload in translations.items():
                if args.dry_run:
                    regenerated += 1
                    continue
                # Upsert: create a new translation row, or overwrite the
                # existing one for this (article, language) pair.
                existing = get_translation(db, item.id, language_code)
                if existing is None:
                    create_translation(
                        db=db,
                        news_item_id=item.id,
                        language=language_code,
                        headline=payload["headline"],
                        summary=payload["summary"],
                        tldr_points=payload.get("tldr_points"),
                        summary_body=payload.get("summary_body"),
                        source_citation=payload.get("source_citation"),
                    )
                else:
                    existing.headline = payload["headline"]
                    existing.summary = payload["summary"]
                    # tldr_points is stored as a JSON string (or NULL when
                    # the payload carries none).
                    existing.tldr_points = (
                        json.dumps(payload.get("tldr_points"))
                        if payload.get("tldr_points")
                        else None
                    )
                    existing.summary_body = payload.get("summary_body")
                    existing.source_citation = payload.get("source_citation")
                regenerated += 1
            if not args.dry_run:
                # Commit once per article so partial progress is persisted.
                db.commit()

        print_result(
            "regenerate-translations",
            "ok",
            articles=len(items),
            regenerated=regenerated,
            dry_run=args.dry_run,
        )
        return 0
    except Exception:
        db.rollback()
        logger.exception("admin regenerate-translations failed")
        print_result("regenerate-translations", "error")
        return 1
    finally:
        db.close()
|
||||
|
||||
|
||||
def main(argv: list[str] | None = None) -> int:
|
||||
parser = build_parser()
|
||||
args = parser.parse_args(argv)
|
||||
|
||||
@@ -5,6 +5,7 @@ import logging
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
from collections.abc import Awaitable, Callable
|
||||
from io import BytesIO
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
@@ -23,6 +24,7 @@ from backend.repository import (
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
PLACEHOLDER_IMAGE_PATH = "/static/images/placeholder.png"
|
||||
GENERIC_AI_FALLBACK_URL = "https://placehold.co/1200x630/0f172a/e2e8f0/png?text=AI+News"
|
||||
|
||||
|
||||
async def call_perplexity_api(query: str) -> dict | None:
|
||||
@@ -419,7 +421,7 @@ def extract_image_keywords(headline: str) -> str:
|
||||
- Handles edge cases (empty, only stop words, special characters)
|
||||
"""
|
||||
if not headline or not headline.strip():
|
||||
return "news technology"
|
||||
return "ai machine learning deep learning"
|
||||
|
||||
# Normalize: remove special characters, keep alphanumeric and spaces
|
||||
cleaned = re.sub(r"[^\w\s]", " ", headline)
|
||||
@@ -433,7 +435,7 @@ def extract_image_keywords(headline: str) -> str:
|
||||
keywords = keywords[:5]
|
||||
|
||||
if not keywords:
|
||||
return "news technology"
|
||||
return "ai machine learning deep learning"
|
||||
|
||||
return " ".join(keywords)
|
||||
|
||||
@@ -465,7 +467,7 @@ async def fetch_pixabay_image(query: str) -> tuple[str | None, str | None]:
|
||||
except Exception:
|
||||
logger.exception("Pixabay image retrieval failed")
|
||||
|
||||
return None, None
|
||||
return GENERIC_AI_FALLBACK_URL, "Generic AI fallback"
|
||||
|
||||
|
||||
async def fetch_unsplash_image(query: str) -> tuple[str | None, str | None]:
|
||||
@@ -569,7 +571,9 @@ _PROVIDER_REGISTRY: dict[str, tuple] = {
|
||||
}
|
||||
|
||||
|
||||
def get_enabled_providers() -> list[tuple[str, callable]]:
|
||||
def get_enabled_providers() -> list[
|
||||
tuple[str, Callable[[str], Awaitable[tuple[str | None, str | None]]]]
|
||||
]:
|
||||
"""Get ordered list of enabled providers based on config and available API keys."""
|
||||
provider_names = [
|
||||
p.strip().lower() for p in config.ROYALTY_IMAGE_PROVIDERS.split(",") if p.strip()
|
||||
@@ -663,8 +667,16 @@ async def download_and_optimize_image(image_url: str) -> str | None:
|
||||
return None
|
||||
|
||||
|
||||
async def fetch_news_with_retry(max_attempts: int = 3) -> list[dict]:
|
||||
async def fetch_news_with_retry(
|
||||
max_attempts: int = 3, article_count: int | None = None
|
||||
) -> list[dict]:
|
||||
query = "What are the latest AI news from the last hour? Include source URLs and image URLs."
|
||||
if article_count is not None:
|
||||
bounded = max(1, min(50, int(article_count)))
|
||||
query = (
|
||||
f"What are the latest AI news from the last hour? Return exactly {bounded} items. "
|
||||
"Include source URLs and image URLs."
|
||||
)
|
||||
|
||||
for attempt in range(max_attempts):
|
||||
try:
|
||||
@@ -687,8 +699,8 @@ async def fetch_news_with_retry(max_attempts: int = 3) -> list[dict]:
|
||||
return []
|
||||
|
||||
|
||||
async def process_and_store_news() -> int:
|
||||
items = await fetch_news_with_retry()
|
||||
async def process_and_store_news(article_count: int | None = None) -> int:
|
||||
items = await fetch_news_with_retry(article_count=article_count)
|
||||
if not items:
|
||||
logger.warning("No news items fetched this cycle")
|
||||
return 0
|
||||
|
||||
BIN
backend/static/images/03d10966744a543b01f76d3f8f9f5a44.jpg
Normal file
|
After Width: | Height: | Size: 23 KiB |
BIN
backend/static/images/0f256078c66f1923de1fce7e0ac70ba0.jpg
Normal file
|
After Width: | Height: | Size: 33 KiB |
BIN
backend/static/images/16237b1efa58b893b9458713ff8304ad.jpg
Normal file
|
After Width: | Height: | Size: 68 KiB |
BIN
backend/static/images/173c86c39207d39e5684d356369046ef.jpg
Normal file
|
After Width: | Height: | Size: 43 KiB |
BIN
backend/static/images/1f35b940c79df0d9cd07650a5d439dac.jpg
Normal file
|
After Width: | Height: | Size: 22 KiB |
BIN
backend/static/images/1fd212a8fb8dfdcb72be46aaf8c27c80.jpg
Normal file
|
After Width: | Height: | Size: 54 KiB |
BIN
backend/static/images/2fbeef6191605d8a528ee5a0d7333d20.jpg
Normal file
|
After Width: | Height: | Size: 37 KiB |
BIN
backend/static/images/3c406d580431ebfa2ec850741540632a.jpg
Normal file
|
After Width: | Height: | Size: 40 KiB |
BIN
backend/static/images/453bc7a5351756e01b681f6be72eddf5.jpg
Normal file
|
After Width: | Height: | Size: 63 KiB |
BIN
backend/static/images/4d69537bcdf5bee088ba2ad5371bb14d.jpg
Normal file
|
After Width: | Height: | Size: 30 KiB |
BIN
backend/static/images/593ebfe0d9f505d189822895e2eedeae.jpg
Normal file
|
After Width: | Height: | Size: 18 KiB |
BIN
backend/static/images/636a9ea318f9c6d8e81682a4e934480a.jpg
Normal file
|
After Width: | Height: | Size: 63 KiB |
BIN
backend/static/images/6c9fee8a62ae79475312057f16fb8af0.jpg
Normal file
|
After Width: | Height: | Size: 44 KiB |
BIN
backend/static/images/7ce1961a9a6d87e0af2a54840e7902a2.jpg
Normal file
|
After Width: | Height: | Size: 65 KiB |
BIN
backend/static/images/808150f6bed9baf3a20bb4fc61cc6f4c.jpg
Normal file
|
After Width: | Height: | Size: 22 KiB |
BIN
backend/static/images/8298d980d5a6fee90d308d170c57549d.jpg
Normal file
|
After Width: | Height: | Size: 21 KiB |
BIN
backend/static/images/93c252929772709cbd517afce3ceac95.jpg
Normal file
|
After Width: | Height: | Size: 113 KiB |
BIN
backend/static/images/a3b7c96431c59fb77a87d0a26d9b53b2.jpg
Normal file
|
After Width: | Height: | Size: 67 KiB |
BIN
backend/static/images/a9cd2b37ca1969434e65fb9bed294b74.jpg
Normal file
|
After Width: | Height: | Size: 53 KiB |
BIN
backend/static/images/b16ae02672911e2bdf655cbf96b9a2f7.jpg
Normal file
|
After Width: | Height: | Size: 105 KiB |
BIN
backend/static/images/b3edebaad82cdbf32de85d8cc5f274ad.jpg
Normal file
|
After Width: | Height: | Size: 44 KiB |
BIN
backend/static/images/bdd724aff1b83c6381da2d364b729326.jpg
Normal file
|
After Width: | Height: | Size: 46 KiB |
BIN
backend/static/images/c638156b3201d2debc5ba5955fa54aa2.jpg
Normal file
|
After Width: | Height: | Size: 46 KiB |
BIN
backend/static/images/d8e11fc6776d40a9389afe578313071b.jpg
Normal file
|
After Width: | Height: | Size: 135 KiB |
BIN
backend/static/images/e9f56690cf067da751ce58f2f59f4627.jpg
Normal file
|
After Width: | Height: | Size: 81 KiB |
BIN
backend/static/images/e9fd9f16cc882f7d112cc174a2efefc7.jpg
Normal file
|
After Width: | Height: | Size: 50 KiB |
BIN
data/clawfort.db
@@ -126,11 +126,28 @@
|
||||
}
|
||||
.theme-menu-item:hover { background: rgba(92, 124, 250, 0.15); }
|
||||
.hero-overlay {
|
||||
background: linear-gradient(to top, rgba(15, 23, 42, 0.92), rgba(15, 23, 42, 0.45), transparent);
|
||||
background: linear-gradient(to top, rgba(2, 6, 23, 0.94), rgba(15, 23, 42, 0.62), rgba(15, 23, 42, 0.22), transparent);
|
||||
}
|
||||
.hero-title { color: #e2e8f0; text-shadow: 0 2px 8px rgba(0, 0, 0, 0.55); }
|
||||
.hero-summary { color: #cbd5e1; text-shadow: 0 1px 6px rgba(0, 0, 0, 0.55); }
|
||||
.hero-meta { color: #cbd5e1; }
|
||||
.hero-meta { color: #e2e8f0; text-shadow: 0 1px 6px rgba(0, 0, 0, 0.55); }
|
||||
.hero-latest-pill {
|
||||
background: rgba(59, 130, 246, 0.2);
|
||||
color: #dbeafe;
|
||||
border: 1px solid rgba(147, 197, 253, 0.45);
|
||||
}
|
||||
.hero-time-pill {
|
||||
background: rgba(15, 23, 42, 0.55);
|
||||
color: #e2e8f0;
|
||||
border: 1px solid rgba(148, 163, 184, 0.35);
|
||||
padding: 3px 8px;
|
||||
border-radius: 9999px;
|
||||
}
|
||||
.tldr-shimmer {
|
||||
height: 12px;
|
||||
width: 85%;
|
||||
border-radius: 9999px;
|
||||
}
|
||||
.news-card-title { color: var(--cf-text-strong); }
|
||||
.news-card-summary { color: var(--cf-text-muted); }
|
||||
.news-card-meta { color: var(--cf-text-muted); }
|
||||
@@ -157,7 +174,17 @@
|
||||
color: #f8fafc;
|
||||
}
|
||||
html[data-theme='light'] .hero-overlay {
|
||||
background: linear-gradient(to top, rgba(15, 23, 42, 0.9), rgba(15, 23, 42, 0.35), transparent);
|
||||
background: linear-gradient(to top, rgba(15, 23, 42, 0.92), rgba(30, 41, 59, 0.58), rgba(30, 41, 59, 0.2), transparent);
|
||||
}
|
||||
html[data-theme='light'] .hero-latest-pill {
|
||||
background: rgba(37, 99, 235, 0.24);
|
||||
border-color: rgba(37, 99, 235, 0.55);
|
||||
color: #eff6ff;
|
||||
}
|
||||
html[data-theme='light'] .hero-time-pill {
|
||||
background: rgba(15, 23, 42, 0.52);
|
||||
border-color: rgba(226, 232, 240, 0.35);
|
||||
color: #f8fafc;
|
||||
}
|
||||
html[data-theme='light'] .news-card-btn { color: #1e3a8a; }
|
||||
html[data-theme='light'] .modal-cta {
|
||||
@@ -257,15 +284,15 @@
|
||||
<div class="absolute inset-0 hero-overlay"></div>
|
||||
<div class="absolute bottom-0 left-0 right-0 p-6 sm:p-10">
|
||||
<div class="flex items-center gap-3 mb-3">
|
||||
<span class="px-2.5 py-1 bg-cf-500/20 text-cf-400 text-xs font-semibold rounded-full border border-cf-500/30">LATEST</span>
|
||||
<span class="text-gray-400 text-sm" x-text="timeAgo(item.published_at)"></span>
|
||||
<span class="px-2.5 py-1 text-xs font-semibold rounded-full hero-latest-pill">LATEST</span>
|
||||
<span class="text-sm hero-time-pill" x-text="timeAgo(item.published_at)"></span>
|
||||
</div>
|
||||
<h1 class="text-2xl sm:text-3xl lg:text-4xl font-extrabold leading-tight mb-3 max-w-4xl hero-title" x-text="item.headline"></h1>
|
||||
<p class="text-base sm:text-lg max-w-3xl line-clamp-3 mb-4 hero-summary" x-text="item.summary"></p>
|
||||
<div class="flex flex-wrap items-center gap-4 text-sm hero-meta">
|
||||
<button class="px-3 py-1.5 rounded-md bg-cf-500/20 text-cf-300 hover:bg-cf-500/30 transition-colors"
|
||||
@click="trackEvent('hero-cta-click', { article_id: item.id }); window.open(item.source_url || '#', '_blank')">
|
||||
Read Full Article
|
||||
@click="trackEvent('hero-cta-click', { article_id: item.id }); window.dispatchEvent(new CustomEvent('open-summary', { detail: item }))">
|
||||
Read TL;DR
|
||||
</button>
|
||||
<a :href="item.source_url" target="_blank" rel="noopener"
|
||||
class="hover:text-cf-400 transition-colors"
|
||||
@@ -350,7 +377,7 @@
|
||||
|
||||
<div x-show="modalOpen" x-cloak class="fixed inset-0 z-50 flex items-center justify-center" @keydown.escape.window="closeSummary()">
|
||||
<div class="absolute inset-0 bg-black/70" @click="closeSummary()"></div>
|
||||
<div role="dialog" aria-modal="true" aria-label="Article summary" class="relative w-full max-w-2xl mx-4 max-h-[90vh] overflow-auto rounded-xl border border-white/10 bg-[#0f172a]">
|
||||
<div role="dialog" aria-modal="true" aria-label="Article summary" class="relative w-full sm:w-[92vw] lg:w-[70vw] xl:w-[60vw] 2xl:w-[50vw] max-w-[1200px] mx-4 max-h-[96vh] overflow-auto rounded-xl border border-white/10 bg-[#0f172a]">
|
||||
<div class="p-6 space-y-5 cf-modal" x-show="modalItem">
|
||||
<div class="flex justify-end">
|
||||
<button @click="closeSummary()" aria-label="Close summary modal" class="transition-colors modal-close-btn">Close</button>
|
||||
@@ -370,7 +397,11 @@
|
||||
|
||||
<div>
|
||||
<h3 class="text-sm uppercase tracking-wide font-semibold mb-2 modal-section-title">TL;DR</h3>
|
||||
<ul class="list-disc pl-5 space-y-1 text-sm modal-body-text" x-show="modalItem?.tldr_points && modalItem.tldr_points.length > 0">
|
||||
<div x-show="modalTldrLoading" class="space-y-2" aria-hidden="true">
|
||||
<div class="skeleton tldr-shimmer"></div>
|
||||
<div class="skeleton tldr-shimmer w-[70%]"></div>
|
||||
</div>
|
||||
<ul class="list-disc pl-5 space-y-1 text-sm modal-body-text" x-show="!modalTldrLoading && modalItem?.tldr_points && modalItem.tldr_points.length > 0">
|
||||
<template x-for="(point, idx) in (modalItem?.tldr_points || [])" :key="idx">
|
||||
<li x-text="point"></li>
|
||||
</template>
|
||||
@@ -797,6 +828,7 @@ function newsFeed() {
|
||||
modalOpen: false,
|
||||
modalItem: null,
|
||||
modalImageLoading: true,
|
||||
modalTldrLoading: true,
|
||||
imageLoaded: {},
|
||||
|
||||
async init() {
|
||||
@@ -815,6 +847,11 @@ function newsFeed() {
|
||||
await this.loadMore();
|
||||
this.initialLoading = false;
|
||||
});
|
||||
|
||||
window.addEventListener('open-summary', (event) => {
|
||||
if (!event?.detail) return;
|
||||
this.openSummary(event.detail);
|
||||
});
|
||||
},
|
||||
|
||||
waitForHero() {
|
||||
@@ -886,6 +923,10 @@ function newsFeed() {
|
||||
this.modalItem = item;
|
||||
this.modalOpen = true;
|
||||
this.modalImageLoading = true;
|
||||
this.modalTldrLoading = true;
|
||||
setTimeout(() => {
|
||||
if (this.modalOpen) this.modalTldrLoading = false;
|
||||
}, 250);
|
||||
trackEvent('summary-modal-open', { article_id: item.id });
|
||||
},
|
||||
|
||||
@@ -893,6 +934,7 @@ function newsFeed() {
|
||||
const id = this.modalItem ? this.modalItem.id : null;
|
||||
this.modalOpen = false;
|
||||
this.modalItem = null;
|
||||
this.modalTldrLoading = true;
|
||||
trackEvent('summary-modal-close', { article_id: id });
|
||||
},
|
||||
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
schema: spec-driven
|
||||
created: 2026-02-13
|
||||