diff --git a/.env b/.env
index 854e461..4cb0207 100644
--- a/.env
+++ b/.env
@@ -9,5 +9,5 @@ ROYALTY_IMAGE_API_KEY=
 ROYALTY_IMAGE_PROVIDERS=pixabay,unsplash,pexels,wikimedia,picsum
 PIXABAY_API_KEY=54637577-dbef68c927eec6553190fa4dc
 UNSPLASH_ACCESS_KEY=
-PEXELS_API_KEY=
+PEXELS_API_KEY=fRdPmXg16nsz1pPe0Zmp02eALJkhAz4sG7g4RN56Q3J90Qi6qV3Qvuz8
 SUMMARY_LENGTH_SCALE=3
\ No newline at end of file
diff --git a/README.md b/README.md
index dcd4bea..26623b3 100644
--- a/README.md
+++ b/README.md
@@ -46,6 +46,41 @@ Exit codes:
 - `0`: Command completed successfully (including runs that store zero new rows)
 - `1`: Fatal command failure (for example missing API keys or unrecoverable runtime error)
 
+## Admin Maintenance Commands
+
+ClawFort includes an admin command suite to simplify operational recovery and maintenance.
+
+```bash
+# List admin subcommands
+python -m backend.cli admin --help
+
+# Fetch n articles on demand
+python -m backend.cli admin fetch --count 10
+
+# Refetch images for the latest 30 articles (sequential queue + exponential backoff)
+python -m backend.cli admin refetch-images --limit 30
+
+# Clean archived records older than N days
+python -m backend.cli admin clean-archive --days 60 --confirm
+
+# Clear optimized image cache files
+python -m backend.cli admin clear-cache --confirm
+
+# Clear existing news items (includes archived when requested)
+python -m backend.cli admin clear-news --include-archived --confirm
+
+# Rebuild content from scratch (clear + fetch)
+python -m backend.cli admin rebuild-site --count 10 --confirm
+
+# Regenerate translations for existing articles
+python -m backend.cli admin regenerate-translations --limit 100
+```
+
+Safety guardrails:
+- Destructive commands require `--confirm`.
+- Dry-run previews are available for applicable commands via `--dry-run`.
+- Admin output follows a structured format: `admin:<command> status=<status> key=value ...`.
+
 ## Multilingual Support
 
 ClawFort supports English (`en`), Tamil (`ta`), and Malayalam (`ml`) content delivery.
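For reference, each admin command emits one final structured line via the `print_result` helper added in `backend/cli.py` below, and destructive commands that are missing `--confirm` exit with status `2`. A hypothetical session (illustrative values only) might look like:

```bash
$ python -m backend.cli admin fetch --count 10
admin:fetch status=ok requested=10 stored=10 elapsed=41.3s

$ python -m backend.cli admin clean-archive --days 60
admin:clean-archive status=blocked reason=missing-confirm hint=rerun with --confirm

$ python -m backend.cli admin clean-archive --days 60 --confirm --dry-run
admin:clean-archive status=ok dry_run=True would_delete=12
```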
diff --git a/backend/__pycache__/cli.cpython-313.pyc b/backend/__pycache__/cli.cpython-313.pyc
index e558b78..8ad57c9 100644
Binary files a/backend/__pycache__/cli.cpython-313.pyc and b/backend/__pycache__/cli.cpython-313.pyc differ
diff --git a/backend/__pycache__/config.cpython-313.pyc b/backend/__pycache__/config.cpython-313.pyc
index 48dd2bc..94ff737 100644
Binary files a/backend/__pycache__/config.cpython-313.pyc and b/backend/__pycache__/config.cpython-313.pyc differ
diff --git a/backend/__pycache__/news_service.cpython-313.pyc b/backend/__pycache__/news_service.cpython-313.pyc
index d8e65a8..0a9dae9 100644
Binary files a/backend/__pycache__/news_service.cpython-313.pyc and b/backend/__pycache__/news_service.cpython-313.pyc differ
diff --git a/backend/cli.py b/backend/cli.py
index ee38231..688e1f5 100644
--- a/backend/cli.py
+++ b/backend/cli.py
@@ -1,13 +1,31 @@
 import argparse
 import asyncio
+import datetime
+import json
 import logging
 import os
+import re
 import sys
 import time
 
+from sqlalchemy import and_, desc
+
 from backend import config
-from backend.database import init_db
-from backend.news_service import process_and_store_news
+from backend.database import SessionLocal, init_db
+from backend.models import NewsItem
+from backend.news_service import (
+    download_and_optimize_image,
+    extract_image_keywords,
+    fetch_royalty_free_image,
+    generate_translations,
+    process_and_store_news,
+)
+from backend.repository import (
+    create_translation,
+    delete_archived_news,
+    get_translation,
+    resolve_tldr_points,
+)
 
 logging.basicConfig(
     level=logging.INFO,
@@ -16,6 +34,131 @@ logging.basicConfig(
 logger = logging.getLogger(__name__)
 
 
+def positive_int(value: str) -> int:
+    try:
+        parsed = int(value)
+    except ValueError as exc:
+        raise argparse.ArgumentTypeError("must be an integer") from exc
+    if parsed <= 0:
+        raise argparse.ArgumentTypeError("must be greater than 0")
+    return parsed
+
+
+def bounded_count(value: str) -> int:
+    parsed = positive_int(value)
+    if parsed > 50:
+        raise argparse.ArgumentTypeError("must be <= 50")
+    return parsed
+
+
+def print_result(command: str, status: str, **fields: object) -> None:
+    parts = [f"admin:{command}", f"status={status}"]
+    parts.extend([f"{key}={value}" for key, value in fields.items()])
+    print(" ".join(parts))
+
+
+def require_confirm(args: argparse.Namespace, action: str) -> bool:
+    if getattr(args, "confirm", False):
+        return True
+    print_result(
+        action,
+        "blocked",
+        reason="missing-confirm",
+        hint="rerun with --confirm",
+    )
+    return False
+
+
+def build_contextual_query(headline: str, summary: str | None) -> str:
+    headline_query = extract_image_keywords(headline)
+    summary_query = extract_image_keywords(summary or "")
+
+    mood_terms: list[str] = []
+    text = f"{headline} {summary or ''}".lower()
+    if any(word in text for word in ("breakthrough", "launch", "record", "surge", "growth")):
+        mood_terms.extend(["innovation", "future"])
+    if any(word in text for word in ("risk", "lawsuit", "ban", "decline", "drop", "crash")):
+        mood_terms.extend(["serious", "technology"])
+
+    combined = " ".join([headline_query, summary_query, " ".join(mood_terms)]).strip()
+    cleaned = re.sub(r"\s+", " ", combined).strip()
+    if not cleaned:
+        return "ai machine learning deep learning"
+    return cleaned
+
+
+async def refetch_images_for_latest(
+    limit: int,
+    max_attempts: int,
+    dry_run: bool,
+) -> tuple[int, int]:
+    db = SessionLocal()
+    processed = 0
+    refreshed = 0
+
+    try:
+        items = (
+            db.query(NewsItem)
+            .filter(NewsItem.archived.is_(False))
+            .order_by(desc(NewsItem.published_at))
+            .limit(limit)
+            .all()
+        )
+
+        total = len(items)
+        for idx, item in enumerate(items, start=1):
+            processed += 1
+            query = build_contextual_query(item.headline, item.summary)
+
+            image_url: str | None = None
+            image_credit: str | None = None
+            local_image: str | None = None
+
+            for attempt in range(max_attempts):
+                try:
+                    image_url, image_credit = await fetch_royalty_free_image(query)
+                    if not image_url:
+                        raise RuntimeError("no-image-url")
+                    local_image = await download_and_optimize_image(image_url)
+                    if not local_image:
+                        raise RuntimeError("image-download-or-optimize-failed")
+                    break
+                except Exception:
+                    if attempt == max_attempts - 1:
+                        logger.exception("Image refetch failed for item=%s after retries", item.id)
+                        image_url = None
+                        local_image = None
+                        break
+                    delay = 2**attempt
+                    logger.warning(
+                        "Refetch retry item=%s attempt=%d delay=%ds",
+                        item.id,
+                        attempt + 1,
+                        delay,
+                    )
+                    await asyncio.sleep(delay)
+
+            if local_image:
+                refreshed += 1
+                if not dry_run:
+                    item.summary_image_url = local_image
+                    item.summary_image_credit = image_credit or item.summary_image_credit
+                    db.commit()
+
+            print_result(
+                "refetch-images",
+                "progress",
+                current=idx,
+                total=total,
+                refreshed=refreshed,
+                article_id=item.id,
+            )
+
+        return processed, refreshed
+    finally:
+        db.close()
+
+
 def build_parser() -> argparse.ArgumentParser:
     parser = argparse.ArgumentParser(prog="clawfort", description="ClawFort operations CLI")
     subparsers = parser.add_subparsers(dest="command", required=True)
@@ -27,6 +170,64 @@ def build_parser() -> argparse.ArgumentParser:
     )
     force_fetch_parser.set_defaults(handler=handle_force_fetch)
 
+    admin_parser = subparsers.add_parser(
+        "admin",
+        help="Administrative maintenance commands",
+        description="Run admin maintenance and recovery operations.",
+    )
+    admin_subparsers = admin_parser.add_subparsers(dest="admin_command", required=True)
+
+    fetch_parser = admin_subparsers.add_parser("fetch", help="Fetch N articles on demand")
+    fetch_parser.add_argument("--count", type=bounded_count, default=5)
+    fetch_parser.set_defaults(handler=handle_admin_fetch)
+
+    refetch_parser = admin_subparsers.add_parser(
+        "refetch-images",
+        help="Refetch and optimize latest article images",
+    )
+    refetch_parser.add_argument("--limit", type=positive_int, default=30)
+    refetch_parser.add_argument("--max-attempts", type=positive_int, default=4)
+    refetch_parser.add_argument("--dry-run", action="store_true")
+    refetch_parser.set_defaults(handler=handle_admin_refetch_images)
+
+    clean_archive_parser = admin_subparsers.add_parser(
+        "clean-archive",
+        help="Delete archived items older than retention window",
+    )
+    clean_archive_parser.add_argument("--days", type=positive_int, default=60)
+    clean_archive_parser.add_argument("--confirm", action="store_true")
+    clean_archive_parser.add_argument("--dry-run", action="store_true")
+    clean_archive_parser.set_defaults(handler=handle_admin_clean_archive)
+
+    clear_cache_parser = admin_subparsers.add_parser(
+        "clear-cache", help="Clear optimized image cache"
+    )
+    clear_cache_parser.add_argument("--confirm", action="store_true")
+    clear_cache_parser.add_argument("--dry-run", action="store_true")
+    clear_cache_parser.set_defaults(handler=handle_admin_clear_cache)
+
+    clear_news_parser = admin_subparsers.add_parser("clear-news", help="Clear existing news items")
+    clear_news_parser.add_argument("--include-archived", action="store_true")
+    clear_news_parser.add_argument("--confirm", action="store_true")
+    clear_news_parser.add_argument("--dry-run", action="store_true")
+    clear_news_parser.set_defaults(handler=handle_admin_clear_news)
+
+    rebuild_parser = admin_subparsers.add_parser(
+        "rebuild-site", help="Clear and rebuild site content"
+    )
+    rebuild_parser.add_argument("--count", type=bounded_count, default=5)
+    rebuild_parser.add_argument("--confirm", action="store_true")
+    rebuild_parser.add_argument("--dry-run", action="store_true")
+    rebuild_parser.set_defaults(handler=handle_admin_rebuild_site)
+
+    regen_parser = admin_subparsers.add_parser(
+        "regenerate-translations",
+        help="Regenerate translations for existing articles",
+    )
+    regen_parser.add_argument("--limit", type=positive_int, default=0)
+    regen_parser.add_argument("--dry-run", action="store_true")
+    regen_parser.set_defaults(handler=handle_admin_regenerate_translations)
+
     return parser
 
 
@@ -60,6 +261,221 @@ def handle_force_fetch(_: argparse.Namespace) -> int:
     return 1
 
 
+def handle_admin_fetch(args: argparse.Namespace) -> int:
+    start = time.monotonic()
+    try:
+        validate_runtime()
+        init_db()
+        stored = asyncio.run(process_and_store_news(article_count=args.count))
+        elapsed = time.monotonic() - start
+        print_result("fetch", "ok", requested=args.count, stored=stored, elapsed=f"{elapsed:.1f}s")
+        return 0
+    except Exception:
+        logger.exception("admin fetch failed")
+        print_result("fetch", "error")
+        return 1
+
+
+def handle_admin_refetch_images(args: argparse.Namespace) -> int:
+    start = time.monotonic()
+    try:
+        init_db()
+        processed, refreshed = asyncio.run(
+            refetch_images_for_latest(
+                limit=min(args.limit, 30),
+                max_attempts=args.max_attempts,
+                dry_run=args.dry_run,
+            )
+        )
+        elapsed = time.monotonic() - start
+        print_result(
+            "refetch-images",
+            "ok",
+            processed=processed,
+            refreshed=refreshed,
+            dry_run=args.dry_run,
+            elapsed=f"{elapsed:.1f}s",
+        )
+        return 0
+    except Exception:
+        logger.exception("admin refetch-images failed")
+        print_result("refetch-images", "error")
+        return 1
+
+
+def handle_admin_clean_archive(args: argparse.Namespace) -> int:
+    if not require_confirm(args, "clean-archive"):
+        return 2
+
+    db = SessionLocal()
+    try:
+        cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=args.days)
+        query = db.query(NewsItem).filter(
+            and_(NewsItem.archived.is_(True), NewsItem.created_at < cutoff)
+        )
+        count = query.count()
+        if args.dry_run:
+            print_result("clean-archive", "ok", dry_run=True, would_delete=count)
+            return 0
+        deleted = delete_archived_news(db, days_after_archive=args.days)
+        print_result("clean-archive", "ok", deleted=deleted)
+        return 0
+    except Exception:
+        logger.exception("admin clean-archive failed")
+        print_result("clean-archive", "error")
+        return 1
+    finally:
+        db.close()
+
+
+def handle_admin_clear_cache(args: argparse.Namespace) -> int:
+    if not require_confirm(args, "clear-cache"):
+        return 2
+
+    try:
+        os.makedirs(config.STATIC_IMAGES_DIR, exist_ok=True)
+        files = [
+            os.path.join(config.STATIC_IMAGES_DIR, name)
+            for name in os.listdir(config.STATIC_IMAGES_DIR)
+            if name.lower().endswith((".jpg", ".jpeg", ".png", ".webp"))
+        ]
+        if args.dry_run:
+            print_result("clear-cache", "ok", dry_run=True, would_delete=len(files))
+            return 0
+        deleted = 0
+        for file_path in files:
+            try:
+                os.remove(file_path)
+                deleted += 1
+            except OSError:
+                logger.warning("Failed to remove cache file: %s", file_path)
+        print_result("clear-cache", "ok", deleted=deleted)
+        return 0
+    except Exception:
+        logger.exception("admin clear-cache failed")
+        print_result("clear-cache", "error")
+        return 1
+
+
+def handle_admin_clear_news(args: argparse.Namespace) -> int:
+    if not require_confirm(args, "clear-news"):
+        return 2
+
+    db = SessionLocal()
+    try:
+        query = db.query(NewsItem)
+        if not args.include_archived:
+            query = query.filter(NewsItem.archived.is_(False))
+        items = query.all()
+        if args.dry_run:
+            print_result("clear-news", "ok", dry_run=True, would_delete=len(items))
+            return 0
+        deleted = 0
+        for item in items:
+            db.delete(item)
+            deleted += 1
+        db.commit()
+        print_result("clear-news", "ok", deleted=deleted)
+        return 0
+    except Exception:
+        db.rollback()
+        logger.exception("admin clear-news failed")
+        print_result("clear-news", "error")
+        return 1
+    finally:
+        db.close()
+
+
+def handle_admin_rebuild_site(args: argparse.Namespace) -> int:
+    if not require_confirm(args, "rebuild-site"):
+        return 2
+    if args.dry_run:
+        print_result("rebuild-site", "ok", dry_run=True, steps="clear-news,fetch")
+        return 0
+
+    clear_result = handle_admin_clear_news(
+        argparse.Namespace(include_archived=True, confirm=True, dry_run=False)
+    )
+    if clear_result != 0:
+        print_result("rebuild-site", "error", step="clear-news")
+        return clear_result
+
+    fetch_result = handle_admin_fetch(argparse.Namespace(count=args.count))
+    if fetch_result != 0:
+        print_result("rebuild-site", "error", step="fetch")
+        return fetch_result
+
+    print_result("rebuild-site", "ok", count=args.count)
+    return 0
+
+
+def handle_admin_regenerate_translations(args: argparse.Namespace) -> int:
+    db = SessionLocal()
+    try:
+        query = db.query(NewsItem).filter(NewsItem.archived.is_(False)).order_by(desc(NewsItem.id))
+        if args.limit and args.limit > 0:
+            query = query.limit(args.limit)
+        items = query.all()
+
+        regenerated = 0
+        for item in items:
+            tldr_points = resolve_tldr_points(item, None)
+            translations = asyncio.run(
+                generate_translations(
+                    headline=item.headline,
+                    summary=item.summary,
+                    tldr_points=tldr_points,
+                    summary_body=item.summary_body,
+                    source_citation=item.source_citation,
+                )
+            )
+            for language_code, payload in translations.items():
+                if args.dry_run:
+                    regenerated += 1
+                    continue
+                existing = get_translation(db, item.id, language_code)
+                if existing is None:
+                    create_translation(
+                        db=db,
+                        news_item_id=item.id,
+                        language=language_code,
+                        headline=payload["headline"],
+                        summary=payload["summary"],
+                        tldr_points=payload.get("tldr_points"),
+                        summary_body=payload.get("summary_body"),
+                        source_citation=payload.get("source_citation"),
+                    )
+                else:
+                    existing.headline = payload["headline"]
+                    existing.summary = payload["summary"]
+                    existing.tldr_points = (
+                        json.dumps(payload.get("tldr_points"))
+                        if payload.get("tldr_points")
+                        else None
+                    )
+                    existing.summary_body = payload.get("summary_body")
+                    existing.source_citation = payload.get("source_citation")
+                regenerated += 1
+        if not args.dry_run:
+            db.commit()
+
+        print_result(
+            "regenerate-translations",
+            "ok",
+            articles=len(items),
+            regenerated=regenerated,
+            dry_run=args.dry_run,
+        )
+        return 0
+    except Exception:
+        db.rollback()
+        logger.exception("admin regenerate-translations failed")
+        print_result("regenerate-translations", "error")
+        return 1
+    finally:
+        db.close()
+
+
 def main(argv: list[str] | None = None) -> int:
     parser = build_parser()
     args = parser.parse_args(argv)
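The retry loop inside `refetch_images_for_latest` above applies a sequential exponential backoff of `2**attempt` seconds between provider calls. A minimal standalone sketch of that policy, with `fetch_image` as a hypothetical stand-in for the provider call:

```python
import asyncio
import logging

logger = logging.getLogger(__name__)


async def fetch_with_backoff(fetch_image, query: str, max_attempts: int = 4):
    """Retry an async fetch, sleeping 1s, 2s, 4s, ... between attempts."""
    for attempt in range(max_attempts):
        try:
            result = await fetch_image(query)
            if not result:
                raise RuntimeError("no-image-url")
            return result
        except Exception:
            if attempt == max_attempts - 1:
                logger.exception("fetch failed after %d attempts", max_attempts)
                return None
            # Same 2**attempt delay as the CLI loop: 1, 2, 4 seconds by default.
            await asyncio.sleep(2**attempt)
```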
diff --git a/backend/news_service.py b/backend/news_service.py
index dbe00e6..64bdd96 100644
--- a/backend/news_service.py
+++ b/backend/news_service.py
@@ -5,6 +5,7 @@ import logging
 import os
 import re
 import time
+from collections.abc import Awaitable, Callable
 from io import BytesIO
 from urllib.parse import quote_plus
 
@@ -23,6 +24,7 @@ from backend.repository import (
 logger = logging.getLogger(__name__)
 
 PLACEHOLDER_IMAGE_PATH = "/static/images/placeholder.png"
+GENERIC_AI_FALLBACK_URL = "https://placehold.co/1200x630/0f172a/e2e8f0/png?text=AI+News"
 
 
 async def call_perplexity_api(query: str) -> dict | None:
@@ -419,7 +421,7 @@ def extract_image_keywords(headline: str) -> str:
     - Handles edge cases (empty, only stop words, special characters)
     """
     if not headline or not headline.strip():
-        return "news technology"
+        return "ai machine learning deep learning"
 
     # Normalize: remove special characters, keep alphanumeric and spaces
     cleaned = re.sub(r"[^\w\s]", " ", headline)
@@ -433,7 +435,7 @@
     keywords = keywords[:5]
 
     if not keywords:
-        return "news technology"
+        return "ai machine learning deep learning"
 
     return " ".join(keywords)
 
@@ -465,7 +467,7 @@ async def fetch_pixabay_image(query: str) -> tuple[str | None, str | None]:
     except Exception:
         logger.exception("Pixabay image retrieval failed")
 
-    return None, None
+    return GENERIC_AI_FALLBACK_URL, "Generic AI fallback"
 
 
 async def fetch_unsplash_image(query: str) -> tuple[str | None, str | None]:
@@ -569,7 +571,9 @@ _PROVIDER_REGISTRY: dict[str, tuple] = {
 }
 
 
-def get_enabled_providers() -> list[tuple[str, callable]]:
+def get_enabled_providers() -> list[
+    tuple[str, Callable[[str], Awaitable[tuple[str | None, str | None]]]]
+]:
     """Get ordered list of enabled providers based on config and available API keys."""
     provider_names = [
         p.strip().lower() for p in config.ROYALTY_IMAGE_PROVIDERS.split(",") if p.strip()
     ]
@@ -663,8 +667,16 @@ async def download_and_optimize_image(image_url: str) -> str | None:
     return None
 
 
-async def fetch_news_with_retry(max_attempts: int = 3) -> list[dict]:
+async def fetch_news_with_retry(
+    max_attempts: int = 3, article_count: int | None = None
+) -> list[dict]:
     query = "What are the latest AI news from the last hour? Include source URLs and image URLs."
+    if article_count is not None:
+        bounded = max(1, min(50, int(article_count)))
+        query = (
+            f"What are the latest AI news from the last hour? Return exactly {bounded} items. "
+            "Include source URLs and image URLs."
+        )
 
     for attempt in range(max_attempts):
         try:
@@ -687,8 +699,8 @@
     return []
 
 
-async def process_and_store_news() -> int:
-    items = await fetch_news_with_retry()
+async def process_and_store_news(article_count: int | None = None) -> int:
+    items = await fetch_news_with_retry(article_count=article_count)
     if not items:
         logger.warning("No news items fetched this cycle")
         return 0
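As a quick check of the `article_count` bounding introduced in `fetch_news_with_retry` above: the clamp forces any requested count into the 1 to 50 range before embedding it in the Perplexity query, mirroring the `bounded_count` CLI validator. A sketch restating that logic:

```python
def bounded_article_count(article_count: int) -> int:
    # Same clamp as fetch_news_with_retry: floor at 1, cap at 50.
    return max(1, min(50, int(article_count)))


assert bounded_article_count(10) == 10
assert bounded_article_count(0) == 1     # below range -> raised to 1
assert bounded_article_count(120) == 50  # above range -> capped at 50
```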
diff --git a/backend/static/images/03d10966744a543b01f76d3f8f9f5a44.jpg b/backend/static/images/03d10966744a543b01f76d3f8f9f5a44.jpg
new file mode 100644
index 0000000..c1469ef
Binary files /dev/null and b/backend/static/images/03d10966744a543b01f76d3f8f9f5a44.jpg differ
diff --git a/backend/static/images/0f256078c66f1923de1fce7e0ac70ba0.jpg b/backend/static/images/0f256078c66f1923de1fce7e0ac70ba0.jpg
new file mode 100644
index 0000000..bef0715
Binary files /dev/null and b/backend/static/images/0f256078c66f1923de1fce7e0ac70ba0.jpg differ
diff --git a/backend/static/images/16237b1efa58b893b9458713ff8304ad.jpg b/backend/static/images/16237b1efa58b893b9458713ff8304ad.jpg
new file mode 100644
index 0000000..766f3b5
Binary files /dev/null and b/backend/static/images/16237b1efa58b893b9458713ff8304ad.jpg differ
diff --git a/backend/static/images/173c86c39207d39e5684d356369046ef.jpg b/backend/static/images/173c86c39207d39e5684d356369046ef.jpg
new file mode 100644
index 0000000..db3836f
Binary files /dev/null and b/backend/static/images/173c86c39207d39e5684d356369046ef.jpg differ
diff --git a/backend/static/images/1f35b940c79df0d9cd07650a5d439dac.jpg b/backend/static/images/1f35b940c79df0d9cd07650a5d439dac.jpg
new file mode 100644
index 0000000..793c0d7
Binary files /dev/null and b/backend/static/images/1f35b940c79df0d9cd07650a5d439dac.jpg differ
diff --git a/backend/static/images/1fd212a8fb8dfdcb72be46aaf8c27c80.jpg b/backend/static/images/1fd212a8fb8dfdcb72be46aaf8c27c80.jpg
new file mode 100644
index 0000000..939f8d2
Binary files /dev/null and b/backend/static/images/1fd212a8fb8dfdcb72be46aaf8c27c80.jpg differ
diff --git a/backend/static/images/2fbeef6191605d8a528ee5a0d7333d20.jpg b/backend/static/images/2fbeef6191605d8a528ee5a0d7333d20.jpg
new file mode 100644
index 0000000..ea8588e
Binary files /dev/null and b/backend/static/images/2fbeef6191605d8a528ee5a0d7333d20.jpg differ
diff --git a/backend/static/images/3c406d580431ebfa2ec850741540632a.jpg b/backend/static/images/3c406d580431ebfa2ec850741540632a.jpg
new file mode 100644
index 0000000..3657a32
Binary files /dev/null and b/backend/static/images/3c406d580431ebfa2ec850741540632a.jpg differ
diff --git a/backend/static/images/453bc7a5351756e01b681f6be72eddf5.jpg b/backend/static/images/453bc7a5351756e01b681f6be72eddf5.jpg
new file mode 100644
index 0000000..7b4aa5f
Binary files /dev/null and b/backend/static/images/453bc7a5351756e01b681f6be72eddf5.jpg differ
diff --git a/backend/static/images/4d69537bcdf5bee088ba2ad5371bb14d.jpg b/backend/static/images/4d69537bcdf5bee088ba2ad5371bb14d.jpg
new file mode 100644
index 0000000..d970ab2
Binary files /dev/null and b/backend/static/images/4d69537bcdf5bee088ba2ad5371bb14d.jpg differ
diff --git a/backend/static/images/593ebfe0d9f505d189822895e2eedeae.jpg b/backend/static/images/593ebfe0d9f505d189822895e2eedeae.jpg
new file mode 100644
index 0000000..b3f1b6f
Binary files /dev/null and b/backend/static/images/593ebfe0d9f505d189822895e2eedeae.jpg differ
diff --git a/backend/static/images/636a9ea318f9c6d8e81682a4e934480a.jpg b/backend/static/images/636a9ea318f9c6d8e81682a4e934480a.jpg
new file mode 100644
index 0000000..01c7a25
Binary files /dev/null and b/backend/static/images/636a9ea318f9c6d8e81682a4e934480a.jpg differ
diff --git a/backend/static/images/6c9fee8a62ae79475312057f16fb8af0.jpg b/backend/static/images/6c9fee8a62ae79475312057f16fb8af0.jpg
new file mode 100644
index 0000000..b0499ec
Binary files /dev/null and b/backend/static/images/6c9fee8a62ae79475312057f16fb8af0.jpg differ
diff --git a/backend/static/images/7ce1961a9a6d87e0af2a54840e7902a2.jpg b/backend/static/images/7ce1961a9a6d87e0af2a54840e7902a2.jpg
new file mode 100644
index 0000000..c30d951
Binary files /dev/null and b/backend/static/images/7ce1961a9a6d87e0af2a54840e7902a2.jpg differ
diff --git a/backend/static/images/808150f6bed9baf3a20bb4fc61cc6f4c.jpg b/backend/static/images/808150f6bed9baf3a20bb4fc61cc6f4c.jpg
new file mode 100644
index 0000000..2a05ee2
Binary files /dev/null and b/backend/static/images/808150f6bed9baf3a20bb4fc61cc6f4c.jpg differ
diff --git a/backend/static/images/8298d980d5a6fee90d308d170c57549d.jpg b/backend/static/images/8298d980d5a6fee90d308d170c57549d.jpg
new file mode 100644
index 0000000..947271d
Binary files /dev/null and b/backend/static/images/8298d980d5a6fee90d308d170c57549d.jpg differ
diff --git a/backend/static/images/93c252929772709cbd517afce3ceac95.jpg b/backend/static/images/93c252929772709cbd517afce3ceac95.jpg
new file mode 100644
index 0000000..5561d11
Binary files /dev/null and b/backend/static/images/93c252929772709cbd517afce3ceac95.jpg differ
diff --git a/backend/static/images/a3b7c96431c59fb77a87d0a26d9b53b2.jpg b/backend/static/images/a3b7c96431c59fb77a87d0a26d9b53b2.jpg
new file mode 100644
index 0000000..67a953b
Binary files /dev/null and b/backend/static/images/a3b7c96431c59fb77a87d0a26d9b53b2.jpg differ
diff --git a/backend/static/images/a9cd2b37ca1969434e65fb9bed294b74.jpg b/backend/static/images/a9cd2b37ca1969434e65fb9bed294b74.jpg
new file mode 100644
index 0000000..9d5f02d
Binary files /dev/null and b/backend/static/images/a9cd2b37ca1969434e65fb9bed294b74.jpg differ
diff --git a/backend/static/images/b16ae02672911e2bdf655cbf96b9a2f7.jpg b/backend/static/images/b16ae02672911e2bdf655cbf96b9a2f7.jpg
new file mode 100644
index 0000000..60759f3
Binary files /dev/null and b/backend/static/images/b16ae02672911e2bdf655cbf96b9a2f7.jpg differ
diff --git a/backend/static/images/b3edebaad82cdbf32de85d8cc5f274ad.jpg b/backend/static/images/b3edebaad82cdbf32de85d8cc5f274ad.jpg
new file mode 100644
index 0000000..fbfd0ee
Binary files /dev/null and b/backend/static/images/b3edebaad82cdbf32de85d8cc5f274ad.jpg differ
diff --git a/backend/static/images/bdd724aff1b83c6381da2d364b729326.jpg b/backend/static/images/bdd724aff1b83c6381da2d364b729326.jpg
new file mode 100644
index 0000000..7b03547
Binary files /dev/null and b/backend/static/images/bdd724aff1b83c6381da2d364b729326.jpg differ
diff --git a/backend/static/images/c638156b3201d2debc5ba5955fa54aa2.jpg b/backend/static/images/c638156b3201d2debc5ba5955fa54aa2.jpg
new file mode 100644
index 0000000..28b2572
Binary files /dev/null and b/backend/static/images/c638156b3201d2debc5ba5955fa54aa2.jpg differ
diff --git a/backend/static/images/d8e11fc6776d40a9389afe578313071b.jpg b/backend/static/images/d8e11fc6776d40a9389afe578313071b.jpg
new file mode 100644
index 0000000..6d351d8
Binary files /dev/null and b/backend/static/images/d8e11fc6776d40a9389afe578313071b.jpg differ
diff --git a/backend/static/images/e9f56690cf067da751ce58f2f59f4627.jpg b/backend/static/images/e9f56690cf067da751ce58f2f59f4627.jpg
new file mode 100644
index 0000000..fadc48d
Binary files /dev/null and b/backend/static/images/e9f56690cf067da751ce58f2f59f4627.jpg differ
diff --git a/backend/static/images/e9fd9f16cc882f7d112cc174a2efefc7.jpg b/backend/static/images/e9fd9f16cc882f7d112cc174a2efefc7.jpg
new file mode 100644
index 0000000..7a4c050
Binary files /dev/null and b/backend/static/images/e9fd9f16cc882f7d112cc174a2efefc7.jpg differ
diff --git a/data/clawfort.db b/data/clawfort.db
index 12d0ded..576e688 100644
Binary files a/data/clawfort.db and b/data/clawfort.db differ
diff --git a/frontend/index.html b/frontend/index.html
index 6140d76..ff3c0ae 100644
--- a/frontend/index.html
+++ b/frontend/index.html
@@ -126,11 +126,28 @@
     }
     .theme-menu-item:hover { background: rgba(92, 124, 250, 0.15); }
     .hero-overlay {
-      background: linear-gradient(to top, rgba(15, 23, 42, 0.92), rgba(15, 23, 42, 0.45), transparent);
+      background: linear-gradient(to top, rgba(2, 6, 23, 0.94), rgba(15, 23, 42, 0.62), rgba(15, 23, 42, 0.22), transparent);
     }
     .hero-title { color: #e2e8f0; text-shadow: 0 2px 8px rgba(0, 0, 0, 0.55); }
     .hero-summary { color: #cbd5e1; text-shadow: 0 1px 6px rgba(0, 0, 0, 0.55); }
-    .hero-meta { color: #cbd5e1; }
+    .hero-meta { color: #e2e8f0; text-shadow: 0 1px 6px rgba(0, 0, 0, 0.55); }
+    .hero-latest-pill {
+      background: rgba(59, 130, 246, 0.2);
+      color: #dbeafe;
+      border: 1px solid rgba(147, 197, 253, 0.45);
+    }
+    .hero-time-pill {
+      background: rgba(15, 23, 42, 0.55);
+      color: #e2e8f0;
+      border: 1px solid rgba(148, 163, 184, 0.35);
+      padding: 3px 8px;
+      border-radius: 9999px;
+    }
+    .tldr-shimmer {
+      height: 12px;
+      width: 85%;
+      border-radius: 9999px;
+    }
     .news-card-title { color: var(--cf-text-strong); }
     .news-card-summary { color: var(--cf-text-muted); }
     .news-card-meta { color: var(--cf-text-muted); }
@@ -157,7 +174,17 @@
       color: #f8fafc;
     }
     html[data-theme='light'] .hero-overlay {
-      background: linear-gradient(to top, rgba(15, 23, 42, 0.9), rgba(15, 23, 42, 0.35), transparent);
+      background: linear-gradient(to top, rgba(15, 23, 42, 0.92), rgba(30, 41, 59, 0.58), rgba(30, 41, 59, 0.2), transparent);
+    }
+    html[data-theme='light'] .hero-latest-pill {
+      background: rgba(37, 99, 235, 0.24);
+      border-color: rgba(37, 99, 235, 0.55);
+      color: #eff6ff;
+    }
+    html[data-theme='light'] .hero-time-pill {
+      background: rgba(15, 23, 42, 0.52);
+      border-color: rgba(226, 232, 240, 0.35);
+      color: #f8fafc;
     }
     html[data-theme='light'] .news-card-btn { color: #1e3a8a; }
     html[data-theme='light'] .modal-cta {
@@ -257,15 +284,15 @@
-          LATEST
-
+          LATEST
+
-
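The final hunk above lost its HTML tags in extraction; only the `LATEST` label survives. Based on the pill classes defined in the CSS hunks, the updated hero badge markup presumably looks something like the following (hypothetical reconstruction for illustration, not the actual template):

```html
<!-- Hypothetical usage of the new pill classes; the real markup lives in frontend/index.html -->
<div class="hero-meta">
  <span class="hero-latest-pill">LATEST</span>
  <span class="hero-time-pill">5 minutes ago</span>
</div>
```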