chore: commit current DB and add SQL dump export/import scripts

Include data/drafts.db so other machines don't need to re-run
expensive Claude API calls (~$3+ of analysis, 474 drafts, 403 authors,
1262 ideas, 12 gaps). Add scripts/db-export.sh and scripts/db-import.sh
for portable compressed SQL dump sharing.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-09 03:21:00 +01:00
parent 5ec7410b89
commit 4710668419
3 changed files with 46 additions and 0 deletions

26
scripts/db-import.sh Executable file
View File

@@ -0,0 +1,26 @@
#!/usr/bin/env bash
# Import SQL dump into drafts.db, replacing existing data.
# Usage: scripts/db-import.sh [input_path]
# Default input: data/drafts.sql.gz (the format written by scripts/db-export.sh).
set -euo pipefail

readonly DB="data/drafts.db"
IN="${1:-data/drafts.sql.gz}"

# Fail early, before touching the existing DB, if sqlite3 is missing.
command -v sqlite3 >/dev/null || { echo "Error: sqlite3 not found in PATH" >&2; exit 1; }

if [[ ! -f "$IN" ]]; then
  echo "Error: $IN not found" >&2
  exit 1
fi

# Keep a timestamped copy of the current DB so a bad import is recoverable.
BACKUP=""
if [[ -f "$DB" ]]; then
  BACKUP="data/drafts.db.bak.$(date +%s)"
  echo "Backing up existing DB → $BACKUP"
  cp "$DB" "$BACKUP"
  rm "$DB"
fi

echo "Importing $IN → $DB"
# If the import fails partway, don't leave a half-written DB behind:
# remove it and restore the backup taken above (if any).
if ! gunzip -c "$IN" | sqlite3 "$DB"; then
  echo "Error: import failed" >&2
  rm -f "$DB"
  if [[ -n "$BACKUP" ]]; then
    echo "Restoring backup → $DB" >&2
    cp "$BACKUP" "$DB"
  fi
  exit 1
fi

ROWS=$(sqlite3 "$DB" "SELECT count(*) FROM drafts;")
echo "Done: $ROWS drafts restored"