chore: commit current DB and add SQL dump export/import scripts
Include data/drafts.db so other machines don't need to re-run expensive Claude API calls (~$3+ of analysis; 474 drafts, 403 authors, 1262 ideas, 12 gaps). Add scripts/db-export.sh and scripts/db-import.sh for portable, compressed SQL-dump sharing.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
20
scripts/db-export.sh
Executable file
20
scripts/db-export.sh
Executable file
@@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env bash
# Export drafts.db as compressed SQL dump for portable sharing.
# Usage: scripts/db-export.sh [output_path]

set -euo pipefail

DB="data/drafts.db"
OUT="${1:-data/drafts.sql.gz}"

# Fail early with a clear message instead of a cryptic "command not found".
command -v sqlite3 >/dev/null || { echo "Error: sqlite3 not installed" >&2; exit 1; }

if [[ ! -f "$DB" ]]; then
  echo "Error: $DB not found" >&2
  exit 1
fi

echo "Exporting $DB → $OUT"
# Dump into a temp file in the same directory, then rename into place.
# With pipefail a failed sqlite3/gzip stage aborts the script, and the
# trap discards the partial file — an existing good dump is never
# clobbered by a broken one (the original wrote straight to $OUT).
TMP=$(mktemp "${OUT}.XXXXXX")
trap 'rm -f -- "$TMP"' EXIT
sqlite3 "$DB" .dump | gzip -9 > "$TMP"
mv -- "$TMP" "$OUT"
trap - EXIT

SIZE=$(du -h "$OUT" | cut -f1)
TABLES=$(sqlite3 "$DB" "SELECT count(*) FROM sqlite_master WHERE type='table';")
echo "Done: $SIZE ($TABLES tables)"
|
||||
26
scripts/db-import.sh
Executable file
26
scripts/db-import.sh
Executable file
@@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env bash
# Import SQL dump into drafts.db, replacing existing data.
# Usage: scripts/db-import.sh [input_path]

set -euo pipefail

DB="data/drafts.db"
IN="${1:-data/drafts.sql.gz}"

# Fail early with a clear message instead of a cryptic "command not found".
command -v sqlite3 >/dev/null || { echo "Error: sqlite3 not installed" >&2; exit 1; }

if [[ ! -f "$IN" ]]; then
  echo "Error: $IN not found" >&2
  exit 1
fi

echo "Importing $IN → $DB"
# Restore into a temp DB first: the original rm'd the live DB *before*
# importing, so a failed gunzip/sqlite3 (fatal under pipefail) left a
# missing or half-written database. SQLite treats the zero-byte file
# mktemp creates as a fresh, empty database.
TMP=$(mktemp "${DB}.import.XXXXXX")
trap 'rm -f -- "$TMP"' EXIT
gunzip -c "$IN" | sqlite3 "$TMP"

# Only once the restore succeeded: back up and replace the live DB.
if [[ -f "$DB" ]]; then
  BACKUP="data/drafts.db.bak.$(date +%s)"
  echo "Backing up existing DB → $BACKUP"
  cp "$DB" "$BACKUP"
  # Drop stale WAL/SHM sidecars so leftovers from the old DB can't be
  # replayed into the freshly imported one.
  rm -f -- "$DB" "${DB}-wal" "${DB}-shm"
fi

mv -- "$TMP" "$DB"
trap - EXIT

ROWS=$(sqlite3 "$DB" "SELECT count(*) FROM drafts;")
echo "Done: $ROWS drafts restored"
|
||||
Reference in New Issue
Block a user