diff --git a/.github/workflows/build-database.yml b/.github/workflows/build-database.yml
new file mode 100644
index 0000000..9d7f4c4
--- /dev/null
+++ b/.github/workflows/build-database.yml
@@ -0,0 +1,248 @@
+name: Build Awesome Database
+
+on:
+  schedule:
+    # Run daily at 02:00 UTC
+    - cron: '0 2 * * *'
+  workflow_dispatch: # Allow manual triggering
+    inputs:
+      index_mode:
+        description: 'Indexing mode'
+        required: false
+        default: 'full'
+        type: choice
+        options:
+          - full
+          - sample
+
+permissions:
+  contents: read
+  actions: write
+
+jobs:
+  build-database:
+    runs-on: ubuntu-latest
+    timeout-minutes: 180 # 3 hours max
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Setup Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: '22'
+
+      - name: Setup pnpm
+        uses: pnpm/action-setup@v3
+        with:
+          version: 8
+
+      - name: Install dependencies
+        run: |
+          pnpm install
+          pnpm rebuild better-sqlite3
+
+      - name: Configure GitHub token for API access
+        run: |
+          chmod +x awesome
+          # Store the token so the indexer gets authenticated rate limits
+          # (5,000 requests/hour instead of 60/hour)
+          export GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}"
+          node -e "
+            const db = require('./lib/database');
+            const dbOps = require('./lib/db-operations');
+            db.initialize();
+            dbOps.setSetting('githubToken', process.env.GITHUB_TOKEN);
+            db.close();
+            console.log('GitHub token configured');
+          "
+
+      - name: Build awesome database
+        id: build
+        run: |
+          # Capture start time
+          START_TIME=$(date -u +"%Y-%m-%d %H:%M:%S UTC")
+          echo "start_time=$START_TIME" >> $GITHUB_OUTPUT
+
+          # Determine index mode (sample indexes a random 10 lists; full indexes everything)
+          INDEX_MODE="${{ github.event.inputs.index_mode || 'full' }}"
+          export INDEX_MODE
+          echo "Index mode: $INDEX_MODE"
+
+          # The indexer normally prompts for a mode interactively, so we
+          # pre-feed the chosen mode onto stdin before starting the build.
+          echo "Building $INDEX_MODE index..."
+          timeout 150m node -e "
+            const indexer = require('./lib/indexer');
+            (async () => {
+              try {
+                // Simulate the user choosing the requested mode
+                process.stdin.push(process.env.INDEX_MODE + '\n');
+                await indexer.buildIndex(false);
+                console.log('Index built successfully');
+                process.exit(0);
+              } catch (error) {
+                console.error('Failed to build index:', error.message);
+                process.exit(1);
+              }
+            })();
+          " || echo "Index build timed out or failed; packaging whatever was indexed"
+
+          # Capture end time
+          END_TIME=$(date -u +"%Y-%m-%d %H:%M:%S UTC")
+          echo "end_time=$END_TIME" >> $GITHUB_OUTPUT
+
+      - name: Gather database statistics
+        id: stats
+        run: |
+          # Get database stats
+          STATS=$(node -e "
+            const db = require('./lib/database');
+            const dbOps = require('./lib/db-operations');
+            db.initialize();
+
+            const stats = dbOps.getIndexStats();
+            const dbPath = require('path').join(require('os').homedir(), '.awesome', 'awesome.db');
+            const fs = require('fs');
+            const fileSize = fs.existsSync(dbPath) ?
+              fs.statSync(dbPath).size : 0;
+            const fileSizeMB = (fileSize / (1024 * 1024)).toFixed(2);
+
+            console.log(JSON.stringify({
+              totalLists: stats.totalLists || 0,
+              totalRepos: stats.totalRepositories || 0,
+              totalReadmes: stats.totalReadmes || 0,
+              sizeBytes: fileSize,
+              sizeMB: fileSizeMB
+            }));
+
+            db.close();
+          ")
+
+          echo "Database statistics:"
+          echo "$STATS" | jq .
+
+          # Extract values for outputs
+          TOTAL_LISTS=$(echo "$STATS" | jq -r '.totalLists')
+          TOTAL_REPOS=$(echo "$STATS" | jq -r '.totalRepos')
+          TOTAL_READMES=$(echo "$STATS" | jq -r '.totalReadmes')
+          SIZE_MB=$(echo "$STATS" | jq -r '.sizeMB')
+
+          echo "total_lists=$TOTAL_LISTS" >> $GITHUB_OUTPUT
+          echo "total_repos=$TOTAL_REPOS" >> $GITHUB_OUTPUT
+          echo "total_readmes=$TOTAL_READMES" >> $GITHUB_OUTPUT
+          echo "size_mb=$SIZE_MB" >> $GITHUB_OUTPUT
+
+      - name: Prepare database artifact
+        run: |
+          # Copy database from home directory
+          DB_PATH="$HOME/.awesome/awesome.db"
+
+          if [ ! -f "$DB_PATH" ]; then
+            echo "Error: Database file not found at $DB_PATH"
+            exit 1
+          fi
+
+          # Create artifact directory
+          mkdir -p artifacts
+
+          # Copy database with timestamp
+          BUILD_DATE=$(date -u +"%Y%m%d-%H%M%S")
+          cp "$DB_PATH" "artifacts/awesome-${BUILD_DATE}.db"
+          cp "$DB_PATH" "artifacts/awesome-latest.db"
+
+          # Create metadata file (fields mirror the "Metadata Structure"
+          # documented in WORKFLOWS.md)
+          cat > artifacts/metadata.json <<EOF
+          {
+            "build_date": "$(date -u +"%Y-%m-%d %H:%M:%S UTC")",
+            "build_timestamp": $(date -u +%s),
+            "git_sha": "${{ github.sha }}",
+            "workflow_run_id": "${{ github.run_id }}",
+            "total_lists": ${{ steps.stats.outputs.total_lists }},
+            "total_repos": ${{ steps.stats.outputs.total_repos }},
+            "total_readmes": ${{ steps.stats.outputs.total_readmes }},
+            "size_mb": ${{ steps.stats.outputs.size_mb }},
+            "node_version": "$(node --version)",
+            "index_mode": "${{ github.event.inputs.index_mode || 'full' }}"
+          }
+          EOF
+
+      - name: Upload database artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: awesome-database-${{ github.run_id }}
+          path: artifacts/
+          retention-days: 90
+
+      - name: Generate build summary
+        run: |
+          cat >> $GITHUB_STEP_SUMMARY <<'EOF'
+          ## 📊 Database Build Report
+
+          | Metric | Value |
+          |--------|-------|
+          | Lists indexed | ${{ steps.stats.outputs.total_lists }} |
+          | Repositories | ${{ steps.stats.outputs.total_repos }} |
+          | READMEs | ${{ steps.stats.outputs.total_readmes }} |
+          | Database size | ${{ steps.stats.outputs.size_mb }} MB |
+
+          - **Started:** ${{ steps.build.outputs.start_time }}
+          - **Finished:** ${{ steps.build.outputs.end_time }}
+          - **Artifact:** `awesome-database-${{ github.run_id }}` (retained for 90 days)
+          - **Run:** ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
+
+          Download with:
+
+          ```bash
+          gh run download ${{ github.run_id }} -n awesome-database-${{ github.run_id }}
+          ```
+          EOF
diff --git a/.github/workflows/cleanup-artifacts.yml b/.github/workflows/cleanup-artifacts.yml
new file mode 100644
--- /dev/null
+++ b/.github/workflows/cleanup-artifacts.yml
@@ -0,0 +1,121 @@
+name: Cleanup Old Artifacts
+
+on:
+  schedule:
+    # Run daily at 03:00 UTC, after the database build
+    - cron: '0 3 * * *'
+  workflow_dispatch: # Allow manual triggering
+    inputs:
+      retention_days:
+        description: 'Days of artifacts to keep'
+        required: false
+        default: '30'
+      dry_run:
+        description: 'Preview deletions without deleting'
+        required: false
+        default: 'false'
+
+permissions:
+  actions: write
+
+jobs:
+  cleanup-artifacts:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Delete old artifacts and generate report
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          REPO: ${{ github.repository }}
+        run: |
+          RETENTION_DAYS="${{ github.event.inputs.retention_days || '30' }}"
+          DRY_RUN="${{ github.event.inputs.dry_run || 'false' }}"
+          CUTOFF=$(date -u -d "$RETENTION_DAYS days ago" +%Y-%m-%dT%H:%M:%SZ)
+
+          DELETE_COUNT=0
+          KEEP_COUNT=0
+          FREED_BYTES=0
+          DELETED_ROWS=""
+          KEPT_ROWS=""
+
+          # Scan every artifact; ISO-8601 timestamps compare lexicographically
+          while IFS=$'\t' read -r ID NAME SIZE CREATED; do
+            AGE_DAYS=$(( ($(date -u +%s) - $(date -u -d "$CREATED" +%s)) / 86400 ))
+            SIZE_MB=$(( SIZE / 1024 / 1024 ))
+            if [[ "$CREATED" < "$CUTOFF" ]]; then
+              if [ "$DRY_RUN" != "true" ]; then
+                gh api -X DELETE "/repos/$REPO/actions/artifacts/$ID"
+              fi
+              DELETE_COUNT=$((DELETE_COUNT + 1))
+              FREED_BYTES=$((FREED_BYTES + SIZE))
+              DELETED_ROWS+="| $NAME | ${SIZE_MB}MB | $CREATED | ${AGE_DAYS}d |"$'\n'
+            else
+              KEEP_COUNT=$((KEEP_COUNT + 1))
+              KEPT_ROWS+="| $NAME | ${SIZE_MB}MB | $CREATED | ${AGE_DAYS}d |"$'\n'
+            fi
+          done < <(gh api "/repos/$REPO/actions/artifacts" --paginate \
+            | jq -r '.artifacts[] | [.id, .name, .size_in_bytes, .created_at] | @tsv')
+
+          FREED_MB=$((FREED_BYTES / 1024 / 1024))
+
+          # Summary statistics
+          cat >> $GITHUB_STEP_SUMMARY <<EOF
+          ## 🧹 Artifact Cleanup Report
+
+          | Scanned | Deleted | Kept | Freed |
+          |---------|---------|------|-------|
+          | $((DELETE_COUNT + KEEP_COUNT)) | $DELETE_COUNT | $KEEP_COUNT | ${FREED_MB}MB |
+          EOF
+
+          # Add deleted artifacts table if any
+          if [ $DELETE_COUNT -gt 0 ]; then
+            cat >> $GITHUB_STEP_SUMMARY <<EOF
+
+          ### Deleted Artifacts
+
+          | Name | Size | Created | Age |
+          |------|------|---------|-----|
+          EOF
+            printf '%s' "$DELETED_ROWS" >> $GITHUB_STEP_SUMMARY
+          fi
+
+          # Add kept artifacts table if any
+          if [ $KEEP_COUNT -gt 0 ]; then
+            cat >> $GITHUB_STEP_SUMMARY <<EOF
+
+          ### Kept Artifacts (first 10)
+
+          | Name | Size | Created | Age |
+          |------|------|---------|-----|
+          EOF
+            head -n 10 <<< "$KEPT_ROWS" >> $GITHUB_STEP_SUMMARY
+
+            if [ $KEEP_COUNT -gt 10 ]; then
+              echo "" >> $GITHUB_STEP_SUMMARY
+              echo "*...and $((KEEP_COUNT - 10)) more*" >> $GITHUB_STEP_SUMMARY
+            fi
+          fi
+
+          cat >> $GITHUB_STEP_SUMMARY <<EOF
+
+          Retention period: ${RETENTION_DAYS} days (dry run: ${DRY_RUN})
+          EOF
+
+      - name: Delete old workflow runs
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          REPO: ${{ github.repository }}
+        run: |
+          DRY_RUN="${{ github.event.inputs.dry_run || 'false' }}"
+          if [ "$DRY_RUN" = "true" ]; then
+            echo "Dry run: skipping workflow run cleanup"
+            exit 0
+          fi
+          # Remove runs older than 30 days, always keeping the 50 most recent
+          CUTOFF=$(date -u -d '30 days ago' +%Y-%m-%dT%H:%M:%SZ)
+          gh api "/repos/$REPO/actions/runs?per_page=100" \
+            | jq -r --arg cutoff "$CUTOFF" \
+                '.workflow_runs[50:][] | select(.created_at < $cutoff) | .id' \
+            | while read -r RUN_ID; do
+                gh api -X DELETE "/repos/$REPO/actions/runs/$RUN_ID"
+              done

 | Command | Description |
 |---------|-------------|
 | `./awesome` | Interactive menu (recommended) |
-| `./awesome index` | Build/rebuild index |
+| `./awesome db` | Download pre-built database ⚡ |
+| `./awesome index` | Build/rebuild index locally |
 | `./awesome search "query"` | Quick search |
 | `./awesome shell` | Interactive shell |
 | `./awesome browse` | Browse awesome lists |
diff --git a/README.md b/README.md
index 4f8c0db..870876b 100644
--- a/README.md
+++ b/README.md
@@ -38,11 +38,69 @@ Beautiful purple, pink, and gold gradient color scheme throughout the entire app
 
 ## 📦 Installation
 
+### Option 1: Use Pre-Built Database (Recommended) ⚡
+
+Skip the lengthy indexing process! Download a pre-built database that's automatically updated daily.
+
+```bash
+# Clone the repository
+git clone https://github.com/YOUR_USERNAME/awesome.git
+cd awesome
+
+# Install dependencies
+pnpm install
+pnpm rebuild better-sqlite3
+chmod +x awesome
+
+# Download pre-built database (easiest - uses GitHub CLI)
+./awesome db
+
+# Or use the standalone script
+./scripts/download-db.sh
+
+# Start using immediately!
+./awesome
+```
+
+**Database is rebuilt daily** by GitHub Actions with full indexing of all awesome lists!
+
+**Two ways to download:**
+- `./awesome db` - Built-in command with interactive menu
+- `./scripts/download-db.sh` - Standalone script with more options
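+
+Optionally, sanity-check the download before using it. A minimal check, assuming the `sqlite3` CLI is installed (the table names are whatever the indexer created, so we list them instead of guessing):
+
+```bash
+# Confirm the file is a healthy SQLite database (prints "ok")
+sqlite3 ~/.awesome/awesome.db 'PRAGMA integrity_check;'
+
+# List the tables the indexer created
+sqlite3 ~/.awesome/awesome.db "SELECT name FROM sqlite_master WHERE type='table';"
+```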
+
+#### Download Database Manually
+
+If you prefer manual download or the script doesn't work:
+
+```bash
+# Install GitHub CLI if needed
+# macOS: brew install gh
+# Ubuntu: sudo apt install gh
+# Windows: winget install GitHub.cli
+
+# Authenticate with GitHub
+gh auth login
+
+# Find the latest successful build and download its artifact
+RUN_ID=$(gh api "repos/YOUR_USERNAME/awesome/actions/workflows/build-database.yml/runs?status=success" \
+  | jq -r '.workflow_runs[0].id')
+gh run download $RUN_ID --repo YOUR_USERNAME/awesome -n awesome-database-$RUN_ID
+
+# Move to correct location
+mkdir -p ~/.awesome
+cp awesome-latest.db ~/.awesome/awesome.db
+```
+
+### Option 2: Build Database Locally 🔨
+
+Build the index yourself (takes 1-2 hours for full indexing):
+
 ```bash
-cd /home/valknar/Projects/node.js/awesome
+git clone https://github.com/YOUR_USERNAME/awesome.git
+cd awesome
 pnpm install
 pnpm rebuild better-sqlite3
 chmod +x awesome
+
+# Build the index
+./awesome index
 ```
 
 ## ⚡ GitHub Rate Limits - SOLVED with OAuth! 🔐
@@ -86,7 +144,10 @@ See [OAUTH_SETUP.md](OAUTH_SETUP.md) for complete guide!
 
 ### Commands
 
 ```bash
-# Build the index (run this first!)
+# Download pre-built database (fast!)
+./awesome db
+
+# Build the index locally (slow - 1-2 hours)
 ./awesome index
 
 # Search
@@ -164,6 +225,69 @@ The application uses SQLite3 with FTS5 for full-text search. Data is stored in `~/.awesome/awesome.db`.
 
 - **Ora & Nanospinner** - Loading animations
 - **pnpm** - Fast, efficient package manager
 
+## 🤖 Automated Database Builds
+
+The repository includes GitHub Actions workflows for automated database management:
+
+### Daily Database Build
+
+**Schedule:** Runs daily at 02:00 UTC
+
+**What it does:**
+- Fetches all awesome lists from [sindresorhus/awesome](https://github.com/sindresorhus/awesome)
+- Recursively indexes all README files
+- Collects GitHub metadata (stars, forks, etc.)
+- Compresses and uploads the database as an artifact
+- Generates a build report with statistics
+
+**Manual Trigger:**
+You can manually trigger a database build from the GitHub Actions tab, or via CLI:
+```bash
+gh workflow run build-database.yml -f index_mode=full
+```
+
+**Artifact Details:**
+- **Retention:** 90 days
+- **Size:** ~50-200MB (compressed)
+- **Contains:** Full database + metadata JSON
+- **Naming:** `awesome-database-{run_id}`
+
+### Artifact Cleanup
+
+**Schedule:** Runs daily at 03:00 UTC (after the database build)
+
+**What it does:**
+- Removes artifacts older than 30 days (configurable)
+- Cleans up old workflow runs
+- Generates a cleanup report
+- Dry-run mode available for testing
+
+**Manual Trigger:**
+```bash
+# Standard cleanup (30 days retention)
+gh workflow run cleanup-artifacts.yml
+
+# Custom retention period
+gh workflow run cleanup-artifacts.yml -f retention_days=60
+
+# Dry run (preview only)
+gh workflow run cleanup-artifacts.yml -f dry_run=true
+```
+
+### Download Helper Script
+
+The `scripts/download-db.sh` script provides an interactive interface to:
+- List available database builds
+- View build metadata (date, size, commit)
+- Download and install a selected database
+- Back up the existing database automatically
+
+**Features:**
+- Interactive selection menu
+- Automatic backup of existing databases
+- GitHub CLI integration
+- Cross-platform support (Linux, macOS, Windows/Git Bash)
+
 ## 📝 License
 
 MIT
diff --git a/WORKFLOWS.md b/WORKFLOWS.md
new file mode 100644
index 0000000..d5a9f65
--- /dev/null
+++ b/WORKFLOWS.md
@@ -0,0 +1,428 @@
+# GitHub Actions Workflows
+
+This document describes the automated workflows for building and managing the Awesome database.
+
+## Overview
+
+Two workflows automate database management:
+
+1. **Build Database** - Creates a fresh database daily
+2. **Cleanup Artifacts** - Removes old artifacts to save storage
+
+## Build Database Workflow
+
+**File:** `.github/workflows/build-database.yml`
+
+### Schedule
+
+- **Automatic:** Daily at 02:00 UTC
+- **Manual:** Can be triggered via the GitHub Actions UI or CLI
+
+### Features
+
+#### Automatic Daily Builds
+- Fetches [sindresorhus/awesome](https://github.com/sindresorhus/awesome)
+- Recursively indexes all awesome lists
+- Collects GitHub metadata (stars, forks, last commit)
+- Generates a full-text search index
+- Compresses and uploads the result as an artifact
+
+#### Build Modes
+
+**Full Mode** (default):
+- Indexes all awesome lists
+- Takes ~2-3 hours
+- Produces a comprehensive database
+
+**Sample Mode**:
+- Indexes a random sample of 10 lists
+- Takes ~5-10 minutes
+- Good for testing
+
+#### GitHub Token Integration
+- Uses the `GITHUB_TOKEN` secret for API access
+- Provides 5,000 requests/hour (vs 60 without auth)
+- Automatically configured during the build (see the quota check below)
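+
+To verify that the token is actually in effect, check the remaining quota. A quick look with the GitHub CLI (the `jq` filter just trims the response):
+
+```bash
+# An authenticated run shows a 5,000-request core limit
+gh api rate_limit | jq '.resources.core'
+```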
+
+### Manual Triggering
+
+#### Via GitHub CLI
+
+```bash
+# Trigger full build
+gh workflow run build-database.yml -f index_mode=full
+
+# Trigger sample build (for testing)
+gh workflow run build-database.yml -f index_mode=sample
+
+# Check workflow status
+gh run list --workflow=build-database.yml
+
+# View specific run
+gh run view <run-id>
+```
+
+#### Via GitHub UI
+
+1. Go to repository → Actions tab
+2. Select "Build Awesome Database" workflow
+3. Click "Run workflow" button
+4. Choose index mode (full/sample)
+5. Click "Run workflow"
+
+### Outputs
+
+#### Artifacts Uploaded
+
+- `awesome-{timestamp}.db` - Timestamped database file
+- `awesome-latest.db` - Always points to the newest build
+- `metadata.json` - Build information
+
+**Artifact Naming:** `awesome-database-{run_id}`
+
+**Retention:** 90 days
+
+#### Metadata Structure
+
+```json
+{
+  "build_date": "2025-10-26 02:15:43 UTC",
+  "build_timestamp": 1730000143,
+  "git_sha": "abc123...",
+  "workflow_run_id": "12345678",
+  "total_lists": 450,
+  "total_repos": 15000,
+  "total_readmes": 12500,
+  "size_mb": 156.42,
+  "node_version": "v22.0.0",
+  "index_mode": "full"
+}
+```
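+
+Since the metadata file is plain JSON, a downloaded build can be summarized before it is installed. For example, with `jq`:
+
+```bash
+# Print the fields that matter when choosing a build
+jq '{build_date, total_repos, size_mb, index_mode}' metadata.json
+```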
+
+#### Build Summary
+
+Each run generates a summary with:
+- Statistics (lists, repos, READMEs, size)
+- Build timing information
+- Download instructions
+- Direct artifact link
+
+### Monitoring
+
+#### Check Recent Runs
+
+```bash
+# List last 10 runs
+gh run list --workflow=build-database.yml --limit 10
+
+# Show only failed runs
+gh run list --workflow=build-database.yml --status failure
+
+# Watch current run
+gh run watch
+```
+
+#### View Build Logs
+
+```bash
+# Show logs for specific run
+gh run view <run-id> --log
+
+# Show only failed steps
+gh run view <run-id> --log-failed
+```
+
+## Cleanup Artifacts Workflow
+
+**File:** `.github/workflows/cleanup-artifacts.yml`
+
+### Schedule
+
+- **Automatic:** Daily at 03:00 UTC (after the database build)
+- **Manual:** Can be triggered with custom settings
+
+### Features
+
+#### Automatic Cleanup
+- Removes artifacts older than 30 days (default)
+- Cleans up old workflow runs (>30 days, keeping last 50)
+- Generates detailed cleanup report
+- Dry-run mode available
+
+#### Configurable Retention
+- Default: 30 days
+- Can be customized per run
+- Artifacts within the retention period are preserved
+
+### Manual Triggering
+
+#### Via GitHub CLI
+
+```bash
+# Standard cleanup (30 days)
+gh workflow run cleanup-artifacts.yml
+
+# Custom retention period (60 days)
+gh workflow run cleanup-artifacts.yml -f retention_days=60
+
+# Dry run (preview only, no deletions)
+gh workflow run cleanup-artifacts.yml -f dry_run=true -f retention_days=30
+
+# Aggressive cleanup (7 days)
+gh workflow run cleanup-artifacts.yml -f retention_days=7
+```
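+
+To preview what a given retention period would remove without running the workflow at all, the artifact list can be filtered read-only with `gh api` (30 days assumed here; nothing is deleted):
+
+```bash
+CUTOFF=$(date -u -d '30 days ago' +%Y-%m-%dT%H:%M:%SZ)
+gh api repos/:owner/:repo/actions/artifacts --paginate \
+  | jq -r --arg cutoff "$CUTOFF" \
+      '.artifacts[] | select(.created_at < $cutoff) | "\(.name)\t\(.created_at)"'
+```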
+
+#### Via GitHub UI
+
+1. Go to repository → Actions tab
+2. Select "Cleanup Old Artifacts" workflow
+3. Click "Run workflow" button
+4. Configure options:
+   - **retention_days**: Days to keep (default: 30)
+   - **dry_run**: Preview mode (default: false)
+5. Click "Run workflow"
+
+### Cleanup Report
+
+Each run generates a detailed report showing:
+
+#### Summary Statistics
+- Total artifacts scanned
+- Number deleted
+- Number kept
+- Storage space freed (MB)
+
+#### Deleted Artifacts Table
+- Artifact name
+- Size
+- Creation date
+- Age (in days)
+
+#### Kept Artifacts Table
+- Recently created artifacts
+- Artifacts within the retention period
+- Limited to the first 10 for brevity
+
+### Storage Management
+
+#### Checking Storage Usage
+
+```bash
+# List all artifacts with sizes
+gh api repos/:owner/:repo/actions/artifacts \
+  | jq -r '.artifacts[] | "\(.name) - \(.size_in_bytes / 1024 / 1024 | floor)MB - \(.created_at)"'
+
+# Calculate total storage (MB)
+gh api repos/:owner/:repo/actions/artifacts \
+  | jq '[.artifacts[].size_in_bytes] | add / 1024 / 1024 | floor'
+```
+
+#### Retention Strategy
+
+**Recommended settings:**
+- **Production:** 30-60 days retention
+- **Development:** 14-30 days retention
+- **Testing:** 7-14 days retention
+
+**Storage limits (private repositories; public repos are not billed):**
+- GitHub Free: 500 MB
+- GitHub Pro: 1 GB
+- GitHub Team/Enterprise: 2 GB / 50 GB
+
+## Downloading Databases
+
+### Method 1: Interactive Script (Recommended)
+
+```bash
+./scripts/download-db.sh
+```
+
+**Features:**
+- Lists all available builds
+- Shows metadata (date, size, commit)
+- Interactive selection
+- Automatic backup of the existing database
+- Progress indication
+
+**Usage:**
+```bash
+# Interactive mode
+./scripts/download-db.sh
+
+# Specify repository
+./scripts/download-db.sh --repo owner/awesome
+
+# Download latest automatically
+./scripts/download-db.sh --repo owner/awesome --latest
+```
+
+### Method 2: GitHub CLI Direct
+
+```bash
+# List available artifacts
+gh api repos/OWNER/REPO/actions/artifacts | jq -r '.artifacts[].name'
+
+# Download a specific run's artifact
+gh run download <run-id> -n awesome-database-<run-id>
+
+# Extract and install
+mkdir -p ~/.awesome
+cp awesome-latest.db ~/.awesome/awesome.db
+```
+
+### Method 3: GitHub API
+
+```bash
+# Get the latest successful run
+RUN_ID=$(gh api "repos/OWNER/REPO/actions/workflows/build-database.yml/runs?status=success" \
+  | jq -r '.workflow_runs[0].id')
+
+# Download its artifact
+gh run download $RUN_ID -n awesome-database-$RUN_ID
+```
+
+## Troubleshooting
+
+### Build Failures
+
+**Problem:** Workflow fails during indexing
+
+**Solutions:**
+1. Check API rate limits
+2. Review build logs: `gh run view <run-id> --log-failed`
+3. Try sample mode for testing
+4. Check the GitHub status page
+
+**Common Issues:**
+- GitHub API rate limiting
+- Network timeouts
+- Invalid awesome list URLs
+
+### Download Issues
+
+**Problem:** Cannot download artifacts
+
+**Solutions:**
+1. Ensure GitHub CLI is authenticated: `gh auth status`
+2. Check the artifact exists: `gh run list --workflow=build-database.yml`
+3. Verify the artifact hasn't expired (90 days)
+4. Try an alternative download method
+
+### Storage Issues
+
+**Problem:** Running out of artifact storage
+
+**Solutions:**
+1. Reduce the retention period: `gh workflow run cleanup-artifacts.yml -f retention_days=14`
+2. Run a manual cleanup: `gh workflow run cleanup-artifacts.yml`
+3. Check current usage with the GitHub API
+4. Consider upgrading your GitHub plan
+
+### Permission Issues
+
+**Problem:** Workflow lacks permissions
+
+**Solutions:**
+1. Verify `GITHUB_TOKEN` has the required scopes
+2. Check the `permissions:` block in the workflow file
+3. Review repository settings → Actions → General
+
+## Best Practices
+
+### For Maintainers
+
+1. **Monitor Build Success Rate**
+   - Set up notifications for failed builds
+   - Review logs regularly
+   - Keep dependencies updated
+
+2. **Optimize Build Times**
+   - Use sample mode for development
+   - Cache dependencies when possible
+   - Monitor for slow API responses
+
+3. **Manage Storage**
+   - Run cleanups regularly
+   - Adjust retention based on usage
+   - Archive important builds
+
+4. **Documentation**
+   - Keep artifact metadata updated
+   - Document any custom configuration
+   - Update the README with changes
+
+### For Users
+
+1. **Download Strategy**
+   - Use the latest builds for current data
+   - Check metadata before downloading
+   - Keep a local backup of preferred versions
+
+2. **Update Frequency**
+   - Daily builds provide fresh data
+   - Weekly downloads are usually sufficient
+   - On-demand for specific needs
+
+3. **Storage Management**
+   - Clean out old local databases
+   - Use compression for backups
+   - Verify database integrity after download
+
+## Advanced Usage
+
+### Custom Build Scripts
+
+You can create custom workflows based on the provided templates (see the note after this example about making the build workflow callable):
+
+```yaml
+# Example: Weekly comprehensive build
+name: Weekly Full Index
+on:
+  schedule:
+    - cron: '0 0 * * 0' # Sundays at midnight
+  workflow_dispatch:
+
+jobs:
+  build:
+    uses: ./.github/workflows/build-database.yml
+    with:
+      index_mode: full
+```
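+
+Note: `uses:` on a job only works if the called workflow opts in as a reusable workflow. The repository's `build-database.yml` declares only `schedule` and `workflow_dispatch` triggers, so it would also need a `workflow_call` trigger, roughly like this (a sketch; the input mirrors the existing `workflow_dispatch` one):
+
+```yaml
+# Added to the `on:` block of build-database.yml
+on:
+  workflow_call:
+    inputs:
+      index_mode:
+        description: 'Indexing mode'
+        required: false
+        default: 'full'
+        type: string
+```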
+
+### Notification Integration
+
+Add notifications to a workflow:
+
+```yaml
+- name: Notify on completion
+  if: always()
+  run: |
+    # Send to Slack, Discord, email, etc.
+    curl -X POST $WEBHOOK_URL -d "Build completed: ${{ job.status }}"
+```
+
+### Multi-Platform Builds
+
+Extend the workflow for different platforms:
+
+```yaml
+strategy:
+  matrix:
+    os: [ubuntu-latest, macos-latest, windows-latest]
+    node-version: [22, 20, 18]
+```
+
+## Resources
+
+- [GitHub Actions Documentation](https://docs.github.com/en/actions)
+- [GitHub CLI Manual](https://cli.github.com/manual/)
+- [Artifact Storage Limits](https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions)
+- [Workflow Syntax](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions)
+
+## Support
+
+For issues or questions:
+1. Check this documentation
+2. Review the workflow logs
+3. Open an issue in the repository
+4. Consult the GitHub Actions documentation
diff --git a/awesome b/awesome
index 2349fb3..367835c 100755
--- a/awesome
+++ b/awesome
@@ -110,6 +110,14 @@ program
     await checkout.cloneRepository(repo, options.directory);
   });
 
+program
+  .command('db')
+  .description('Download pre-built database from GitHub Actions')
+  .action(async () => {
+    const dbDownload = require('./lib/db-download');
+    await dbDownload.manage();
+  });
+
 // If no command is provided, show the main menu
 if (process.argv.length === 2) {
   (async () => {
diff --git a/lib/db-download.js b/lib/db-download.js
new file mode 100644
index 0000000..d899265
--- /dev/null
+++ b/lib/db-download.js
@@ -0,0 +1,364 @@
+const { execSync, spawn } = require('child_process');
+const fs = require('fs');
+const path = require('path');
+const os = require('os');
+const inquirer = require('inquirer');
+const chalk = require('chalk');
+const ora = require('ora');
+const { purpleGold, pinkPurple, goldPink, sectionHeader } = require('./banner');
+
+const DB_DIR = path.join(os.homedir(), '.awesome');
+const DB_FILE = path.join(DB_DIR, 'awesome.db');
+
+// Check if GitHub CLI is installed
+function checkGhCli() {
+  try {
+    execSync('gh --version', { stdio: 'ignore' });
+    return true;
+  } catch {
+    return false;
+  }
+}
+
+// Check if authenticated with GitHub CLI
+function checkGhAuth() {
+  try {
+    execSync('gh auth status', { stdio: 'ignore' });
+    return true;
+  } catch {
+    return false;
+  }
+}
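+
+// The origin remote may be SSH ("git@github.com:owner/repo.git") or HTTPS
+// ("https://github.com/owner/repo.git"); the regex below accepts both forms.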
+// Get repository from git remote
+function getRepository() {
+  try {
+    const remote = execSync('git remote get-url origin', { encoding: 'utf-8' }).trim();
+    const match = remote.match(/github\.com[:/]([^/]+\/[^/]+?)(\.git)?$/);
+    if (match) {
+      return match[1];
+    }
+  } catch {
+    // Not a git repository or no remote
+  }
+  return null;
+}
+
+// Fetch workflow runs
+async function fetchWorkflowRuns(repo, limit = 10) {
+  try {
+    const output = execSync(
+      `gh api -H "Accept: application/vnd.github+json" "/repos/${repo}/actions/workflows/build-database.yml/runs?per_page=${limit}&status=success"`,
+      { encoding: 'utf-8' }
+    );
+    const data = JSON.parse(output);
+    return data.workflow_runs || [];
+  } catch (error) {
+    throw new Error(`Failed to fetch workflow runs: ${error.message}`);
+  }
+}
+
+// Fetch artifacts for a run
+async function fetchArtifacts(repo, runId) {
+  try {
+    const output = execSync(
+      `gh api -H "Accept: application/vnd.github+json" "/repos/${repo}/actions/runs/${runId}/artifacts"`,
+      { encoding: 'utf-8' }
+    );
+    const data = JSON.parse(output);
+    return data.artifacts || [];
+  } catch (error) {
+    throw new Error(`Failed to fetch artifacts: ${error.message}`);
+  }
+}
+
+// Format date
+function formatDate(dateString) {
+  const date = new Date(dateString);
+  return date.toLocaleString('en-US', {
+    year: 'numeric',
+    month: 'short',
+    day: '2-digit',
+    hour: '2-digit',
+    minute: '2-digit'
+  });
+}
+
+// Format size
+function formatSize(bytes) {
+  const mb = bytes / (1024 * 1024);
+  return `${mb.toFixed(1)} MB`;
+}
+
+// List available databases
+async function listDatabases(repo) {
+  const spinner = ora(chalk.hex('#DA22FF')('Fetching available databases...')).start();
+
+  try {
+    const runs = await fetchWorkflowRuns(repo, 10);
+
+    if (runs.length === 0) {
+      spinner.fail(chalk.red('No database builds found'));
+      return null;
+    }
+
+    // Fetch artifacts for each run
+    const runsWithArtifacts = [];
+    for (const run of runs) {
+      const artifacts = await fetchArtifacts(repo, run.id);
+      const dbArtifact = artifacts.find(a => a.name.startsWith('awesome-database'));
+
+      if (dbArtifact) {
+        runsWithArtifacts.push({
+          runId: run.id,
+          createdAt: run.created_at,
+          sha: run.head_sha.substring(0, 7),
+          artifact: dbArtifact
+        });
+      }
+    }
+
+    spinner.succeed(chalk.green(`Found ${runsWithArtifacts.length} available databases`));
+
+    if (runsWithArtifacts.length === 0) {
+      return null;
+    }
+
+    return runsWithArtifacts;
+  } catch (error) {
+    spinner.fail(chalk.red(error.message));
+    return null;
+  }
+}
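+
+// `gh run download` unpacks the artifact into a temp directory; the installer
+// then backs up any existing database before copying the new one into place.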
+// Download and install database
+async function downloadDatabase(repo, runId, artifactName) {
+  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'awesome-db-'));
+
+  try {
+    // Download artifact
+    const spinner = ora(chalk.hex('#FF69B4')('Downloading database...')).start();
+
+    // spawn() requires string arguments, so the run id is stringified;
+    // -n limits the download to the database artifact
+    const downloadProcess = spawn(
+      'gh',
+      ['run', 'download', String(runId), '-n', artifactName, '-R', repo, '-D', tempDir],
+      { stdio: 'pipe' }
+    );
+
+    try {
+      await new Promise((resolve, reject) => {
+        downloadProcess.on('close', (code) => {
+          if (code === 0) {
+            resolve();
+          } else {
+            reject(new Error(`Download failed with code ${code}`));
+          }
+        });
+        downloadProcess.on('error', reject);
+      });
+    } catch (error) {
+      spinner.fail(chalk.red('Download failed'));
+      throw error;
+    }
+
+    spinner.succeed(chalk.green('Downloaded successfully'));
+
+    // Find database file
+    const files = fs.readdirSync(tempDir, { recursive: true, withFileTypes: true });
+    const dbFile = files.find(f => f.isFile() && f.name.endsWith('.db'));
+
+    if (!dbFile) {
+      throw new Error('Database file not found in artifact');
+    }
+
+    // Dirent.parentPath replaced Dirent.path in newer Node releases
+    const dbPath = path.join(dbFile.parentPath || dbFile.path || tempDir, dbFile.name);
+
+    // Backup existing database
+    if (fs.existsSync(DB_FILE)) {
+      const backupFile = `${DB_FILE}.backup.${Date.now()}`;
+      console.log(chalk.yellow(`\n⚠️  Backing up existing database to:`));
+      console.log(chalk.gray(`   ${backupFile}`));
+      fs.copyFileSync(DB_FILE, backupFile);
+    }
+
+    // Create directory if needed
+    if (!fs.existsSync(DB_DIR)) {
+      fs.mkdirSync(DB_DIR, { recursive: true });
+    }
+
+    // Copy database
+    fs.copyFileSync(dbPath, DB_FILE);
+
+    const size = fs.statSync(DB_FILE).size;
+    console.log(chalk.green(`\n✓ Database installed successfully!`));
+    console.log(chalk.gray(`  Location: ${DB_FILE}`));
+    console.log(chalk.gray(`  Size: ${formatSize(size)}`));
+
+    // Show metadata if available
+    const metadataFile = files.find(f => f.isFile() && f.name === 'metadata.json');
+    if (metadataFile) {
+      const metadataPath = path.join(metadataFile.parentPath || metadataFile.path || tempDir, metadataFile.name);
+      const metadata = JSON.parse(fs.readFileSync(metadataPath, 'utf-8'));
+
+      console.log(chalk.hex('#FFD700')('\n📊 Build Information:'));
+      console.log(chalk.gray(`  Build Date: ${metadata.build_date}`));
+      console.log(chalk.gray(`  Total Lists: ${metadata.total_lists}`));
+      console.log(chalk.gray(`  Total Repos: ${metadata.total_repos}`));
+      console.log(chalk.gray(`  Total READMEs: ${metadata.total_readmes}`));
+      console.log(chalk.gray(`  Index Mode: ${metadata.index_mode}`));
+    }
+  } finally {
+    // Cleanup temp directory
+    fs.rmSync(tempDir, { recursive: true, force: true });
+  }
+}
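+
+// Entry point for `./awesome db`: verifies the GitHub CLI and authentication,
+// resolves the target repository, lists recent builds, and installs the pick.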
+// Main function
+async function manage() {
+  console.clear();
+  sectionHeader('DATABASE DOWNLOADER', '💾');
+
+  // Check prerequisites
+  if (!checkGhCli()) {
+    console.log(chalk.red('✗ GitHub CLI (gh) is not installed\n'));
+    console.log(chalk.gray('Install from: https://cli.github.com/\n'));
+    console.log(chalk.gray('Quick install:'));
+    console.log(chalk.gray('  • macOS: brew install gh'));
+    console.log(chalk.gray('  • Ubuntu: sudo apt install gh'));
+    console.log(chalk.gray('  • Windows: winget install GitHub.cli\n'));
+    return;
+  }
+
+  if (!checkGhAuth()) {
+    console.log(chalk.yellow('⚠️  Not authenticated with GitHub CLI\n'));
+
+    const { authenticate } = await inquirer.prompt([
+      {
+        type: 'confirm',
+        name: 'authenticate',
+        message: 'Authenticate now?',
+        default: true
+      }
+    ]);
+
+    if (!authenticate) {
+      console.log(chalk.gray('\nCancelled'));
+      return;
+    }
+
+    try {
+      execSync('gh auth login', { stdio: 'inherit' });
+    } catch {
+      console.log(chalk.red('\n✗ Authentication failed'));
+      return;
+    }
+
+    console.log(chalk.green('\n✓ Authenticated successfully\n'));
+  }
+
+  // Get repository
+  let repo = getRepository();
+
+  if (!repo) {
+    const { inputRepo } = await inquirer.prompt([
+      {
+        type: 'input',
+        name: 'inputRepo',
+        message: purpleGold('Enter GitHub repository (owner/repo):'),
+        validate: (input) => {
+          if (!input.match(/^[^/]+\/[^/]+$/)) {
+            return 'Please enter in format: owner/repo';
+          }
+          return true;
+        }
+      }
+    ]);
+    repo = inputRepo;
+  } else {
+    console.log(purpleGold(`Repository: ${repo}\n`));
+  }
+
+  // List databases
+  const databases = await listDatabases(repo);
+
+  if (!databases || databases.length === 0) {
+    console.log(chalk.yellow('\n⚠️  No databases available for download'));
+    console.log(chalk.gray('   Database builds are created by GitHub Actions'));
+    console.log(chalk.gray('   Check the Actions tab in your repository\n'));
+    return;
+  }
+
+  // Show table
+  console.log(chalk.hex('#DA22FF')('\nAvailable Databases:\n'));
+
+  const Table = require('cli-table3');
+  const table = new Table({
+    head: [
+      chalk.hex('#DA22FF')('#'),
+      chalk.hex('#DA22FF')('Build Date'),
+      chalk.hex('#DA22FF')('Commit'),
+      chalk.hex('#DA22FF')('Size')
+    ],
+    colWidths: [5, 25, 12, 12],
+    style: {
+      head: [],
+      border: ['gray']
+    }
+  });
+
+  databases.forEach((db, idx) => {
+    table.push([
+      chalk.gray(idx + 1),
+      chalk.hex('#FF69B4')(formatDate(db.createdAt)),
+      chalk.hex('#FFD700')(db.sha),
+      chalk.hex('#9733EE')(formatSize(db.artifact.size_in_bytes))
+    ]);
+  });
+
+  console.log(table.toString());
+
+  // Select database
+  const choices = [
+    ...databases.map((db, idx) => ({
+      name: `${idx + 1}. ${formatDate(db.createdAt)} (${db.sha}) - ${formatSize(db.artifact.size_in_bytes)}`,
+      value: idx
+    })),
+    new inquirer.Separator(),
+    { name: chalk.gray('← Cancel'), value: -1 }
+  ];
+
+  const { selection } = await inquirer.prompt([
+    {
+      type: 'list',
+      name: 'selection',
+      message: 'Select a database to download:',
+      choices: choices,
+      pageSize: 12
+    }
+  ]);
+
+  if (selection === -1) {
+    console.log(chalk.gray('\nCancelled'));
+    return;
+  }
+
+  const selectedDb = databases[selection];
+
+  // Confirm download
+  const { confirm } = await inquirer.prompt([
+    {
+      type: 'confirm',
+      name: 'confirm',
+      message: `Download database from ${formatDate(selectedDb.createdAt)}?`,
+      default: true
+    }
+  ]);
+
+  if (!confirm) {
+    console.log(chalk.gray('\nCancelled'));
+    return;
+  }
+
+  // Download and install
+  await downloadDatabase(repo, selectedDb.runId, selectedDb.artifact.name);
+
+  console.log(chalk.hex('#FFD700')('\n🎉 Ready to use!'));
+  console.log(chalk.gray('   Run: ./awesome\n'));
+}
+
+module.exports = {
+  manage
+};
diff --git a/lib/menu.js b/lib/menu.js
index fe1433d..6d116d7 100644
--- a/lib/menu.js
+++ b/lib/menu.js
@@ -22,6 +22,7 @@ async function showMainMenu() {
       { name: `${chalk.hex('#DA22FF')('📖')} Reading History`, value: 'history' },
       new inquirer.Separator(chalk.gray('─'.repeat(50))),
       { name: `${chalk.hex('#FF69B4')('🔧')} Build/Rebuild Index`, value: 'index' },
+      { name: `${chalk.hex('#9733EE')('💾')} Download Pre-Built Database`, value: 'db' },
       { name: `${chalk.hex('#FFD700')('📊')} Statistics`, value: 'stats' },
       { name: `${chalk.hex('#DA22FF')('⚙️')} Settings`, value: 'settings' },
       new inquirer.Separator(chalk.gray('─'.repeat(50))),
@@ -98,6 +99,11 @@ async function handleMenuChoice(choice) {
       await settings.manage();
       break;
 
+    case 'db':
+      const dbDownload = require('./db-download');
+      await dbDownload.manage();
+      break;
+
     default:
       console.log(chalk.yellow('Invalid choice'));
   }