#!/bin/bash # NocoDB Auto-Setup Script # This script automatically creates the necessary base and tables for the BNKops Map Viewer application using NocoDB. # Based on requirements from README.md and using proper NocoDB column types # # Creates six tables: # 1. locations - Main table with GeoData, proper field types per README.md # 2. login - Simple authentication table with Email, Name, Admin fields and temp user support # 3. settings - Configuration table with text fields only (no QR image storage) # 4. shifts - Table for volunteer shift scheduling with public visibility support # 5. shift_signups - Table for tracking signups to shifts with source tracking and phone numbers # 6. cuts - Table for storing polygon overlays for the map # # Updated: September 2025 - Added data migration option from existing NocoDB bases # Usage: # ./build-nocodb.sh # Create new base only # ./build-nocodb.sh --migrate-data # Create new base with data migration option # ./build-nocodb.sh --help # Show usage information set -e # Exit on any error # Global variables for migration MIGRATE_DATA=false SOURCE_BASE_ID="" SOURCE_TABLE_IDS="" # Colors for output RED='\033[0;31m' GREEN='\033[0;32m' YELLOW='\033[1;33m' BLUE='\033[0;34m' NC='\033[0m' # No Color # Function to print colored output print_status() { echo -e "${BLUE}[INFO]${NC} $1" >&2 } print_success() { echo -e "${GREEN}[SUCCESS]${NC} $1" >&2 } print_warning() { echo -e "${YELLOW}[WARNING]${NC} $1" >&2 } print_error() { echo -e "${RED}[ERROR]${NC} $1" >&2 } # Function to show usage information show_usage() { cat << EOF NocoDB Auto-Setup Script USAGE: $0 [OPTIONS] OPTIONS: --migrate-data Skip interactive prompt and enable data migration mode --help Show this help message --test-export Test export functionality (for debugging) DESCRIPTION: This script creates a new NocoDB base with the required tables for the Map Viewer application. Interactive mode (default): Prompts you to choose between fresh installation or data migration. With --migrate-data option, skips the prompt and goes directly to migration setup, allowing you to select an existing base and migrate data from specific tables to the new base. EXAMPLES: $0 # Interactive mode - choose fresh or migration $0 --migrate-data # Skip prompt, go directly to migration setup $0 --help # Show this help MIGRATION FEATURES: - Automatically detects current base from .env file settings - Interactive base and table selection with clear guidance - Filters out auto-generated columns (CreatedAt, UpdatedAt, etc.) - Preserves original data (creates new base, doesn't modify existing) - Progress tracking during import with detailed success/failure reporting EOF } # Parse command line arguments parse_arguments() { while [[ $# -gt 0 ]]; do case $1 in --migrate-data) MIGRATE_DATA=true shift ;; --test-export) print_status "Test mode - export functionality verification" print_status "This would test the pagination logic with your current setup" print_warning "Test mode not yet implemented - use normal migration to test" exit 0 ;; --help) show_usage exit 0 ;; *) print_error "Unknown option: $1" show_usage exit 1 ;; esac done } # Load environment variables if [ -f ".env" ]; then # Use set -a to automatically export variables set -a source .env set +a print_success "Environment variables loaded from .env" else print_error ".env file not found!" exit 1 fi # Validate required environment variables if [ -z "$NOCODB_API_URL" ] || [ -z "$NOCODB_API_TOKEN" ]; then print_error "Required environment variables NOCODB_API_URL and NOCODB_API_TOKEN not set!" exit 1 fi # Check for required dependencies check_dependencies() { local missing_deps=() # Check for jq (required for JSON parsing in migration) if ! command -v jq &> /dev/null; then missing_deps+=("jq") fi # Check for curl (should be available but let's verify) if ! command -v curl &> /dev/null; then missing_deps+=("curl") fi if [[ ${#missing_deps[@]} -gt 0 ]]; then print_error "Missing required dependencies: ${missing_deps[*]}" print_error "Please install the missing dependencies before running this script" print_status "On Ubuntu/Debian: sudo apt-get install ${missing_deps[*]}" print_status "On CentOS/RHEL: sudo yum install ${missing_deps[*]}" print_status "On macOS: brew install ${missing_deps[*]}" exit 1 fi } # Check dependencies check_dependencies # Extract base URL from API URL and set up v2 API endpoints BASE_URL=$(echo "$NOCODB_API_URL" | sed 's|/api/v1||') API_BASE_V1="$NOCODB_API_URL" API_BASE_V2="${BASE_URL}/api/v2" print_status "Using NocoDB instance: $BASE_URL" # Function to make API calls with proper error handling make_api_call() { local method=$1 local endpoint=$2 local data=$3 local description=$4 local api_version=${5:-"v2"} # Default to v2 print_status "$description" local response local http_code local full_url if [[ "$api_version" == "v1" ]]; then full_url="$API_BASE_V1$endpoint" else full_url="$API_BASE_V2$endpoint" fi print_status "Making $method request to: $full_url" if [ "$method" = "GET" ]; then response=$(curl -s -w "%{http_code}" -H "xc-token: $NOCODB_API_TOKEN" \ -H "Content-Type: application/json" \ --max-time 60 \ "$full_url" 2>/dev/null) curl_exit_code=$? else response=$(curl -s -w "%{http_code}" -X "$method" \ -H "xc-token: $NOCODB_API_TOKEN" \ -H "Content-Type: application/json" \ --max-time 60 \ -d "$data" \ "$full_url" 2>/dev/null) curl_exit_code=$? fi if [[ $curl_exit_code -ne 0 ]]; then print_error "Network error occurred while making API call (curl exit code: $curl_exit_code)" return 1 fi if [[ -z "$response" ]]; then print_error "Empty response from API call" return 1 fi http_code="${response: -3}" response_body="${response%???}" print_status "HTTP Code: $http_code" print_status "Response preview: ${response_body:0:200}..." if [[ "$http_code" -ge 200 && "$http_code" -lt 300 ]]; then print_success "$description completed successfully" echo "$response_body" else print_error "$description failed with HTTP code: $http_code" print_error "Full URL: $full_url" print_error "Response: $response_body" return 1 fi } # Function to create a project/base create_project() { local project_name="$1" local project_data='{ "title": "'"$project_name"'", "description": "Auto-generated project for NocoDB Map Viewer", "color": "#24716E" }' make_api_call "POST" "/meta/bases" "$project_data" "Creating project: $project_name" "v2" } # Function to create a table create_table() { local base_id=$1 local table_name=$2 local table_data=$3 local description=$4 # Always create new table (no checking for existing) local response response=$(make_api_call "POST" "/meta/bases/$base_id/tables" "$table_data" "Creating table: $table_name ($description)" "v2") if [[ $? -eq 0 && -n "$response" ]]; then # Extract table ID from response local table_id table_id=$(echo "$response" | grep -o '"id":"[^"]*"' | head -1 | cut -d'"' -f4) if [[ -n "$table_id" ]]; then print_success "Table '$table_name' created with ID: $table_id" echo "$table_id" else print_error "Failed to extract table ID from response" return 1 fi else print_error "Failed to create table: $table_name" return 1 fi } # Function to test API connectivity test_api_connectivity() { print_status "Testing API connectivity..." # Test basic connectivity first if ! curl -s --max-time 10 -I "$BASE_URL" > /dev/null 2>&1; then print_error "Cannot reach NocoDB instance at $BASE_URL" return 1 fi # Test API with token using v2 endpoint local test_response test_response=$(curl -s --max-time 10 -w "%{http_code}" -H "xc-token: $NOCODB_API_TOKEN" \ -H "Content-Type: application/json" \ "$API_BASE_V2/meta/bases" 2>/dev/null || echo "CURL_ERROR") if [[ "$test_response" == "CURL_ERROR" ]]; then print_error "Network error when testing API" return 1 fi local http_code="${test_response: -3}" local response_body="${test_response%???}" if [[ "$http_code" -ge 200 && "$http_code" -lt 300 ]]; then print_success "API connectivity test successful" return 0 else print_error "API test failed with HTTP code: $http_code" print_error "Response: $response_body" return 1 fi } # Function to list all available bases list_available_bases() { print_status "Fetching available NocoDB bases..." local response response=$(make_api_call "GET" "/meta/bases" "" "Fetching bases list" "v2") if [[ $? -eq 0 && -n "$response" ]]; then echo "$response" return 0 else print_error "Failed to fetch bases list" return 1 fi } # Function to list tables in a specific base list_base_tables() { local base_id=$1 print_status "Fetching tables for base: $base_id" local response response=$(make_api_call "GET" "/meta/bases/$base_id/tables" "" "Fetching tables list" "v2") if [[ $? -eq 0 && -n "$response" ]]; then echo "$response" return 0 else print_error "Failed to fetch tables list for base: $base_id" return 1 fi } # Function to export data from a table export_table_data() { local base_id=$1 local table_id=$2 local table_name=$3 print_status "Exporting data from table: $table_name (ID: $table_id)" # First, get total count of records using a minimal request local count_response count_response=$(make_api_call "GET" "/tables/$table_id/records?limit=1" "" "Getting record count for $table_name" "v2") if [[ $? -ne 0 ]]; then print_error "Failed to get record count for table: $table_name" return 1 fi # Extract total count from pageInfo local total_count total_count=$(echo "$count_response" | jq -r '.pageInfo.totalRows // 0' 2>/dev/null) if [[ -z "$total_count" || "$total_count" == "null" || "$total_count" -eq 0 ]]; then print_warning "No records found in table: $table_name" echo '{"list":[],"pageInfo":{"totalRows":0}}' return 0 fi print_status "Found $total_count records in table: $table_name" # If we have a small number of records, get them all at once if [[ "$total_count" -le 100 ]]; then local response response=$(make_api_call "GET" "/tables/$table_id/records?limit=$total_count" "" "Exporting all $total_count records from $table_name" "v2") if [[ $? -eq 0 && -n "$response" ]]; then echo "$response" return 0 else print_error "Failed to export data from table: $table_name" return 1 fi else # For larger datasets, paginate through all records in smaller chunks print_status "Dataset with $total_count records detected. Paginating in chunks of 100..." local all_records="[]" local offset=0 local limit=100 local batch_num=1 while [[ $offset -lt $total_count ]]; do local remaining=$((total_count - offset)) local current_limit=$limit if [[ $remaining -lt $limit ]]; then current_limit=$remaining fi print_status "Fetching batch $batch_num: records $((offset + 1)) to $((offset + current_limit)) of $total_count" local batch_response local retry_count=0 local max_retries=3 while [[ $retry_count -lt $max_retries ]]; do batch_response=$(make_api_call "GET" "/tables/$table_id/records?limit=$current_limit&offset=$offset" "" "Fetching batch $batch_num (attempt $((retry_count + 1)))" "v2") if [[ $? -eq 0 ]]; then break else retry_count=$((retry_count + 1)) if [[ $retry_count -lt $max_retries ]]; then print_warning "Batch $batch_num failed, retrying in 3 seconds... (attempt $retry_count/$max_retries)" sleep 3 else print_error "Failed to fetch batch $batch_num from table: $table_name after $max_retries attempts" return 1 fi fi done # Extract records from this batch and merge with all_records local batch_records batch_records=$(echo "$batch_response" | jq -r '.list' 2>/dev/null) if [[ -n "$batch_records" && "$batch_records" != "null" && "$batch_records" != "[]" ]]; then all_records=$(echo "$all_records $batch_records" | jq -s 'add' 2>/dev/null) fi offset=$((offset + current_limit)) batch_num=$((batch_num + 1)) # Conservative delays to avoid overwhelming the API if [[ $total_count -gt 2000 ]]; then sleep 2 # Longer delay for very large datasets elif [[ $total_count -gt 500 ]]; then sleep 1 # Medium delay for medium datasets else sleep 0.5 # Short delay for smaller datasets fi done # Return the complete dataset in the expected format local final_response final_response=$(jq -n --argjson records "$all_records" --argjson total "$total_count" '{"list": $records, "pageInfo": {"totalRows": $total}}') print_success "Successfully exported all $total_count records from table: $table_name" echo "$final_response" return 0 fi } # Function to import data into a table import_table_data() { local base_id=$1 local table_id=$2 local table_name=$3 local data=$4 # Check if data contains records local record_count=$(echo "$data" | grep -o '"list":\[' | wc -l) if [[ $record_count -eq 0 ]]; then print_warning "No records found in source table: $table_name" return 0 fi # Extract the records array from the response local records_array records_array=$(echo "$data" | jq -r '.list' 2>/dev/null) if [[ -z "$records_array" || "$records_array" == "[]" || "$records_array" == "null" ]]; then print_warning "No records to import for table: $table_name" return 0 fi print_status "Importing data into table: $table_name (ID: $table_id)" # Count total records first local total_records total_records=$(echo "$records_array" | jq 'length' 2>/dev/null) print_status "Found $total_records records to import into $table_name" local import_count=0 local success_count=0 # Create temporary file to track results across subshell local temp_file="/tmp/nocodb_import_$$" echo "0" > "$temp_file" # Add progress reporting for large datasets local progress_interval=25 if [[ $total_records -gt 200 ]]; then progress_interval=50 fi if [[ $total_records -gt 1000 ]]; then progress_interval=100 fi # Parse records and import them one by one (to handle potential ID conflicts) echo "$records_array" | jq -c '.[]' 2>/dev/null | while read -r record; do import_count=$((import_count + 1)) # Remove auto-generated and system columns that can cause conflicts local cleaned_record cleaned_record=$(echo "$record" | jq ' del(.Id) | del(.id) | del(.ID) | del(.CreatedAt) | del(.UpdatedAt) | del(.created_at) | del(.updated_at) | del(.ncRecordId) | del(.ncRecordHash) ' 2>/dev/null) if [[ -z "$cleaned_record" || "$cleaned_record" == "{}" || "$cleaned_record" == "null" ]]; then print_warning "Skipping empty record $import_count in $table_name" continue fi # Use a simpler call without the make_api_call wrapper for better error handling local response local http_code local retry_count=0 local max_retries=2 while [[ $retry_count -lt $max_retries ]]; do response=$(curl -s -w "%{http_code}" -X "POST" \ -H "xc-token: $NOCODB_API_TOKEN" \ -H "Content-Type: application/json" \ --max-time 60 \ -d "$cleaned_record" \ "$API_BASE_V2/tables/$table_id/records" 2>/dev/null) http_code="${response: -3}" # If successful, break out of retry loop if [[ "$http_code" -ge 200 && "$http_code" -lt 300 ]]; then break fi retry_count=$((retry_count + 1)) if [[ $retry_count -lt $max_retries ]]; then print_warning "Import failed for record $import_count, retrying... (attempt $retry_count/$max_retries)" sleep 1 fi done if [[ "$http_code" -ge 200 && "$http_code" -lt 300 ]]; then success_count=$(cat "$temp_file") success_count=$((success_count + 1)) echo "$success_count" > "$temp_file" # Show progress at intervals to avoid too much output if [[ $((import_count % progress_interval)) -eq 0 ]] || [[ $import_count -eq $total_records ]]; then print_status "✓ Imported $import_count/$total_records records ($(($success_count * 100 / import_count))% success rate)" fi else local response_body="${response%???}" print_warning "✗ Failed to import record $import_count/$total_records: $response_body" fi # Small delay between imports for large datasets to be API-friendly if [[ $total_records -gt 500 && $import_count -lt $total_records ]]; then sleep 0.1 fi done # Read final success count local final_success_count=$(cat "$temp_file" 2>/dev/null || echo "0") rm -f "$temp_file" print_success "Data import completed for table: $table_name ($final_success_count/$total_records records imported)" } # Function to prompt user for base selection select_source_base() { print_status "Fetching available bases for migration..." local bases_response bases_response=$(list_available_bases) if [[ $? -ne 0 ]]; then print_error "Could not fetch available bases" return 1 fi # Parse and display available bases local bases_info bases_info=$(echo "$bases_response" | jq -r '.list[] | "\(.id)|\(.title)|\(.description // "No description")"' 2>/dev/null) if [[ -z "$bases_info" ]]; then print_warning "No existing bases found for migration" return 1 fi # Try to detect current base from .env file local current_base_id="" if [[ -n "$NOCODB_VIEW_URL" ]]; then current_base_id=$(extract_base_id_from_url "$NOCODB_VIEW_URL") fi echo "" print_status "Available bases for data migration:" print_status "=====================================" local counter=1 local suggested_option="" echo "$bases_info" | while IFS='|' read -r base_id title description; do local marker="" if [[ "$base_id" == "$current_base_id" ]]; then marker=" ⭐ [CURRENT]" suggested_option="$counter" fi echo " $counter) $title$marker" echo " ID: $base_id" echo " Description: $description" echo "" counter=$((counter + 1)) done echo "" if [[ -n "$current_base_id" ]]; then print_warning "⭐ Detected current base from .env file (marked above)" echo -n "Enter the number of the base to migrate from (or 'skip'): " else echo -n "Enter the number of the base you want to migrate from (or 'skip'): " fi read -r selection if [[ "$selection" == "skip" ]]; then print_status "Skipping data migration" return 1 fi if ! [[ "$selection" =~ ^[0-9]+$ ]]; then print_error "Invalid selection. Please enter a number or 'skip'" return 1 fi # Get the selected base ID local selected_base_id selected_base_id=$(echo "$bases_info" | sed -n "${selection}p" | cut -d'|' -f1) if [[ -z "$selected_base_id" ]]; then print_error "Invalid selection" return 1 fi SOURCE_BASE_ID="$selected_base_id" print_success "Selected base ID: $SOURCE_BASE_ID" return 0 } # Function to select tables for migration select_migration_tables() { local source_base_id=$1 print_status "Fetching tables from source base..." local tables_response tables_response=$(list_base_tables "$source_base_id") if [[ $? -ne 0 ]]; then print_error "Could not fetch tables from source base" return 1 fi # Parse and display available tables local tables_info tables_info=$(echo "$tables_response" | jq -r '.list[] | "\(.id)|\(.title)|\(.table_name)"' 2>/dev/null) if [[ -z "$tables_info" ]]; then print_warning "No tables found in source base" return 1 fi echo "" print_status "Available tables in source base:" print_status "================================" local counter=1 echo "$tables_info" | while IFS='|' read -r table_id title table_name; do echo " $counter) $title ($table_name)" echo " Table ID: $table_id" echo "" counter=$((counter + 1)) done echo "" print_status "Select tables to migrate (comma-separated numbers, or 'all' for all tables):" echo -n "Selection: " read -r table_selection if [[ "$table_selection" == "all" ]]; then SOURCE_TABLE_IDS=$(echo "$tables_info" | cut -d'|' -f1 | tr '\n' ',' | sed 's/,$//') else local selected_ids="" IFS=',' read -ra selections <<< "$table_selection" for selection in "${selections[@]}"; do selection=$(echo "$selection" | xargs) # Trim whitespace if [[ "$selection" =~ ^[0-9]+$ ]]; then local table_id table_id=$(echo "$tables_info" | sed -n "${selection}p" | cut -d'|' -f1) if [[ -n "$table_id" ]]; then selected_ids="$selected_ids$table_id," fi fi done SOURCE_TABLE_IDS=$(echo "$selected_ids" | sed 's/,$//') fi if [[ -z "$SOURCE_TABLE_IDS" ]]; then print_error "No valid tables selected" return 1 fi print_success "Selected table IDs: $SOURCE_TABLE_IDS" return 0 } # Function to migrate data from source to destination migrate_table_data() { local source_base_id=$1 local dest_base_id=$2 local source_table_id=$3 local dest_table_id=$4 local table_name=$5 print_status "Migrating data from $table_name..." # Export data from source table local exported_data exported_data=$(export_table_data "$source_base_id" "$source_table_id" "$table_name") if [[ $? -ne 0 ]]; then print_error "Failed to export data from source table: $table_name" return 1 fi # Import data to destination table import_table_data "$dest_base_id" "$dest_table_id" "$table_name" "$exported_data" if [[ $? -eq 0 ]]; then print_success "Successfully migrated data for table: $table_name" return 0 else print_error "Failed to migrate data for table: $table_name" return 1 fi } # Function to create new project with timestamp create_new_project() { # Generate unique project name with timestamp local timestamp=$(date +"%Y%m%d_%H%M%S") local project_name="Map Viewer Project - $timestamp" # First test API connectivity if ! test_api_connectivity; then print_error "API connectivity test failed" exit 1 fi print_status "Creating new base: $project_name" print_warning "This script will create a new base and will NOT touch any existing data" local new_base_response new_base_response=$(create_project "$project_name") if [[ $? -eq 0 ]]; then local new_base_id new_base_id=$(echo "$new_base_response" | grep -o '"id":"[^"]*"' | head -1 | sed 's/"id":"//;s/"//') if [ -n "$new_base_id" ]; then print_success "Created new base '$project_name' with ID: $new_base_id" echo "$new_base_id" return 0 else print_error "Failed to extract base ID from response" exit 1 fi else print_error "Failed to create new base" exit 1 fi } # Function to create the main locations table create_locations_table() { local base_id=$1 local table_data='{ "table_name": "locations", "title": "Locations", "columns": [ { "column_name": "id", "title": "ID", "uidt": "ID", "pk": true, "ai": true, "rqd": true }, { "column_name": "geo_location", "title": "Geo-Location", "uidt": "GeoData", "rqd": false }, { "column_name": "latitude", "title": "latitude", "uidt": "Decimal", "rqd": false, "meta": { "precision": 8, "scale": 8 } }, { "column_name": "longitude", "title": "longitude", "uidt": "Decimal", "rqd": false, "meta": { "precision": 8, "scale": 8 } }, { "column_name": "first_name", "title": "First Name", "uidt": "SingleLineText", "rqd": false }, { "column_name": "last_name", "title": "Last Name", "uidt": "SingleLineText", "rqd": false }, { "column_name": "email", "title": "Email", "uidt": "Email", "rqd": false }, { "column_name": "phone", "title": "Phone", "uidt": "PhoneNumber", "rqd": false }, { "column_name": "unit_number", "title": "Unit Number", "uidt": "SingleLineText", "rqd": false }, { "column_name": "support_level", "title": "Support Level", "uidt": "SingleSelect", "rqd": false, "colOptions": { "options": [ {"title": "1", "color": "#4CAF50"}, {"title": "2", "color": "#FFEB3B"}, {"title": "3", "color": "#FF9800"}, {"title": "4", "color": "#F44336"} ] } }, { "column_name": "address", "title": "Address", "uidt": "SingleLineText", "rqd": false }, { "column_name": "sign", "title": "Sign", "uidt": "Checkbox", "rqd": false }, { "column_name": "sign_size", "title": "Sign Size", "uidt": "SingleSelect", "rqd": false, "colOptions": { "options": [ {"title": "Regular", "color": "#2196F3"}, {"title": "Large", "color": "#4CAF50"}, {"title": "Unsure", "color": "#FF9800"} ] } }, { "column_name": "notes", "title": "Notes", "uidt": "LongText", "rqd": false }, { "column_name": "created_by_user", "title": "created_by_user", "uidt": "SingleLineText", "rqd": false }, { "column_name": "last_updated_by_user", "title": "last_updated_by_user", "uidt": "SingleLineText", "rqd": false } ] }' create_table "$base_id" "locations" "$table_data" "Main locations table for map data" } # Function to create the login table create_login_table() { local base_id=$1 local table_data='{ "table_name": "login", "title": "Login", "columns": [ { "column_name": "id", "title": "ID", "uidt": "ID", "pk": true, "ai": true, "rqd": true }, { "column_name": "email", "title": "Email", "uidt": "Email", "rqd": true }, { "column_name": "password", "title": "Password", "uidt": "SingleLineText", "rqd": true }, { "column_name": "name", "title": "Name", "uidt": "SingleLineText", "rqd": false }, { "column_name": "phone", "title": "Phone", "uidt": "PhoneNumber", "rqd": false, "meta": { "validate": true } }, { "column_name": "admin", "title": "Admin", "uidt": "Checkbox", "rqd": false }, { "column_name": "UserType", "title": "User Type", "uidt": "SingleSelect", "rqd": false, "colOptions": { "options": [ {"title": "admin", "color": "#FF6B6B"}, {"title": "user", "color": "#4ECDC4"}, {"title": "temp", "color": "#FFE66D"} ] } }, { "column_name": "Created Via", "title": "Created Via", "uidt": "SingleSelect", "rqd": false, "colOptions": { "options": [ {"title": "admin", "color": "#2196F3"}, {"title": "public_shift_signup", "color": "#FF9800"}, {"title": "standard", "color": "#4CAF50"} ] } }, { "column_name": "ExpiresAt", "title": "Expires At", "uidt": "DateTime", "rqd": false }, { "column_name": "ExpireDays", "title": "Expire Days", "uidt": "Number", "rqd": false }, { "column_name": "created_at", "title": "Created At", "uidt": "DateTime", "rqd": false }, { "column_name": "last_login", "title": "Last Login", "uidt": "DateTime", "rqd": false } ] }' create_table "$base_id" "login" "$table_data" "User authentication table" } # Function to create the settings table create_settings_table() { local base_id=$1 local table_data='{ "table_name": "settings", "title": "Settings", "columns": [ { "column_name": "id", "title": "ID", "uidt": "ID", "pk": true, "ai": true, "rqd": true }, { "column_name": "created_at", "title": "created_at", "uidt": "DateTime", "rqd": false }, { "column_name": "created_by", "title": "created_by", "uidt": "SingleLineText", "rqd": false }, { "column_name": "geo_location", "title": "Geo-Location", "uidt": "SingleLineText", "rqd": false }, { "column_name": "latitude", "title": "latitude", "uidt": "Decimal", "rqd": false, "meta": { "precision": 8, "scale": 8 } }, { "column_name": "longitude", "title": "longitude", "uidt": "Decimal", "rqd": false, "meta": { "precision": 8, "scale": 8 } }, { "column_name": "zoom", "title": "zoom", "uidt": "Number", "rqd": false }, { "column_name": "walk_sheet_title", "title": "Walk Sheet Title", "uidt": "SingleLineText", "rqd": false }, { "column_name": "walk_sheet_subtitle", "title": "Walk Sheet Subtitle", "uidt": "SingleLineText", "rqd": false }, { "column_name": "walk_sheet_footer", "title": "Walk Sheet Footer", "uidt": "LongText", "rqd": false }, { "column_name": "qr_code_1_url", "title": "QR Code 1 URL", "uidt": "URL", "rqd": false }, { "column_name": "qr_code_1_label", "title": "QR Code 1 Label", "uidt": "SingleLineText", "rqd": false }, { "column_name": "qr_code_2_url", "title": "QR Code 2 URL", "uidt": "URL", "rqd": false }, { "column_name": "qr_code_2_label", "title": "QR Code 2 Label", "uidt": "SingleLineText", "rqd": false }, { "column_name": "qr_code_3_url", "title": "QR Code 3 URL", "uidt": "URL", "rqd": false }, { "column_name": "qr_code_3_label", "title": "QR Code 3 Label", "uidt": "SingleLineText", "rqd": false } ] }' create_table "$base_id" "settings" "$table_data" "System configuration with walk sheet text fields" } # Function to create the shifts table create_shifts_table() { local base_id=$1 local table_data='{ "table_name": "shifts", "title": "Shifts", "columns": [ { "column_name": "id", "title": "ID", "uidt": "ID", "pk": true, "ai": true, "rqd": true }, { "column_name": "title", "title": "Title", "uidt": "SingleLineText", "rqd": true }, { "column_name": "description", "title": "Description", "uidt": "LongText", "rqd": false }, { "column_name": "date", "title": "Date", "uidt": "Date", "rqd": true }, { "column_name": "start_time", "title": "Start Time", "uidt": "Time", "rqd": true }, { "column_name": "end_time", "title": "End Time", "uidt": "Time", "rqd": true }, { "column_name": "location", "title": "Location", "uidt": "SingleLineText", "rqd": false }, { "column_name": "max_volunteers", "title": "Max Volunteers", "uidt": "Number", "rqd": true }, { "column_name": "current_volunteers", "title": "Current Volunteers", "uidt": "Number", "rqd": false }, { "column_name": "status", "title": "Status", "uidt": "SingleSelect", "rqd": false, "colOptions": { "options": [ {"title": "Open", "color": "#4CAF50"}, {"title": "Full", "color": "#FF9800"}, {"title": "Cancelled", "color": "#F44336"} ] } }, { "column_name": "created_by", "title": "Created By", "uidt": "SingleLineText", "rqd": false }, { "column_name": "created_at", "title": "Created At", "uidt": "DateTime", "rqd": false }, { "column_name": "updated_at", "title": "Updated At", "uidt": "DateTime", "rqd": false }, { "column_name": "is_public", "title": "Is Public", "uidt": "Checkbox", "rqd": false } ] }' create_table "$base_id" "shifts" "$table_data" "shifts table" } # Function to create the shift signups table create_shift_signups_table() { local base_id=$1 local table_data='{ "table_name": "shift_signups", "title": "Shift Signups", "columns": [ { "column_name": "id", "title": "ID", "uidt": "ID", "pk": true, "ai": true, "rqd": true }, { "column_name": "shift_id", "title": "Shift ID", "uidt": "Number", "rqd": true }, { "column_name": "shift_title", "title": "Shift Title", "uidt": "SingleLineText", "rqd": false }, { "column_name": "user_email", "title": "User Email", "uidt": "Email", "rqd": true }, { "column_name": "user_name", "title": "User Name", "uidt": "SingleLineText", "rqd": false }, { "column_name": "user_phone", "title": "User Phone", "uidt": "PhoneNumber", "rqd": false }, { "column_name": "signup_date", "title": "Signup Date", "uidt": "DateTime", "rqd": false }, { "column_name": "status", "title": "Status", "uidt": "SingleSelect", "rqd": false, "colOptions": { "options": [ {"title": "Confirmed", "color": "#4CAF50"}, {"title": "Cancelled", "color": "#F44336"} ] } }, { "column_name": "signup_source", "title": "Signup Source", "uidt": "SingleSelect", "rqd": false, "colOptions": { "options": [ {"title": "authenticated", "color": "#2196F3"}, {"title": "public", "color": "#FF9800"}, {"title": "admin", "color": "#9C27B0"} ] } } ] }' create_table "$base_id" "shift_signups" "$table_data" "shift signups table" } # Function to create the cuts table create_cuts_table() { local base_id=$1 local table_data='{ "table_name": "cuts", "title": "Cuts", "columns": [ { "column_name": "id", "title": "ID", "uidt": "ID", "pk": true, "ai": true, "rqd": true }, { "column_name": "name", "title": "Name", "uidt": "SingleLineText", "rqd": true }, { "column_name": "description", "title": "Description", "uidt": "LongText", "rqd": false }, { "column_name": "color", "title": "Color", "uidt": "SingleLineText", "rqd": true, "cdf": "#3388ff" }, { "column_name": "opacity", "title": "Opacity", "uidt": "Decimal", "rqd": true, "cdf": "0.3", "meta": { "precision": 3, "scale": 2 } }, { "column_name": "category", "title": "Category", "uidt": "SingleSelect", "rqd": false, "colOptions": { "options": [ {"title": "Custom", "color": "#2196F3"}, {"title": "Ward", "color": "#4CAF50"}, {"title": "Neighborhood", "color": "#FF9800"}, {"title": "District", "color": "#9C27B0"} ] } }, { "column_name": "is_public", "title": "Public Visibility", "uidt": "Checkbox", "rqd": false, "cdf": false }, { "column_name": "is_official", "title": "Official Cut", "uidt": "Checkbox", "rqd": false, "cdf": false }, { "column_name": "geojson", "title": "GeoJSON Data", "uidt": "LongText", "rqd": true }, { "column_name": "bounds", "title": "Bounds", "uidt": "LongText", "rqd": false }, { "column_name": "created_by", "title": "Created By", "uidt": "SingleLineText", "rqd": false }, { "column_name": "created_at", "title": "Created At", "uidt": "DateTime", "rqd": false }, { "column_name": "updated_at", "title": "Updated At", "uidt": "DateTime", "rqd": false }, { "column_name": "show_locations", "title": "Show Locations", "uidt": "Checkbox", "rqd": false, "cdf": true }, { "column_name": "export_enabled", "title": "Export Enabled", "uidt": "Checkbox", "rqd": false, "cdf": true }, { "column_name": "assigned_to", "title": "Assigned To", "uidt": "SingleLineText", "rqd": false }, { "column_name": "filter_settings", "title": "Filter Settings", "uidt": "LongText", "rqd": false }, { "column_name": "last_canvassed", "title": "Last Canvassed", "uidt": "DateTime", "rqd": false }, { "column_name": "completion_percentage", "title": "Completion Percentage", "uidt": "Number", "rqd": false, "cdf": "0" } ] }' create_table "$base_id" "cuts" "$table_data" "Polygon cuts for map overlays" } # Function to create default admin user create_default_admin() { local base_id=$1 local login_table_id=$2 print_status "Creating default admin user..." local admin_data='{ "email": "admin@thebunkerops.ca", "password": "admin123", "name": "Administrator", "admin": true, "created_at": "'"$(date -u +"%Y-%m-%d %H:%M:%S")"'" }' make_api_call "POST" "/tables/$login_table_id/records" "$admin_data" "Creating default admin user" "v2" print_warning "Default admin user created:" print_warning " Email: admin@thebunkerops.ca" print_warning " Name: Administrator" print_warning " Admin: true" print_warning " Note: This is a simplified login table for demonstration." print_warning " You may need to implement proper authentication separately." } # Function to create default start location setting create_default_start_location() { local base_id=$1 local settings_table_id=$2 print_status "Creating default settings row with start location..." local start_location_data='{ "created_at": "'"$(date -u +"%Y-%m-%d %H:%M:%S")"'", "created_by": "system", "geo_location": "'"${DEFAULT_LAT:-53.5461}"';'"${DEFAULT_LNG:--113.4938}"'", "latitude": '"${DEFAULT_LAT:-53.5461}"', "longitude": '"${DEFAULT_LNG:--113.4938}"', "zoom": '"${DEFAULT_ZOOM:-11}"', "walk_sheet_title": "Campaign Walk Sheet", "walk_sheet_subtitle": "Door-to-Door Canvassing Form", "walk_sheet_footer": "Thank you for your participation in our campaign!", "qr_code_1_url": "https://example.com/signup", "qr_code_1_label": "Sign Up", "qr_code_2_url": "https://example.com/donate", "qr_code_2_label": "Donate", "qr_code_3_url": "https://example.com/volunteer", "qr_code_3_label": "Volunteer" }' make_api_call "POST" "/tables/$settings_table_id/records" "$start_location_data" "Creating default settings row" "v2" } # Function to update .env file with new table URLs update_env_file() { local base_id=$1 local locations_table_id=$2 local login_table_id=$3 local settings_table_id=$4 local shifts_table_id=$5 local shift_signups_table_id=$6 local cuts_table_id=$7 print_status "Updating .env file with new table URLs..." # Create backup of current .env file if [ -f ".env" ]; then cp ".env" ".env.backup.$(date +%Y%m%d_%H%M%S)" print_success "Backed up current .env file" fi # Construct the new URLs local new_locations_url="${BASE_URL}/dashboard/#/nc/${base_id}/${locations_table_id}" local new_login_url="${BASE_URL}/dashboard/#/nc/${base_id}/${login_table_id}" local new_settings_url="${BASE_URL}/dashboard/#/nc/${base_id}/${settings_table_id}" local new_shifts_url="${BASE_URL}/dashboard/#/nc/${base_id}/${shifts_table_id}" local new_shift_signups_url="${BASE_URL}/dashboard/#/nc/${base_id}/${shift_signups_table_id}" local new_cuts_url="${BASE_URL}/dashboard/#/nc/${base_id}/${cuts_table_id}" print_status "Updating URLs in .env file..." # Update each URL in the .env file if [ -f ".env" ]; then # Use sed to update each line, creating a temporary file sed -i.tmp "s|^NOCODB_VIEW_URL=.*|NOCODB_VIEW_URL=${new_locations_url}|" .env sed -i.tmp "s|^NOCODB_LOGIN_SHEET=.*|NOCODB_LOGIN_SHEET=${new_login_url}|" .env sed -i.tmp "s|^NOCODB_SETTINGS_SHEET=.*|NOCODB_SETTINGS_SHEET=${new_settings_url}|" .env sed -i.tmp "s|^NOCODB_SHIFTS_SHEET=.*|NOCODB_SHIFTS_SHEET=${new_shifts_url}|" .env sed -i.tmp "s|^NOCODB_SHIFT_SIGNUPS_SHEET=.*|NOCODB_SHIFT_SIGNUPS_SHEET=${new_shift_signups_url}|" .env sed -i.tmp "s|^NOCODB_CUTS_SHEET=.*|NOCODB_CUTS_SHEET=${new_cuts_url}|" .env # Remove the temporary file created by sed rm -f .env.tmp print_success "Updated .env file with new table URLs" print_status "" print_status "New URLs set in .env file:" print_status " NOCODB_VIEW_URL=${new_locations_url}" print_status " NOCODB_LOGIN_SHEET=${new_login_url}" print_status " NOCODB_SETTINGS_SHEET=${new_settings_url}" print_status " NOCODB_SHIFTS_SHEET=${new_shifts_url}" print_status " NOCODB_SHIFT_SIGNUPS_SHEET=${new_shift_signups_url}" print_status " NOCODB_CUTS_SHEET=${new_cuts_url}" else print_error "Could not find .env file to update" return 1 fi } # Function to extract base ID from URL extract_base_id_from_url() { local url="$1" echo "$url" | grep -o '/nc/[^/]*' | sed 's|/nc/||' } # Function to prompt user about data migration prompt_migration_choice() { print_status "NocoDB Auto-Setup - Migration Options" print_status "=====================================" echo "" print_status "This script will create a new NocoDB base with fresh tables." echo "" print_status "Migration Options:" print_status " 1) Fresh installation (create new base with default data)" print_status " 2) Migrate from existing base (preserve your current data)" echo "" # Check if we have existing URLs in .env to suggest migration if [[ -n "$NOCODB_VIEW_URL" ]]; then local current_base_id=$(extract_base_id_from_url "$NOCODB_VIEW_URL") print_warning "Detected existing base in .env: $current_base_id" print_warning "You may want to migrate data from your current base." fi echo "" echo -n "Choose option (1 or 2): " read -r choice case $choice in 1) print_status "Selected: Fresh installation" MIGRATE_DATA=false return 0 ;; 2) print_status "Selected: Data migration" MIGRATE_DATA=true return 0 ;; *) print_error "Invalid choice. Please enter 1 or 2." prompt_migration_choice ;; esac } # Main execution main() { # Parse command line arguments parse_arguments "$@" print_status "Starting NocoDB Auto-Setup..." print_status "================================" # Always prompt for migration choice unless --migrate-data was explicitly passed if [[ "$MIGRATE_DATA" != "true" ]]; then prompt_migration_choice fi # Handle data migration setup if requested if [[ "$MIGRATE_DATA" == "true" ]]; then print_status "" print_status "=== Data Migration Setup ===" if select_source_base; then if select_migration_tables "$SOURCE_BASE_ID"; then print_success "Migration setup completed" print_warning "Data will be migrated after creating the new base and tables" else print_warning "Table selection failed, proceeding without migration" MIGRATE_DATA=false fi else print_warning "Base selection failed, proceeding without migration" MIGRATE_DATA=false fi print_status "" fi # Always create a new project print_status "Creating new base..." print_warning "This script creates a NEW base and does NOT modify existing data" BASE_ID=$(create_new_project) if [ -z "$BASE_ID" ]; then print_error "Failed to create new base" exit 1 fi print_status "Working with new base ID: $BASE_ID" # Create tables print_status "Creating tables..." # Create locations table LOCATIONS_TABLE_ID=$(create_locations_table "$BASE_ID") # Create login table LOGIN_TABLE_ID=$(create_login_table "$BASE_ID") # Create settings table SETTINGS_TABLE_ID=$(create_settings_table "$BASE_ID") # Create shifts table SHIFTS_TABLE_ID=$(create_shifts_table "$BASE_ID") # Create shift signups table SHIFT_SIGNUPS_TABLE_ID=$(create_shift_signups_table "$BASE_ID") # Create cuts table CUTS_TABLE_ID=$(create_cuts_table "$BASE_ID") # Wait a moment for tables to be fully created sleep 3 # Handle data migration if enabled if [[ "$MIGRATE_DATA" == "true" && -n "$SOURCE_BASE_ID" && -n "$SOURCE_TABLE_IDS" ]]; then print_status "================================" print_status "Starting data migration..." print_status "================================" # Create mapping of table names to new table IDs declare -A new_table_map=( ["locations"]="$LOCATIONS_TABLE_ID" ["login"]="$LOGIN_TABLE_ID" ["settings"]="$SETTINGS_TABLE_ID" ["shifts"]="$SHIFTS_TABLE_ID" ["shift_signups"]="$SHIFT_SIGNUPS_TABLE_ID" ["cuts"]="$CUTS_TABLE_ID" ) # Get source table information local source_tables_response source_tables_response=$(list_base_tables "$SOURCE_BASE_ID") # Migrate each selected table IFS=',' read -ra table_ids <<< "$SOURCE_TABLE_IDS" for source_table_id in "${table_ids[@]}"; do # Get table name from source local table_info table_info=$(echo "$source_tables_response" | jq -r ".list[] | select(.id == \"$source_table_id\") | .table_name" 2>/dev/null) if [[ -n "$table_info" && -n "${new_table_map[$table_info]}" ]]; then migrate_table_data "$SOURCE_BASE_ID" "$BASE_ID" "$source_table_id" "${new_table_map[$table_info]}" "$table_info" else print_warning "Skipping migration for unknown table: $table_info (ID: $source_table_id)" fi done print_status "================================" print_success "Data migration completed!" print_status "================================" else # Create default data only if not migrating print_status "Setting up default data..." # Create default admin user create_default_admin "$BASE_ID" "$LOGIN_TABLE_ID" # Create default settings row (includes both start location and walk sheet config) create_default_start_location "$BASE_ID" "$SETTINGS_TABLE_ID" fi # Update .env file with new table URLs update_env_file "$BASE_ID" "$LOCATIONS_TABLE_ID" "$LOGIN_TABLE_ID" "$SETTINGS_TABLE_ID" "$SHIFTS_TABLE_ID" "$SHIFT_SIGNUPS_TABLE_ID" "$CUTS_TABLE_ID" print_status "================================" print_success "NocoDB Auto-Setup completed successfully!" print_status "================================" print_status "Base ID: $BASE_ID" print_status "" print_status "Next steps:" print_status "1. Login to your NocoDB instance at: $BASE_URL" print_status "2. Your .env file has been automatically updated with the new table URLs!" if [[ "$MIGRATE_DATA" == "true" && -n "$SOURCE_BASE_ID" && -n "$SOURCE_TABLE_IDS" ]]; then print_status "3. Your existing data has been migrated to the new base!" print_status "4. Review the migrated data and verify everything transferred correctly" print_status "5. If you had custom admin users, you may need to update passwords" else print_status "3. The default admin user is: admin@thebunkerops.ca with password: admin123" print_status "4. IMPORTANT: Change the default password after first login!" print_status "5. Start adding your location data!" fi print_warning "" print_warning "IMPORTANT: This script created a NEW base. Your existing data was NOT modified." print_warning "Your .env file has been automatically updated with the new table URLs." print_warning "A backup of your previous .env file was created with a timestamp." if [[ "$MIGRATE_DATA" != "true" ]]; then print_warning "SECURITY: Change the default admin password immediately after first login!" fi if [[ "$MIGRATE_DATA" == "true" ]]; then print_warning "DATA MIGRATION: Verify all migrated data is correct before using in production!" print_warning "The original base remains unchanged as a backup." fi } # Check if script is being run directly if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then main "$@" fi