diff --git a/map/README.md b/map/README.md
index 6917d9f..d88c17b 100644
--- a/map/README.md
+++ b/map/README.md
@@ -143,6 +143,11 @@
./build-nocodb.sh
```
+ **For migrating from an existing NocoDB base:**
+ ```bash
+ ./build-nocodb.sh --migrate-data
+ ```
+
This creates six tables:
- **Locations** - Main map data with geo-location, contact info, support levels
- **Login** - User authentication (email, name, admin flag)
@@ -151,6 +156,49 @@
- **Shift Signups** - User shift registrations
- **Cuts** - Geographic polygon overlays for map regions
+ ### Data Migration Options
+
+ The build script supports data migration from existing NocoDB bases:
+
+ **Interactive Mode (Default):**
+ ```bash
+ ./build-nocodb.sh
+ ```
+ - Prompts you to choose between a fresh installation and data migration
+ - Automatically detects the current base from your .env file
+ - Provides guided setup with clear options
+
+ **Fresh Installation:**
+ - Creates new base with sample data
+ - Sets up default admin user (admin@thebunkerops.ca / admin123)
+ - Configures default settings
+
+ **Migration from Existing Base:**
+ ```bash
+ ./build-nocodb.sh --migrate-data # Skip the prompt and go directly to migration
+ ```
+ - Lists all available bases in your NocoDB instance
+ - Highlights current base from .env file for easy selection
+ - Allows you to select source base for migration
+ - Choose specific tables to migrate (locations, login, settings, etc.)
+ - Filters out auto-generated columns to prevent conflicts (see the filter sketch after this list)
+ - Preserves your existing data while updating to new schema
+ - Original base remains unchanged as backup
+
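+ The columns stripped before import are NocoDB's system columns, so the new base can assign its own IDs and timestamps. Per record, the filter is equivalent to the following jq expression (the build script applies the same deletions field by field; the sample record and its field names are illustrative only):
+
+ ```bash
+ # drop NocoDB system columns before re-inserting the record into the new base
+ record='{"Id":12,"Name":"Main St","CreatedAt":"2025-08-01","Address":"123 Main St"}'
+ echo "$record" | jq 'del(.Id, .id, .ID, .CreatedAt, .UpdatedAt, .created_at, .updated_at, .ncRecordId, .ncRecordHash)'
+ # => {"Name":"Main St","Address":"123 Main St"}
+ ```
+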
+ **Migration Process:**
+ 1. Script displays available bases with IDs and descriptions
+ 2. Select source base by entering the corresponding number
+ 3. Choose tables to migrate (comma-separated numbers or 'all')
+ 4. Data is exported from source and imported to new base
+ 5. .env file automatically updated with new URLs
+
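+ A run with --migrate-data walks through prompts like the following (the base and table numbers depend on what your NocoDB instance lists; the values shown here are illustrative):
+
+ ```bash
+ ./build-nocodb.sh --migrate-data
+ # ... available bases are listed with IDs and descriptions ...
+ # Enter the number of the base you want to migrate from (or 'skip'): 2
+ # ... tables in the selected base are listed ...
+ # Select tables to migrate (comma-separated numbers, or 'all' for all tables):
+ # Selection: all
+ ```
+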
+ **Important Migration Notes:**
+ - ✅ Original data remains untouched (creates new base)
+ - ✅ Auto-generates new IDs to prevent conflicts
+ - ✅ Migrates only tables whose names match the new schema (unrecognized tables are skipped)
+ - ⚠️ Review migrated data before using in production (a quick spot-check example follows below)
+ - ⚠️ Existing admin passwords may need to be reset
+
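+ One way to spot-check a migrated table is to count its records through the same v2 records endpoint the script uses. The table ID comes from the URLs written to your .env file, NOCODB_API_TOKEN is the token already configured there, and the host below is a placeholder for your instance:
+
+ ```bash
+ # count the records returned for a migrated table (the 1000 limit mirrors the script's export default)
+ curl -s -H "xc-token: $NOCODB_API_TOKEN" \
+   "https://your-nocodb-host/api/v2/tables/<TABLE_ID>/records?limit=1000" | jq '.list | length'
+ ```
+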
4. **Get Table URLs**
After the script completes:
diff --git a/map/app/controllers/usersController.js b/map/app/controllers/usersController.js
index 2b389fb..799eca5 100644
--- a/map/app/controllers/usersController.js
+++ b/map/app/controllers/usersController.js
@@ -56,7 +56,7 @@ class UsersController {
async create(req, res) {
try {
- const { email, password, name, isAdmin, userType, expireDays } = req.body;
+ const { email, password, name, phone, isAdmin, userType, expireDays } = req.body;
if (!email || !password) {
return res.status(400).json({
@@ -98,6 +98,8 @@ class UsersController {
password: password,
Name: name || '',
name: name || '',
+ Phone: phone || '',
+ phone: phone || '',
Admin: isAdmin === true,
admin: isAdmin === true,
'User Type': userType || 'user', // Handle space in field name
@@ -121,6 +123,7 @@ class UsersController {
ID: extractId(response),
Email: email,
Name: name,
+ Phone: phone,
Admin: isAdmin,
'User Type': userType, // Handle space in field name
UserType: userType,
@@ -157,6 +160,7 @@ class UsersController {
id: extractId(response),
email: email,
name: name,
+ phone: phone,
admin: isAdmin,
userType: userType,
expiresAt: expiresAt
diff --git a/map/app/public/admin.html b/map/app/public/admin.html
index 0f7c240..2f741a2 100644
--- a/map/app/public/admin.html
+++ b/map/app/public/admin.html
@@ -23,10 +23,12 @@
-
`).join('');
@@ -137,6 +150,14 @@ function setupVolunteerActionListeners() {
const volunteerId = e.target.getAttribute('data-volunteer-id');
const volunteerEmail = e.target.getAttribute('data-volunteer-email');
removeVolunteerFromShift(volunteerId, volunteerEmail);
+ } else if (e.target.classList.contains('sms-volunteer-btn')) {
+ const volunteerEmail = e.target.getAttribute('data-volunteer-email');
+ const volunteerName = e.target.getAttribute('data-volunteer-name');
+ openVolunteerSMS(volunteerEmail, volunteerName);
+ } else if (e.target.classList.contains('call-volunteer-btn')) {
+ const volunteerEmail = e.target.getAttribute('data-volunteer-email');
+ const volunteerName = e.target.getAttribute('data-volunteer-name');
+ callVolunteer(volunteerEmail, volunteerName);
}
});
}
@@ -177,6 +198,9 @@ async function addUserToShift() {
try {
await refreshCurrentShiftData();
console.log('Refreshed shift data after adding user');
+
+ // Also update the modal title to reflect new volunteer count
+ updateModalTitle();
} catch (refreshError) {
console.error('Error during refresh after adding user:', refreshError);
// Still show success since the add operation worked
@@ -216,6 +240,9 @@ async function removeVolunteerFromShift(volunteerId, volunteerEmail) {
try {
await refreshCurrentShiftData();
console.log('Refreshed shift data after removing volunteer');
+
+ // Also update the modal title to reflect new volunteer count
+ updateModalTitle();
} catch (refreshError) {
console.error('Error during refresh after removing volunteer:', refreshError);
// Still show success since the remove operation worked
@@ -272,13 +299,27 @@ function updateShiftInList(updatedShift) {
if (shiftItem) {
const signupCount = updatedShift.signups ? updatedShift.signups.length : 0;
+ // Generate list of first names for volunteers (same logic as displayAdminShifts)
+ const firstNames = updatedShift.signups ? updatedShift.signups.map(volunteer => {
+ const fullName = volunteer['User Name'] || volunteer['User Email'] || 'Unknown';
+ // Extract first name (everything before first space, or email username if no space)
+ const firstName = fullName.includes(' ') ? fullName.split(' ')[0] :
+ fullName.includes('@') ? fullName.split('@')[0] : fullName;
+ return safeAdminCore('escapeHtml', firstName) || firstName;
+ }).slice(0, 8) : []; // Limit to first 8 names to avoid overflow
+
+ const namesDisplay = firstNames.length > 0 ?
+ `(${firstNames.join(', ')}${firstNames.length === 8 && signupCount > 8 ? '...' : ''})` :
+ '';
+
// Find the volunteer count paragraph (contains 👥)
const volunteerCountElement = Array.from(shiftItem.querySelectorAll('p')).find(p =>
- p.textContent.includes('👥')
+ p.textContent.includes('👥') || p.classList.contains('volunteer-count')
);
if (volunteerCountElement) {
- volunteerCountElement.textContent = `👥 ${signupCount}/${updatedShift['Max Volunteers']} volunteers`;
+ volunteerCountElement.innerHTML = `👥 ${signupCount}/${updatedShift['Max Volunteers']} volunteers ${namesDisplay}`;
+ volunteerCountElement.className = 'volunteer-count'; // Ensure class is set
}
// Update the data attribute with new shift data
@@ -290,6 +331,32 @@ function updateShiftInList(updatedShift) {
}
}
+// Update modal title with current volunteer count
+function updateModalTitle() {
+ if (!currentShiftData) return;
+
+ const modalTitle = document.getElementById('modal-shift-title');
+ const modalDetails = document.getElementById('modal-shift-details');
+
+ if (modalTitle) {
+ const signupCount = currentShiftData.signups ? currentShiftData.signups.length : 0;
+ modalTitle.textContent = `Manage Volunteers - ${currentShiftData.Title} (${signupCount}/${currentShiftData['Max Volunteers']})`;
+ }
+
+ if (modalDetails) {
+ const shiftDate = safeAdminCore('createLocalDate', currentShiftData.Date);
+ const dateStr = shiftDate ? shiftDate.toLocaleDateString() : currentShiftData.Date;
+ const signupCount = currentShiftData.signups ? currentShiftData.signups.length : 0;
+
+ modalDetails.innerHTML = `
+
@@ -222,6 +243,7 @@ async function createUser(e) {
const emailInput = document.getElementById('user-email');
const passwordInput = document.getElementById('user-password');
const nameInput = document.getElementById('user-name');
+ const phoneInput = document.getElementById('user-phone');
const userTypeSelect = document.getElementById('user-type');
const expireDaysInput = document.getElementById('user-expire-days');
const adminCheckbox = document.getElementById('user-is-admin');
@@ -229,6 +251,7 @@ async function createUser(e) {
const email = emailInput?.value.trim();
const password = passwordInput?.value;
const name = nameInput?.value.trim();
+ const phone = phoneInput?.value.trim();
const userType = userTypeSelect?.value;
const expireDays = userType === 'temp' ?
parseInt(expireDaysInput?.value) : null;
@@ -254,6 +277,7 @@ async function createUser(e) {
email,
password,
name: name || '',
+ phone: phone || '',
isAdmin: userType === 'admin' || admin,
userType,
expireDays
diff --git a/map/build-nocodb.sh b/map/build-nocodb.sh
index 4d9873c..276c8fa 100755
--- a/map/build-nocodb.sh
+++ b/map/build-nocodb.sh
@@ -12,10 +12,19 @@
# 5. shift_signups - Table for tracking signups to shifts with source tracking and phone numbers
# 6. cuts - Table for storing polygon overlays for the map
#
-# Updated: August 2025 - Added public shift support, signup source tracking, phone numbers
+# Updated: September 2025 - Added data migration option from existing NocoDB bases
+# Usage:
+# ./build-nocodb.sh # Interactive mode: choose fresh installation or data migration
+# ./build-nocodb.sh --migrate-data # Skip the prompt and go straight to migration setup
+# ./build-nocodb.sh --help # Show usage information
set -e # Exit on any error
+# Global variables for migration
+MIGRATE_DATA=false
+SOURCE_BASE_ID=""
+SOURCE_TABLE_IDS=""
+
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
@@ -40,6 +49,62 @@ print_error() {
echo -e "${RED}[ERROR]${NC} $1" >&2
}
+# Function to show usage information
+show_usage() {
+ cat << EOF
+NocoDB Auto-Setup Script
+
+USAGE:
+ $0 [OPTIONS]
+
+OPTIONS:
+ --migrate-data Skip interactive prompt and enable data migration mode
+ --help Show this help message
+
+DESCRIPTION:
+ This script creates a new NocoDB base with the required tables for the Map Viewer application.
+
+ Interactive mode (default): prompts you to choose between a fresh installation and data migration.
+
+ With the --migrate-data option, the script skips the prompt and goes directly to migration setup,
+ allowing you to select an existing base and migrate data from specific tables into the new base.
+
+EXAMPLES:
+ $0 # Interactive mode - choose fresh or migration
+ $0 --migrate-data # Skip prompt, go directly to migration setup
+ $0 --help # Show this help
+
+MIGRATION FEATURES:
+ - Automatically detects current base from .env file settings
+ - Interactive base and table selection with clear guidance
+ - Filters out auto-generated columns (CreatedAt, UpdatedAt, etc.)
+ - Preserves original data (creates new base, doesn't modify existing)
+ - Progress tracking during import with detailed success/failure reporting
+
+EOF
+}
+
+# Parse command line arguments
+parse_arguments() {
+ while [[ $# -gt 0 ]]; do
+ case $1 in
+ --migrate-data)
+ MIGRATE_DATA=true
+ shift
+ ;;
+ --help)
+ show_usage
+ exit 0
+ ;;
+ *)
+ print_error "Unknown option: $1"
+ show_usage
+ exit 1
+ ;;
+ esac
+ done
+}
+
# Load environment variables
if [ -f ".env" ]; then
# Use set -a to automatically export variables
@@ -58,6 +123,33 @@ if [ -z "$NOCODB_API_URL" ] || [ -z "$NOCODB_API_TOKEN" ]; then
exit 1
fi
+# Check for required dependencies
+check_dependencies() {
+ local missing_deps=()
+
+ # Check for jq (required for JSON parsing in migration)
+ if ! command -v jq &> /dev/null; then
+ missing_deps+=("jq")
+ fi
+
+ # Check for curl (should be available but let's verify)
+ if ! command -v curl &> /dev/null; then
+ missing_deps+=("curl")
+ fi
+
+ if [[ ${#missing_deps[@]} -gt 0 ]]; then
+ print_error "Missing required dependencies: ${missing_deps[*]}"
+ print_error "Please install the missing dependencies before running this script"
+ print_status "On Ubuntu/Debian: sudo apt-get install ${missing_deps[*]}"
+ print_status "On CentOS/RHEL: sudo yum install ${missing_deps[*]}"
+ print_status "On macOS: brew install ${missing_deps[*]}"
+ exit 1
+ fi
+}
+
+# Check dependencies
+check_dependencies
+
# Extract base URL from API URL and set up v2 API endpoints
BASE_URL=$(echo "$NOCODB_API_URL" | sed 's|/api/v1||')
API_BASE_V1="$NOCODB_API_URL"
@@ -205,6 +297,332 @@ test_api_connectivity() {
fi
}
+# Function to list all available bases
+list_available_bases() {
+ print_status "Fetching available NocoDB bases..."
+
+ local response
+ response=$(make_api_call "GET" "/meta/bases" "" "Fetching bases list" "v2")
+
+ if [[ $? -eq 0 && -n "$response" ]]; then
+ echo "$response"
+ return 0
+ else
+ print_error "Failed to fetch bases list"
+ return 1
+ fi
+}
+
+# Function to list tables in a specific base
+list_base_tables() {
+ local base_id=$1
+
+ print_status "Fetching tables for base: $base_id"
+
+ local response
+ response=$(make_api_call "GET" "/meta/bases/$base_id/tables" "" "Fetching tables list" "v2")
+
+ if [[ $? -eq 0 && -n "$response" ]]; then
+ echo "$response"
+ return 0
+ else
+ print_error "Failed to fetch tables list for base: $base_id"
+ return 1
+ fi
+}
+
+# Function to export data from a table
+export_table_data() {
+ local base_id=$1
+ local table_id=$2
+ local table_name=$3
+ local limit=${4:-1000} # Default limit of 1000 records
+
+ print_status "Exporting data from table: $table_name (ID: $table_id)"
+
+ local response
+ response=$(make_api_call "GET" "/tables/$table_id/records?limit=$limit" "" "Exporting data from $table_name" "v2")
+
+ if [[ $? -eq 0 && -n "$response" ]]; then
+ echo "$response"
+ return 0
+ else
+ print_error "Failed to export data from table: $table_name"
+ return 1
+ fi
+}
+
+# Function to import data into a table
+import_table_data() {
+ local base_id=$1
+ local table_id=$2
+ local table_name=$3
+ local data=$4
+
+ # Sanity check: make sure the export response contains a "list" array of records
+ local record_count=$(echo "$data" | grep -o '"list":\[' | wc -l)
+
+ if [[ $record_count -eq 0 ]]; then
+ print_warning "No record list found in export for table: $table_name"
+ return 0
+ fi
+
+ # Extract the records array from the response
+ local records_array
+ records_array=$(echo "$data" | jq -r '.list' 2>/dev/null)
+
+ if [[ -z "$records_array" || "$records_array" == "[]" || "$records_array" == "null" ]]; then
+ print_warning "No records to import for table: $table_name"
+ return 0
+ fi
+
+ print_status "Importing data into table: $table_name (ID: $table_id)"
+
+ # Count total records first
+ local total_records
+ total_records=$(echo "$records_array" | jq 'length' 2>/dev/null)
+ print_status "Found $total_records records to import"
+
+ local import_count=0
+ local success_count=0
+
+ # Create temporary file to track results across subshell
+ local temp_file="/tmp/nocodb_import_$$"
+ echo "0" > "$temp_file"
+
+ # Parse records and import them one by one (to handle potential ID conflicts)
+ echo "$records_array" | jq -c '.[]' 2>/dev/null | while read -r record; do
+ import_count=$((import_count + 1))
+
+ # Remove auto-generated and system columns that can cause conflicts
+ local cleaned_record
+ cleaned_record=$(echo "$record" | jq '
+ del(.Id) |
+ del(.id) |
+ del(.ID) |
+ del(.CreatedAt) |
+ del(.UpdatedAt) |
+ del(.created_at) |
+ del(.updated_at) |
+ del(.ncRecordId) |
+ del(.ncRecordHash)
+ ' 2>/dev/null)
+
+ if [[ -z "$cleaned_record" || "$cleaned_record" == "{}" || "$cleaned_record" == "null" ]]; then
+ print_warning "Skipping empty record $import_count in $table_name"
+ continue
+ fi
+
+ # Use a simpler call without the make_api_call wrapper for better error handling
+ local response
+ local http_code
+
+ response=$(curl -s -w "%{http_code}" -X "POST" \
+ -H "xc-token: $NOCODB_API_TOKEN" \
+ -H "Content-Type: application/json" \
+ --max-time 30 \
+ -d "$cleaned_record" \
+ "$API_BASE_V2/tables/$table_id/records" 2>/dev/null)
+
+ http_code="${response: -3}"
+
+ if [[ "$http_code" -ge 200 && "$http_code" -lt 300 ]]; then
+ success_count=$(cat "$temp_file")
+ success_count=$((success_count + 1))
+ echo "$success_count" > "$temp_file"
+ print_status "✓ Imported record $import_count/$total_records"
+ else
+ local response_body="${response%???}"
+ print_warning "✗ Failed to import record $import_count/$total_records: $response_body"
+ fi
+ done
+
+ # Read final success count
+ local final_success_count=$(cat "$temp_file" 2>/dev/null || echo "0")
+ rm -f "$temp_file"
+
+ print_success "Data import completed for table: $table_name ($final_success_count/$total_records records imported)"
+}
+
+# Function to prompt user for base selection
+select_source_base() {
+ print_status "Fetching available bases for migration..."
+
+ local bases_response
+ bases_response=$(list_available_bases)
+
+ if [[ $? -ne 0 ]]; then
+ print_error "Could not fetch available bases"
+ return 1
+ fi
+
+ # Parse and display available bases
+ local bases_info
+ bases_info=$(echo "$bases_response" | jq -r '.list[] | "\(.id)|\(.title)|\(.description // "No description")"' 2>/dev/null)
+
+ if [[ -z "$bases_info" ]]; then
+ print_warning "No existing bases found for migration"
+ return 1
+ fi
+
+ # Try to detect current base from .env file
+ local current_base_id=""
+ if [[ -n "$NOCODB_VIEW_URL" ]]; then
+ current_base_id=$(extract_base_id_from_url "$NOCODB_VIEW_URL")
+ fi
+
+ echo ""
+ print_status "Available bases for data migration:"
+ print_status "====================================="
+
+ local counter=1
+ local suggested_option=""
+
+ echo "$bases_info" | while IFS='|' read -r base_id title description; do
+ local marker=""
+ if [[ "$base_id" == "$current_base_id" ]]; then
+ marker=" ⭐ [CURRENT]"
+ suggested_option="$counter"
+ fi
+ echo " $counter) $title$marker"
+ echo " ID: $base_id"
+ echo " Description: $description"
+ echo ""
+ counter=$((counter + 1))
+ done
+
+ echo ""
+ if [[ -n "$current_base_id" ]]; then
+ print_warning "⭐ Detected current base from .env file (marked above)"
+ echo -n "Enter the number of the base to migrate from (or 'skip'): "
+ else
+ echo -n "Enter the number of the base you want to migrate from (or 'skip'): "
+ fi
+
+ read -r selection
+
+ if [[ "$selection" == "skip" ]]; then
+ print_status "Skipping data migration"
+ return 1
+ fi
+
+ if ! [[ "$selection" =~ ^[0-9]+$ ]]; then
+ print_error "Invalid selection. Please enter a number or 'skip'"
+ return 1
+ fi
+
+ # Get the selected base ID
+ local selected_base_id
+ selected_base_id=$(echo "$bases_info" | sed -n "${selection}p" | cut -d'|' -f1)
+
+ if [[ -z "$selected_base_id" ]]; then
+ print_error "Invalid selection"
+ return 1
+ fi
+
+ SOURCE_BASE_ID="$selected_base_id"
+ print_success "Selected base ID: $SOURCE_BASE_ID"
+ return 0
+}
+
+# Function to select tables for migration
+select_migration_tables() {
+ local source_base_id=$1
+
+ print_status "Fetching tables from source base..."
+
+ local tables_response
+ tables_response=$(list_base_tables "$source_base_id")
+
+ if [[ $? -ne 0 ]]; then
+ print_error "Could not fetch tables from source base"
+ return 1
+ fi
+
+ # Parse and display available tables
+ local tables_info
+ tables_info=$(echo "$tables_response" | jq -r '.list[] | "\(.id)|\(.title)|\(.table_name)"' 2>/dev/null)
+
+ if [[ -z "$tables_info" ]]; then
+ print_warning "No tables found in source base"
+ return 1
+ fi
+
+ echo ""
+ print_status "Available tables in source base:"
+ print_status "================================"
+
+ local counter=1
+ echo "$tables_info" | while IFS='|' read -r table_id title table_name; do
+ echo " $counter) $title ($table_name)"
+ echo " Table ID: $table_id"
+ echo ""
+ counter=$((counter + 1))
+ done
+
+ echo ""
+ print_status "Select tables to migrate (comma-separated numbers, or 'all' for all tables):"
+ echo -n "Selection: "
+ read -r table_selection
+
+ if [[ "$table_selection" == "all" ]]; then
+ SOURCE_TABLE_IDS=$(echo "$tables_info" | cut -d'|' -f1 | tr '\n' ',' | sed 's/,$//')
+ else
+ local selected_ids=""
+ IFS=',' read -ra selections <<< "$table_selection"
+ for selection in "${selections[@]}"; do
+ selection=$(echo "$selection" | xargs) # Trim whitespace
+ if [[ "$selection" =~ ^[0-9]+$ ]]; then
+ local table_id
+ table_id=$(echo "$tables_info" | sed -n "${selection}p" | cut -d'|' -f1)
+ if [[ -n "$table_id" ]]; then
+ selected_ids="$selected_ids$table_id,"
+ fi
+ fi
+ done
+ SOURCE_TABLE_IDS=$(echo "$selected_ids" | sed 's/,$//')
+ fi
+
+ if [[ -z "$SOURCE_TABLE_IDS" ]]; then
+ print_error "No valid tables selected"
+ return 1
+ fi
+
+ print_success "Selected table IDs: $SOURCE_TABLE_IDS"
+ return 0
+}
+
+# Function to migrate data from source to destination
+migrate_table_data() {
+ local source_base_id=$1
+ local dest_base_id=$2
+ local source_table_id=$3
+ local dest_table_id=$4
+ local table_name=$5
+
+ print_status "Migrating data from $table_name..."
+
+ # Export data from source table
+ local exported_data
+ exported_data=$(export_table_data "$source_base_id" "$source_table_id" "$table_name")
+
+ if [[ $? -ne 0 ]]; then
+ print_error "Failed to export data from source table: $table_name"
+ return 1
+ fi
+
+ # Import data to destination table
+ import_table_data "$dest_base_id" "$dest_table_id" "$table_name" "$exported_data"
+
+ if [[ $? -eq 0 ]]; then
+ print_success "Successfully migrated data for table: $table_name"
+ return 0
+ else
+ print_error "Failed to migrate data for table: $table_name"
+ return 1
+ fi
+}
+
# Function to create new project with timestamp
create_new_project() {
# Generate unique project name with timestamp
@@ -410,6 +828,15 @@ create_login_table() {
"uidt": "SingleLineText",
"rqd": false
},
+ {
+ "column_name": "phone",
+ "title": "Phone",
+ "uidt": "PhoneNumber",
+ "rqd": false,
+ "meta": {
+ "validate": true
+ }
+ },
{
"column_name": "admin",
"title": "Admin",
@@ -1038,11 +1465,86 @@ update_env_file() {
fi
}
+# Function to extract base ID from URL
+extract_base_id_from_url() {
+ local url="$1"
+ echo "$url" | grep -o '/nc/[^/]*' | sed 's|/nc/||'
+}
+
+# Function to prompt user about data migration
+prompt_migration_choice() {
+ print_status "NocoDB Auto-Setup - Migration Options"
+ print_status "====================================="
+ echo ""
+ print_status "This script will create a new NocoDB base with fresh tables."
+ echo ""
+ print_status "Migration Options:"
+ print_status " 1) Fresh installation (create new base with default data)"
+ print_status " 2) Migrate from existing base (preserve your current data)"
+ echo ""
+
+ # Check if we have existing URLs in .env to suggest migration
+ if [[ -n "$NOCODB_VIEW_URL" ]]; then
+ local current_base_id=$(extract_base_id_from_url "$NOCODB_VIEW_URL")
+ print_warning "Detected existing base in .env: $current_base_id"
+ print_warning "You may want to migrate data from your current base."
+ fi
+
+ echo ""
+ echo -n "Choose option (1 or 2): "
+ read -r choice
+
+ case $choice in
+ 1)
+ print_status "Selected: Fresh installation"
+ MIGRATE_DATA=false
+ return 0
+ ;;
+ 2)
+ print_status "Selected: Data migration"
+ MIGRATE_DATA=true
+ return 0
+ ;;
+ *)
+ print_error "Invalid choice. Please enter 1 or 2."
+ prompt_migration_choice
+ ;;
+ esac
+}
+
# Main execution
main() {
+ # Parse command line arguments
+ parse_arguments "$@"
+
print_status "Starting NocoDB Auto-Setup..."
print_status "================================"
+ # Always prompt for migration choice unless --migrate-data was explicitly passed
+ if [[ "$MIGRATE_DATA" != "true" ]]; then
+ prompt_migration_choice
+ fi
+
+ # Handle data migration setup if requested
+ if [[ "$MIGRATE_DATA" == "true" ]]; then
+ print_status ""
+ print_status "=== Data Migration Setup ==="
+
+ if select_source_base; then
+ if select_migration_tables "$SOURCE_BASE_ID"; then
+ print_success "Migration setup completed"
+ print_warning "Data will be migrated after creating the new base and tables"
+ else
+ print_warning "Table selection failed, proceeding without migration"
+ MIGRATE_DATA=false
+ fi
+ else
+ print_warning "Base selection failed, proceeding without migration"
+ MIGRATE_DATA=false
+ fi
+ print_status ""
+ fi
+
# Always create a new project
print_status "Creating new base..."
print_warning "This script creates a NEW base and does NOT modify existing data"
@@ -1079,14 +1581,53 @@ main() {
# Wait a moment for tables to be fully created
sleep 3
- # Create default data
- print_status "Setting up default data..."
-
- # Create default admin user
- create_default_admin "$BASE_ID" "$LOGIN_TABLE_ID"
-
- # Create default settings row (includes both start location and walk sheet config)
- create_default_start_location "$BASE_ID" "$SETTINGS_TABLE_ID"
+ # Handle data migration if enabled
+ if [[ "$MIGRATE_DATA" == "true" && -n "$SOURCE_BASE_ID" && -n "$SOURCE_TABLE_IDS" ]]; then
+ print_status "================================"
+ print_status "Starting data migration..."
+ print_status "================================"
+
+ # Create mapping of table names to new table IDs
+ declare -A new_table_map=(
+ ["locations"]="$LOCATIONS_TABLE_ID"
+ ["login"]="$LOGIN_TABLE_ID"
+ ["settings"]="$SETTINGS_TABLE_ID"
+ ["shifts"]="$SHIFTS_TABLE_ID"
+ ["shift_signups"]="$SHIFT_SIGNUPS_TABLE_ID"
+ ["cuts"]="$CUTS_TABLE_ID"
+ )
+
+ # Get source table information
+ local source_tables_response
+ source_tables_response=$(list_base_tables "$SOURCE_BASE_ID")
+
+ # Migrate each selected table
+ IFS=',' read -ra table_ids <<< "$SOURCE_TABLE_IDS"
+ for source_table_id in "${table_ids[@]}"; do
+ # Get table name from source
+ local table_info
+ table_info=$(echo "$source_tables_response" | jq -r ".list[] | select(.id == \"$source_table_id\") | .table_name" 2>/dev/null)
+
+ if [[ -n "$table_info" && -n "${new_table_map[$table_info]}" ]]; then
+ migrate_table_data "$SOURCE_BASE_ID" "$BASE_ID" "$source_table_id" "${new_table_map[$table_info]}" "$table_info"
+ else
+ print_warning "Skipping migration for unknown table: $table_info (ID: $source_table_id)"
+ fi
+ done
+
+ print_status "================================"
+ print_success "Data migration completed!"
+ print_status "================================"
+ else
+ # Create default data only if not migrating
+ print_status "Setting up default data..."
+
+ # Create default admin user
+ create_default_admin "$BASE_ID" "$LOGIN_TABLE_ID"
+
+ # Create default settings row (includes both start location and walk sheet config)
+ create_default_start_location "$BASE_ID" "$SETTINGS_TABLE_ID"
+ fi
# Update .env file with new table URLs
update_env_file "$BASE_ID" "$LOCATIONS_TABLE_ID" "$LOGIN_TABLE_ID" "$SETTINGS_TABLE_ID" "$SHIFTS_TABLE_ID" "$SHIFT_SIGNUPS_TABLE_ID" "$CUTS_TABLE_ID"
@@ -1100,15 +1641,30 @@ main() {
print_status "Next steps:"
print_status "1. Login to your NocoDB instance at: $BASE_URL"
print_status "2. Your .env file has been automatically updated with the new table URLs!"
- print_status "3. The default admin user is: admin@thebunkerops.ca with password: admin123"
- print_status "4. IMPORTANT: Change the default password after first login!"
- print_status "5. Start adding your location data!"
+
+ if [[ "$MIGRATE_DATA" == "true" && -n "$SOURCE_BASE_ID" && -n "$SOURCE_TABLE_IDS" ]]; then
+ print_status "3. Your existing data has been migrated to the new base!"
+ print_status "4. Review the migrated data and verify everything transferred correctly"
+ print_status "5. If you had custom admin users, you may need to update passwords"
+ else
+ print_status "3. The default admin user is: admin@thebunkerops.ca with password: admin123"
+ print_status "4. IMPORTANT: Change the default password after first login!"
+ print_status "5. Start adding your location data!"
+ fi
print_warning ""
print_warning "IMPORTANT: This script created a NEW base. Your existing data was NOT modified."
print_warning "Your .env file has been automatically updated with the new table URLs."
print_warning "A backup of your previous .env file was created with a timestamp."
- print_warning "SECURITY: Change the default admin password immediately after first login!"
+
+ if [[ "$MIGRATE_DATA" != "true" ]]; then
+ print_warning "SECURITY: Change the default admin password immediately after first login!"
+ fi
+
+ if [[ "$MIGRATE_DATA" == "true" ]]; then
+ print_warning "DATA MIGRATION: Verify all migrated data is correct before using in production!"
+ print_warning "The original base remains unchanged as a backup."
+ fi
}
# Check if script is being run directly
diff --git a/map/files-explainer.md b/map/files-explainer.md
index f62c4e7..d923c66 100644
--- a/map/files-explainer.md
+++ b/map/files-explainer.md
@@ -24,7 +24,14 @@ Documents the development and requirements of the NocoDB automation script for t
# build-nocodb.sh
-Bash script to automate creation of required NocoDB tables and default data for the app.
+Bash script to automate creation of required NocoDB tables and default data for the app. Features:
+- Creates a fresh NocoDB base with the six required tables (locations, login, settings, shifts, shift_signups, cuts)
+- Optional data migration from existing NocoDB bases (--migrate-data flag)
+- Interactive base and table selection for migration
+- Preserves original data while creating new base
+- Auto-updates .env file with new table URLs
+- Dependency checking (requires jq and curl)
+- Comprehensive error handling and user feedback
# combined.log
diff --git a/map/test_print_debug.html b/map/test_print_debug.html
deleted file mode 100644
index 15a6361..0000000
--- a/map/test_print_debug.html
+++ /dev/null
@@ -1,89 +0,0 @@
-Print Debug Test
-
-Check the browser console for test results.
-This tests the enhanced CutPrintUtils functionality.
-
-Key Improvements Made:
-✅ Auto-load locations when printing if not already loaded
-✅ Auto-display locations on map for print capture
-✅ Enhanced map capture with html2canvas (priority #1)
-✅ Improved dom-to-image capture with better filtering
-✅ Better UI state management (toggle button updates)
-✅ Enhanced debugging and logging
-✅ Auto-show locations when viewing cuts (if enabled)
-
-Root Cause Analysis:
-The issue was that locations were not automatically displayed on the map when viewing a cut or printing.
-The print function expected locations to be visible, but they were only shown when the user manually clicked "Show Locations".
-
-Solution:
-Print Enhancement: The print function now ensures locations are loaded and displayed before capturing the map
-View Enhancement: When viewing a cut, locations are automatically loaded if the cut has show_locations enabled
-Capture Enhancement: Improved map capture methods with html2canvas as primary method
-State Management: Better synchronization between location visibility and UI state