From f44cb35253e79a1fda54de3155bfb46d017dd5a9 Mon Sep 17 00:00:00 2001
From: admin
Date: Wed, 10 Sep 2025 18:07:09 -0600
Subject: [PATCH] another build update

---
 map/build-nocodb.sh | 87 +++++++++++++++++++++++++++++++--------------
 1 file changed, 61 insertions(+), 26 deletions(-)

diff --git a/map/build-nocodb.sh b/map/build-nocodb.sh
index d7b2828..0740b52 100755
--- a/map/build-nocodb.sh
+++ b/map/build-nocodb.sh
@@ -189,14 +189,14 @@ make_api_call() {
     if [ "$method" = "GET" ]; then
         response=$(curl -s -w "%{http_code}" -H "xc-token: $NOCODB_API_TOKEN" \
             -H "Content-Type: application/json" \
-            --max-time 30 \
+            --max-time 60 \
             "$full_url" 2>/dev/null)
         curl_exit_code=$?
     else
         response=$(curl -s -w "%{http_code}" -X "$method" \
             -H "xc-token: $NOCODB_API_TOKEN" \
             -H "Content-Type: application/json" \
-            --max-time 30 \
+            --max-time 60 \
             -d "$data" \
             "$full_url" 2>/dev/null)
         curl_exit_code=$?
@@ -368,7 +368,7 @@ export_table_data() {
     print_status "Found $total_count records in table: $table_name"
 
     # If we have a small number of records, get them all at once
-    if [[ "$total_count" -le 1000 ]]; then
+    if [[ "$total_count" -le 100 ]]; then
         local response
         response=$(make_api_call "GET" "/tables/$table_id/records?limit=$total_count" "" "Exporting all $total_count records from $table_name" "v2")
 
@@ -380,12 +380,12 @@ export_table_data() {
             return 1
         fi
     else
-        # For larger datasets, paginate through all records
-        print_status "Large dataset detected. Paginating through all $total_count records..."
+        # For larger datasets, paginate through all records in smaller chunks
+        print_status "Dataset with $total_count records detected. Paginating in chunks of 100..."
 
         local all_records="[]"
         local offset=0
-        local limit=1000
+        local limit=100
         local batch_num=1
 
         while [[ $offset -lt $total_count ]]; do
@@ -398,12 +398,25 @@ export_table_data() {
             print_status "Fetching batch $batch_num: records $((offset + 1)) to $((offset + current_limit)) of $total_count"
 
             local batch_response
-            batch_response=$(make_api_call "GET" "/tables/$table_id/records?limit=$current_limit&offset=$offset" "" "Fetching batch $batch_num" "v2")
+            local retry_count=0
+            local max_retries=3
 
-            if [[ $? -ne 0 ]]; then
-                print_error "Failed to fetch batch $batch_num from table: $table_name"
-                return 1
-            fi
+            while [[ $retry_count -lt $max_retries ]]; do
+                batch_response=$(make_api_call "GET" "/tables/$table_id/records?limit=$current_limit&offset=$offset" "" "Fetching batch $batch_num (attempt $((retry_count + 1)))" "v2")
+
+                if [[ $? -eq 0 ]]; then
+                    break
+                else
+                    retry_count=$((retry_count + 1))
+                    if [[ $retry_count -lt $max_retries ]]; then
+                        print_warning "Batch $batch_num failed, retrying in 3 seconds... (attempt $retry_count/$max_retries)"
+                        sleep 3
+                    else
+                        print_error "Failed to fetch batch $batch_num from table: $table_name after $max_retries attempts"
+                        return 1
+                    fi
+                fi
+            done
 
             # Extract records from this batch and merge with all_records
             local batch_records
@@ -416,11 +429,13 @@ export_table_data() {
             offset=$((offset + current_limit))
             batch_num=$((batch_num + 1))
 
-            # Small delay to avoid overwhelming the API, longer delay for larger datasets
-            if [[ $total_count -gt 5000 ]]; then
-                sleep 1
+            # Conservative delays to avoid overwhelming the API
+            if [[ $total_count -gt 2000 ]]; then
+                sleep 2 # Longer delay for very large datasets
+            elif [[ $total_count -gt 500 ]]; then
+                sleep 1 # Medium delay for medium datasets
             else
-                sleep 0.5
+                sleep 0.5 # Short delay for smaller datasets
             fi
         done
 
@@ -473,12 +488,12 @@ import_table_data() {
     echo "0" > "$temp_file"
 
     # Add progress reporting for large datasets
-    local progress_interval=50
+    local progress_interval=25
     if [[ $total_records -gt 200 ]]; then
-        progress_interval=100
+        progress_interval=50
     fi
     if [[ $total_records -gt 1000 ]]; then
-        progress_interval=250
+        progress_interval=100
     fi
 
     # Parse records and import them one by one (to handle potential ID conflicts)
@@ -507,15 +522,30 @@ import_table_data() {
         # Use a simpler call without the make_api_call wrapper for better error handling
         local response
         local http_code
+        local retry_count=0
+        local max_retries=2
 
-        response=$(curl -s -w "%{http_code}" -X "POST" \
-            -H "xc-token: $NOCODB_API_TOKEN" \
-            -H "Content-Type: application/json" \
-            --max-time 30 \
-            -d "$cleaned_record" \
-            "$API_BASE_V2/tables/$table_id/records" 2>/dev/null)
-
-        http_code="${response: -3}"
+        while [[ $retry_count -lt $max_retries ]]; do
+            response=$(curl -s -w "%{http_code}" -X "POST" \
+                -H "xc-token: $NOCODB_API_TOKEN" \
+                -H "Content-Type: application/json" \
+                --max-time 60 \
+                -d "$cleaned_record" \
+                "$API_BASE_V2/tables/$table_id/records" 2>/dev/null)
+
+            http_code="${response: -3}"
+
+            # If successful, break out of retry loop
+            if [[ "$http_code" -ge 200 && "$http_code" -lt 300 ]]; then
+                break
+            fi
+
+            retry_count=$((retry_count + 1))
+            if [[ $retry_count -lt $max_retries ]]; then
+                print_warning "Import failed for record $import_count, retrying... (attempt $retry_count/$max_retries)"
+                sleep 1
+            fi
+        done
 
         if [[ "$http_code" -ge 200 && "$http_code" -lt 300 ]]; then
             success_count=$(cat "$temp_file")
@@ -530,6 +560,11 @@ import_table_data() {
             local response_body="${response%???}"
             print_warning "✗ Failed to import record $import_count/$total_records: $response_body"
         fi
+
+        # Small delay between imports for large datasets to be API-friendly
+        if [[ $total_records -gt 500 && $import_count -lt $total_records ]]; then
+            sleep 0.1
+        fi
     done
 
     # Read final success count