another build update

This commit is contained in:
  parent 609f89ec0c
  commit f44cb35253
@@ -189,14 +189,14 @@ make_api_call() {
     if [ "$method" = "GET" ]; then
         response=$(curl -s -w "%{http_code}" -H "xc-token: $NOCODB_API_TOKEN" \
             -H "Content-Type: application/json" \
-            --max-time 30 \
+            --max-time 60 \
             "$full_url" 2>/dev/null)
         curl_exit_code=$?
     else
         response=$(curl -s -w "%{http_code}" -X "$method" \
             -H "xc-token: $NOCODB_API_TOKEN" \
             -H "Content-Type: application/json" \
-            --max-time 30 \
+            --max-time 60 \
             -d "$data" \
             "$full_url" 2>/dev/null)
         curl_exit_code=$?
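Note: these two edits double curl's --max-time from 30 to 60 seconds on both the GET and non-GET paths. The -w "%{http_code}" option makes curl append the three-digit status code to the captured output, which is why later hunks split it back off with "${response: -3}" and "${response%???}". A minimal standalone sketch of that idiom, using a placeholder URL:

    # Separate the HTTP status code from the response body.
    response=$(curl -s -w "%{http_code}" --max-time 60 "https://example.com" 2>/dev/null)
    http_code="${response: -3}"   # last three characters: the status code
    body="${response%???}"        # everything before those three characters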
@@ -368,7 +368,7 @@ export_table_data() {
     print_status "Found $total_count records in table: $table_name"

     # If we have a small number of records, get them all at once
-    if [[ "$total_count" -le 1000 ]]; then
+    if [[ "$total_count" -le 100 ]]; then
         local response
         response=$(make_api_call "GET" "/tables/$table_id/records?limit=$total_count" "" "Exporting all $total_count records from $table_name" "v2")

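Note: the fetch-all threshold drops from 1000 to 100 records. How $total_count is computed is outside this hunk; a hedged sketch, assuming the NocoDB v2 records endpoint returns the common {"list": [...], "pageInfo": {"totalRows": N}} shape and that jq is available (both assumptions, not shown in this diff):

    # Hypothetical: probe with limit=1 and read the row count from pageInfo.
    probe=$(make_api_call "GET" "/tables/$table_id/records?limit=1" "" "Counting records" "v2")
    total_count=$(echo "$probe" | jq -r '.pageInfo.totalRows // 0')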
@@ -380,12 +380,12 @@ export_table_data() {
             return 1
         fi
     else
-        # For larger datasets, paginate through all records
-        print_status "Large dataset detected. Paginating through all $total_count records..."
+        # For larger datasets, paginate through all records in smaller chunks
+        print_status "Dataset with $total_count records detected. Paginating in chunks of 100..."

         local all_records="[]"
         local offset=0
-        local limit=1000
+        local limit=100
         local batch_num=1

         while [[ $offset -lt $total_count ]]; do
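Note: the page size for the paginated branch shrinks from 1000 to 100. The merge step referenced by the comment in the next hunk ("merge with all_records") is not shown in full; a sketch of one way to accumulate pages with jq, reusing the script's own variable names (the exact merge the script performs may differ):

    # Append one page's .list array onto the running JSON array.
    batch_records=$(echo "$batch_response" | jq '.list')
    all_records=$(jq -n --argjson acc "$all_records" --argjson page "$batch_records" '$acc + $page')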
@@ -398,12 +398,25 @@ export_table_data() {
         print_status "Fetching batch $batch_num: records $((offset + 1)) to $((offset + current_limit)) of $total_count"

         local batch_response
-        batch_response=$(make_api_call "GET" "/tables/$table_id/records?limit=$current_limit&offset=$offset" "" "Fetching batch $batch_num" "v2")
-
-        if [[ $? -ne 0 ]]; then
-            print_error "Failed to fetch batch $batch_num from table: $table_name"
-            return 1
-        fi
+        local retry_count=0
+        local max_retries=3
+
+        while [[ $retry_count -lt $max_retries ]]; do
+            batch_response=$(make_api_call "GET" "/tables/$table_id/records?limit=$current_limit&offset=$offset" "" "Fetching batch $batch_num (attempt $((retry_count + 1)))" "v2")
+
+            if [[ $? -eq 0 ]]; then
+                break
+            else
+                retry_count=$((retry_count + 1))
+                if [[ $retry_count -lt $max_retries ]]; then
+                    print_warning "Batch $batch_num failed, retrying in 3 seconds... (attempt $retry_count/$max_retries)"
+                    sleep 3
+                else
+                    print_error "Failed to fetch batch $batch_num from table: $table_name after $max_retries attempts"
+                    return 1
+                fi
+            fi
+        done

         # Extract records from this batch and merge with all_records
         local batch_records
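Note: the retry loop relies on $? reflecting make_api_call's exit status, which is why local batch_response is declared on its own line. Combining local with the assignment would make $? report the status of local itself (always 0), so the success check would never see a failure. A self-contained demonstration:

    # $? after `local var=$(cmd)` is the status of `local`, not of cmd.
    f() {
        local a=$(false); echo "combined: $?"   # prints: combined: 0
        local b
        b=$(false);       echo "split: $?"      # prints: split: 1
    }
    f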
@@ -416,11 +429,13 @@ export_table_data() {
         offset=$((offset + current_limit))
         batch_num=$((batch_num + 1))

-        # Small delay to avoid overwhelming the API, longer delay for larger datasets
-        if [[ $total_count -gt 5000 ]]; then
-            sleep 1
+        # Conservative delays to avoid overwhelming the API
+        if [[ $total_count -gt 2000 ]]; then
+            sleep 2  # Longer delay for very large datasets
+        elif [[ $total_count -gt 500 ]]; then
+            sleep 1  # Medium delay for medium datasets
         else
-            sleep 0.5
+            sleep 0.5  # Short delay for smaller datasets
         fi
     done

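Note: the flat post-batch delay becomes a three-tier throttle keyed on dataset size. Fractional arguments like sleep 0.5 are a GNU/BSD extension rather than POSIX, so portability depends on the host's sleep. The tiers could also live in a small helper; an illustrative sketch, not part of the script:

    # Hypothetical helper mirroring the tiers above.
    throttle() {
        local n=$1
        if [[ $n -gt 2000 ]]; then
            sleep 2      # very large datasets
        elif [[ $n -gt 500 ]]; then
            sleep 1      # medium datasets
        else
            sleep 0.5    # smaller datasets (fractional sleep is non-POSIX)
        fi
    }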
@@ -473,12 +488,12 @@ import_table_data() {
     echo "0" > "$temp_file"

     # Add progress reporting for large datasets
-    local progress_interval=50
+    local progress_interval=25
     if [[ $total_records -gt 200 ]]; then
-        progress_interval=100
+        progress_interval=50
     fi
     if [[ $total_records -gt 1000 ]]; then
-        progress_interval=250
+        progress_interval=100
     fi

     # Parse records and import them one by one (to handle potential ID conflicts)
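Note: progress messages now fire roughly twice as often (every 25/50/100 records instead of 50/100/250). The line that consumes progress_interval sits outside this hunk; a hedged sketch of the usual pattern:

    # Hypothetical: report every $progress_interval records.
    if (( import_count % progress_interval == 0 )); then
        print_status "Imported $import_count of $total_records records..."
    fi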
@@ -507,16 +522,31 @@ import_table_data() {
             # Use a simpler call without the make_api_call wrapper for better error handling
             local response
             local http_code
+            local retry_count=0
+            local max_retries=2
+
+            while [[ $retry_count -lt $max_retries ]]; do
             response=$(curl -s -w "%{http_code}" -X "POST" \
                 -H "xc-token: $NOCODB_API_TOKEN" \
                 -H "Content-Type: application/json" \
-                --max-time 30 \
+                --max-time 60 \
                 -d "$cleaned_record" \
                 "$API_BASE_V2/tables/$table_id/records" 2>/dev/null)

             http_code="${response: -3}"

+            # If successful, break out of retry loop
+            if [[ "$http_code" -ge 200 && "$http_code" -lt 300 ]]; then
+                break
+            fi
+
+            retry_count=$((retry_count + 1))
+            if [[ $retry_count -lt $max_retries ]]; then
+                print_warning "Import failed for record $import_count, retrying... (attempt $retry_count/$max_retries)"
+                sleep 1
+            fi
+            done
             if [[ "$http_code" -ge 200 && "$http_code" -lt 300 ]]; then
                 success_count=$(cat "$temp_file")
                 success_count=$((success_count + 1))
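Note: with max_retries=2 the loop makes at most two attempts per record, i.e. one retry after the first failure. curl also has a built-in --retry; a sketch of that alternative, with the caveat that plain --retry only re-attempts transient failures (timeouts and a few specific errors), not arbitrary non-2xx responses, so it is not a drop-in replacement for the loop above:

    # Sketch: letting curl retry transient failures itself.
    response=$(curl -s -w "%{http_code}" -X "POST" \
        -H "xc-token: $NOCODB_API_TOKEN" \
        -H "Content-Type: application/json" \
        --max-time 60 --retry 2 --retry-delay 1 \
        -d "$cleaned_record" \
        "$API_BASE_V2/tables/$table_id/records" 2>/dev/null)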
@@ -530,6 +560,11 @@ import_table_data() {
                 local response_body="${response%???}"
                 print_warning "✗ Failed to import record $import_count/$total_records: $response_body"
             fi
+
+            # Small delay between imports for large datasets to be API-friendly
+            if [[ $total_records -gt 500 && $import_count -lt $total_records ]]; then
+                sleep 0.1
+            fi
         done

         # Read final success count