another build update
parent 609f89ec0c
commit f44cb35253

@@ -189,14 +189,14 @@ make_api_call() {
     if [ "$method" = "GET" ]; then
         response=$(curl -s -w "%{http_code}" -H "xc-token: $NOCODB_API_TOKEN" \
             -H "Content-Type: application/json" \
-            --max-time 30 \
+            --max-time 60 \
             "$full_url" 2>/dev/null)
         curl_exit_code=$?
     else
         response=$(curl -s -w "%{http_code}" -X "$method" \
             -H "xc-token: $NOCODB_API_TOKEN" \
             -H "Content-Type: application/json" \
-            --max-time 30 \
+            --max-time 60 \
             -d "$data" \
             "$full_url" 2>/dev/null)
         curl_exit_code=$?
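
Reviewer note: because the request uses -s -w "%{http_code}", curl appends the three-digit status code directly after the response body in the captured variable, which is why the import path later slices the two apart again. A minimal sketch of that capture-and-split pattern, using a placeholder URL rather than the script's real endpoint:

    # Sketch only: capture body + status in one variable, then split them.
    out=$(curl -s -w "%{http_code}" --max-time 60 "https://example.invalid/api" 2>/dev/null)
    code="${out: -3}"    # the write-out puts the status in the last three characters
    body="${out%???}"    # everything before those three characters is the body
    echo "status=$code body_bytes=${#body}"
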
@@ -368,7 +368,7 @@ export_table_data() {
     print_status "Found $total_count records in table: $table_name"

     # If we have a small number of records, get them all at once
-    if [[ "$total_count" -le 1000 ]]; then
+    if [[ "$total_count" -le 100 ]]; then
         local response
         response=$(make_api_call "GET" "/tables/$table_id/records?limit=$total_count" "" "Exporting all $total_count records from $table_name" "v2")

@@ -380,12 +380,12 @@ export_table_data() {
             return 1
         fi
     else
-        # For larger datasets, paginate through all records
-        print_status "Large dataset detected. Paginating through all $total_count records..."
+        # For larger datasets, paginate through all records in smaller chunks
+        print_status "Dataset with $total_count records detected. Paginating in chunks of 100..."

         local all_records="[]"
         local offset=0
-        local limit=1000
+        local limit=100
         local batch_num=1

         while [[ $offset -lt $total_count ]]; do
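
For context on the loop that follows, this is the shape of the chunked pagination in isolation. A standalone sketch with hypothetical names (fetch_page stands in for make_api_call, and ".list" mirrors the assumed layout of the paged response that the extraction step pulls records from); it requires bash and jq:

    # Standalone sketch of the chunking math only.
    fetch_page() { echo '{"list":[{"id":1},{"id":2}]}'; }   # canned two-record page
    total=6; limit=2; offset=0; all_records="[]"
    while [ "$offset" -lt "$total" ]; do
        page=$(fetch_page "$offset" "$limit")
        batch=$(echo "$page" | jq '.list')
        all_records=$(jq -s '.[0] + .[1]' <(echo "$all_records") <(echo "$batch"))
        offset=$((offset + limit))
    done
    echo "$all_records" | jq 'length'   # 6 with the canned pages above
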
@@ -398,12 +398,25 @@ export_table_data() {
             print_status "Fetching batch $batch_num: records $((offset + 1)) to $((offset + current_limit)) of $total_count"

             local batch_response
-            batch_response=$(make_api_call "GET" "/tables/$table_id/records?limit=$current_limit&offset=$offset" "" "Fetching batch $batch_num" "v2")
-
-            if [[ $? -ne 0 ]]; then
-                print_error "Failed to fetch batch $batch_num from table: $table_name"
-                return 1
-            fi
+            local retry_count=0
+            local max_retries=3
+
+            while [[ $retry_count -lt $max_retries ]]; do
+                batch_response=$(make_api_call "GET" "/tables/$table_id/records?limit=$current_limit&offset=$offset" "" "Fetching batch $batch_num (attempt $((retry_count + 1)))" "v2")
+
+                if [[ $? -eq 0 ]]; then
+                    break
+                else
+                    retry_count=$((retry_count + 1))
+                    if [[ $retry_count -lt $max_retries ]]; then
+                        print_warning "Batch $batch_num failed, retrying in 3 seconds... (attempt $retry_count/$max_retries)"
+                        sleep 3
+                    else
+                        print_error "Failed to fetch batch $batch_num from table: $table_name after $max_retries attempts"
+                        return 1
+                    fi
+                fi
+            done

             # Extract records from this batch and merge with all_records
             local batch_records
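
One detail the new retry loop leans on: after batch_response=$(make_api_call ...), the $? that gets tested is the exit status of make_api_call itself, since an assignment whose right-hand side is a command substitution passes that status through. A small self-contained illustration with a hypothetical "flaky" command that fails twice and then succeeds:

    # Illustration only; mirrors the retry shape above.
    attempts_file=$(mktemp)
    echo 0 > "$attempts_file"
    flaky() {
        local n
        n=$(cat "$attempts_file")
        echo $((n + 1)) > "$attempts_file"
        if [ "$n" -ge 2 ]; then echo "payload"; else return 1; fi
    }
    retry=0; max=3
    while [ "$retry" -lt "$max" ]; do
        result=$(flaky)
        if [ $? -eq 0 ]; then break; fi   # exit status of flaky, not of the assignment syntax
        retry=$((retry + 1))
    done
    echo "got '$result' after $((retry + 1)) attempt(s)"
    rm -f "$attempts_file"
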
@@ -416,11 +429,13 @@ export_table_data() {
             offset=$((offset + current_limit))
             batch_num=$((batch_num + 1))

-            # Small delay to avoid overwhelming the API, longer delay for larger datasets
-            if [[ $total_count -gt 5000 ]]; then
-                sleep 1
+            # Conservative delays to avoid overwhelming the API
+            if [[ $total_count -gt 2000 ]]; then
+                sleep 2  # Longer delay for very large datasets
+            elif [[ $total_count -gt 500 ]]; then
+                sleep 1  # Medium delay for medium datasets
             else
-                sleep 0.5
+                sleep 0.5  # Short delay for smaller datasets
             fi
         done

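
Minor portability aside on the sub-second delays: fractional arguments such as 0.5 are supported by GNU coreutils sleep but are not guaranteed by POSIX, so on a very minimal host a wrapper like this hypothetical helper would keep the throttling intact:

    # Hypothetical fallback if the host's sleep only accepts whole seconds.
    nap() { sleep "$1" 2>/dev/null || sleep 1; }
    nap 0.5
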
@@ -473,12 +488,12 @@ import_table_data() {
    echo "0" > "$temp_file"

    # Add progress reporting for large datasets
-    local progress_interval=50
+    local progress_interval=25
    if [[ $total_records -gt 200 ]]; then
-        progress_interval=100
+        progress_interval=50
    fi
    if [[ $total_records -gt 1000 ]]; then
-        progress_interval=250
+        progress_interval=100
    fi

    # Parse records and import them one by one (to handle potential ID conflicts)
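
The tuned progress_interval values presumably drive a modulo check further down in the import loop; the exact reporting line is not visible in this diff, but the intended effect is along these lines (hypothetical standalone loop):

    total_records=120
    progress_interval=25
    for ((i = 1; i <= total_records; i++)); do
        # ... real import work would happen here ...
        if (( i % progress_interval == 0 || i == total_records )); then
            echo "Progress: $i/$total_records records imported"
        fi
    done
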
@@ -507,15 +522,30 @@ import_table_data() {
        # Use a simpler call without the make_api_call wrapper for better error handling
        local response
        local http_code
+        local retry_count=0
+        local max_retries=2

-        response=$(curl -s -w "%{http_code}" -X "POST" \
-            -H "xc-token: $NOCODB_API_TOKEN" \
-            -H "Content-Type: application/json" \
-            --max-time 30 \
-            -d "$cleaned_record" \
-            "$API_BASE_V2/tables/$table_id/records" 2>/dev/null)
-
-        http_code="${response: -3}"
+        while [[ $retry_count -lt $max_retries ]]; do
+            response=$(curl -s -w "%{http_code}" -X "POST" \
+                -H "xc-token: $NOCODB_API_TOKEN" \
+                -H "Content-Type: application/json" \
+                --max-time 60 \
+                -d "$cleaned_record" \
+                "$API_BASE_V2/tables/$table_id/records" 2>/dev/null)
+
+            http_code="${response: -3}"
+
+            # If successful, break out of retry loop
+            if [[ "$http_code" -ge 200 && "$http_code" -lt 300 ]]; then
+                break
+            fi
+
+            retry_count=$((retry_count + 1))
+            if [[ $retry_count -lt $max_retries ]]; then
+                print_warning "Import failed for record $import_count, retrying... (attempt $retry_count/$max_retries)"
+                sleep 1
+            fi
+        done

        if [[ "$http_code" -ge 200 && "$http_code" -lt 300 ]]; then
            success_count=$(cat "$temp_file")
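
Aside: the retry condition here keys off the parsed HTTP status rather than curl's exit code. If curl times out and leaves response empty, the slice "${response: -3}" is empty and compares as 0 in bash's arithmetic test, so the request is still treated as a failure and retried, which is the desired outcome. If stricter validation were ever wanted, a guard along these lines (a sketch, not part of this change) would also reject a garbled trailing slice:

    is_success() {
        local code="$1"
        [[ "$code" =~ ^[0-9]{3}$ ]] && (( 10#$code >= 200 && 10#$code <= 299 ))
    }
    response='{"Id": 42}201'   # body with the appended status code
    is_success "${response: -3}" && echo "imported" || echo "retry / report failure"
    response=''                # what a timed-out curl can leave behind
    is_success "${response: -3}" && echo "imported" || echo "retry / report failure"
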
@@ -530,6 +560,11 @@ import_table_data() {
            local response_body="${response%???}"
            print_warning "✗ Failed to import record $import_count/$total_records: $response_body"
        fi
+
+        # Small delay between imports for large datasets to be API-friendly
+        if [[ $total_records -gt 500 && $import_count -lt $total_records ]]; then
+            sleep 0.1
+        fi
    done

    # Read final success count