allow peer backups
.gitignore (vendored, 1 line added)
@@ -1 +1,2 @@
 .env
+peer-backups/
backup-peers.sh (new executable file, 260 lines)
@@ -0,0 +1,260 @@
#!/bin/bash

# Script to back up peers from all running nodes
# Can be run as a cronjob to periodically back up peer lists
# Usage: ./backup-peers.sh [backup-directory]
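#
# Example cron entry (hypothetical paths; adjust to where this repo is checked out):
#   0 */6 * * * /home/node/rpc-stack/backup-peers.sh >> /var/log/peer-backup.log 2>&1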

BASEPATH="$(dirname "$0")"
source "$BASEPATH/.env"

# Default backup directory
BACKUP_DIR="${1:-$BASEPATH/peer-backups}"

# Create backup directory if it doesn't exist
mkdir -p "$BACKUP_DIR"

# Timestamp for this backup run
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
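# e.g. 20240101_120000, used in the backup file names below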

# Blacklist for compose files (same as show-status.sh)
blacklist=(
    "drpc.yml" "drpc-free.yml" "drpc-home.yml"              # dshackles
    "arbitrum-one-mainnet-arbnode-archive-trace.yml"        # always behind and no reference rpc
    "ethereum-beacon-mainnet-lighthouse-pruned-blobs"       # can't handle beacon rest api yet
    "rpc.yml" "monitoring.yml" "ftp.yml" "backup-http.yml" "base.yml" # no rpcs
)

# Path blacklist (read from file if it exists)
path_blacklist=()
if [ -f "$BASEPATH/path-blacklist.txt" ]; then
    while IFS= read -r line; do
        if [ -n "$line" ]; then
            path_blacklist+=("$line")
        fi
    done < "$BASEPATH/path-blacklist.txt"
fi
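
# Each line of path-blacklist.txt is treated as an extended regex and matched
# against the RPC path (see should_include_path below). Hypothetical example file:
#   -beacon-
#   ^ftp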

# Protocol and domain settings
if [ -n "$NO_SSL" ]; then
    PROTO="http"
    DOMAIN="${DOMAIN:-0.0.0.0}"
else
    PROTO="https"
fi

# Function to extract RPC paths from a compose file
extract_rpc_paths() {
    local compose_file="$1"
    local full_path="$BASEPATH/${compose_file}"

    if [ ! -f "$full_path" ]; then
        return 1
    fi

    # Extract paths using grep (same method as peer-count.sh)
    pathlist=$(grep -oP "stripprefix\.prefixes.*?/\K[^\"]+" "$full_path" 2>/dev/null)

    if [ -z "$pathlist" ]; then
        return 1
    fi

    echo "$pathlist"
}
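
# The grep above targets Traefik stripprefix labels; an illustrative compose line
# (hypothetical middleware name) would be:
#   - "traefik.http.middlewares.eth-strip.stripprefix.prefixes=/ethereum-mainnet-archive"
# which yields the path "ethereum-mainnet-archive".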

# Function to check if a path should be included
should_include_path() {
    local path="$1"

    for word in "${path_blacklist[@]}"; do
        if echo "$path" | grep -qE "$word"; then
            return 1
        fi
    done

    return 0
}

# Function to back up peers from a single RPC endpoint
backup_peers_from_path() {
    local compose_file="$1"
    local path="$2"
    local compose_name="${compose_file%.yml}"

    # Sanitize compose name and path for filename
    local safe_compose_name=$(echo "$compose_name" | sed 's/[^a-zA-Z0-9_-]/_/g')
    local safe_path=$(echo "$path" | sed 's|[^a-zA-Z0-9_-]|_|g')

    local RPC_URL="${PROTO}://${DOMAIN}/${path}"
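    # e.g. https://domain.com/ethereum-mainnet-archive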

    # Try admin_peers first (returns detailed peer info)
    response=$(curl --ipv4 -L -s -X POST "$RPC_URL" \
        -H "Content-Type: application/json" \
        --data '{"jsonrpc":"2.0","method":"admin_peers","params":[],"id":1}' \
        --max-time 10 2>/dev/null)
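
    # admin_peers (supported by Geth-style execution clients) returns an array of
    # peer objects, each carrying an "enode" URL roughly of the form (hypothetical):
    #   enode://<128-hex-char-node-id>@203.0.113.10:30303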

    # Check if we got a valid response
    if echo "$response" | jq -e '.result' > /dev/null 2>&1; then
        peer_count=$(echo "$response" | jq -r '.result | length')

        if [ "$peer_count" -gt 0 ]; then
            # Extract enodes
            enodes=$(echo "$response" | jq -r '.result[].enode' 2>/dev/null | grep -v '^$' | grep -v '^null$')

            if [ -n "$enodes" ]; then
                # Create backup file
                local backup_file="$BACKUP_DIR/${safe_compose_name}__${safe_path}__${TIMESTAMP}.json"
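                # e.g. peer-backups/ethereum-mainnet__ethereum-mainnet-archive__20240101_120000.json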

                # Create JSON structure with metadata
                {
                    echo "{"
                    echo "  \"compose_file\": \"$compose_file\","
                    echo "  \"rpc_path\": \"$path\","
                    echo "  \"rpc_url\": \"$RPC_URL\","
                    echo "  \"timestamp\": \"$TIMESTAMP\","
                    echo "  \"peer_count\": $peer_count,"
                    echo "  \"peers\": ["

                    # Write enodes as JSON array
                    first=true
                    while IFS= read -r enode; do
                        if [ -z "$enode" ] || [ "$enode" = "null" ]; then
                            continue
                        fi

                        if [ "$first" = true ]; then
                            first=false
                        else
                            echo ","
                        fi

                        # Escape the enode string for JSON
                        escaped_enode=$(echo "$enode" | sed 's/\\/\\\\/g' | sed 's/"/\\"/g')
                        echo -n "    \"$escaped_enode\""
                    done <<< "$enodes"

                    echo ""
                    echo "  ]"
                    echo "}"
                } > "$backup_file"

                # Also create a simple text file with just enodes (one per line) for easy playback
                local backup_txt_file="$BACKUP_DIR/${safe_compose_name}__${safe_path}__${TIMESTAMP}.txt"
                echo "$enodes" > "$backup_txt_file"

                echo "✓ Backed up $peer_count peer(s) from $compose_file ($path) to $(basename "$backup_file")"
                return 0
            fi

            # Peers were reported but no enodes could be extracted
            return 1
        else
            echo "⚠ No peers found for $compose_file ($path)"
            return 1
        fi
    else
        # Check if this is a method not found error (consensus client or admin API disabled)
        error_code=$(echo "$response" | jq -r '.error.code // empty' 2>/dev/null)

        if [ -n "$error_code" ] && [ "$error_code" != "null" ]; then
            # This is likely a consensus client endpoint, skip it silently
            return 1
        fi

        # Try net_peerCount as fallback (but we can't get enodes from this)
        response=$(curl --ipv4 -L -s -X POST "$RPC_URL" \
            -H "Content-Type: application/json" \
            --data '{"jsonrpc":"2.0","method":"net_peerCount","params":[],"id":1}' \
            --max-time 10 2>/dev/null)

        if echo "$response" | jq -e '.result' > /dev/null 2>&1; then
            # net_peerCount returns a hex quantity (e.g. "0x19"); printf converts it to decimal
            peer_count=$(echo "$response" | jq -r '.result' | xargs printf "%d")
            if [ "$peer_count" -gt 0 ]; then
                echo "⚠ $compose_file ($path) has $peer_count peer(s) but admin_peers not available (cannot backup enodes)"
            fi
        fi

        return 1
    fi
}

# Main execution
if [ -z "$COMPOSE_FILE" ]; then
    echo "Error: COMPOSE_FILE not found in $BASEPATH/.env" >&2
    exit 1
fi
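
# COMPOSE_FILE uses docker compose's colon-separated form; a hypothetical value:
#   COMPOSE_FILE=base.yml:ethereum-mainnet.yml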

# Split COMPOSE_FILE by colon
IFS=':' read -ra parts <<< "$COMPOSE_FILE"

total_backed_up=0
total_failed=0
total_skipped=0

echo "Starting peer backup at $(date)"
echo "Backup directory: $BACKUP_DIR"
echo ""

# Process each compose file
for part in "${parts[@]}"; do
    # Normalize the name so it ends in a single .yml extension
    compose_file="${part%.yml}.yml"

    # Check blacklist
    include=true
    for word in "${blacklist[@]}"; do
        if echo "$compose_file" | grep -qE "$word"; then
            include=false
            break
        fi
    done

    if [ "$include" = false ]; then
        total_skipped=$((total_skipped + 1))
        continue
    fi

    # Extract RPC paths from compose file
    paths=$(extract_rpc_paths "$compose_file")

    if [ -z "$paths" ]; then
        total_skipped=$((total_skipped + 1))
        continue
    fi

    # Process each path
    path_found=false
    for path in $paths; do
        # Check path blacklist
        if should_include_path "$path"; then
            path_found=true
            if backup_peers_from_path "$compose_file" "$path"; then
                total_backed_up=$((total_backed_up + 1))
            else
                total_failed=$((total_failed + 1))
            fi
        fi
    done

    if [ "$path_found" = false ]; then
        total_skipped=$((total_skipped + 1))
    fi
done

echo ""
echo "=========================================="
echo "Backup Summary"
echo "=========================================="
echo "Total nodes backed up: $total_backed_up"
echo "Total nodes failed: $total_failed"
echo "Total nodes skipped: $total_skipped"
echo "Backup directory: $BACKUP_DIR"
echo "Completed at $(date)"
echo ""

# Optional: Clean up old backups (keep last 30 days)
# Enable by setting CLEANUP_OLD_BACKUPS=true (e.g. in .env)
if [ -n "$CLEANUP_OLD_BACKUPS" ] && [ "$CLEANUP_OLD_BACKUPS" = "true" ]; then
    echo "Cleaning up backups older than 30 days..."
    find "$BACKUP_DIR" -name "*.json" -type f -mtime +30 -delete
    find "$BACKUP_DIR" -name "*.txt" -type f -mtime +30 -delete
    echo "Cleanup complete"
fi

exit 0
list-peer-backups.sh (new executable file, 90 lines)
@@ -0,0 +1,90 @@
#!/bin/bash

# Script to list available peer backups
# Usage: ./list-peer-backups.sh [backup-directory] [filter]
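#
# Example (hypothetical filter): list only Ethereum-related backups in the default directory:
#   ./list-peer-backups.sh ./peer-backups ethereum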

BASEPATH="$(dirname "$0")"
BACKUP_DIR="${1:-$BASEPATH/peer-backups}"
FILTER="${2:-}"

if [ ! -d "$BACKUP_DIR" ]; then
    echo "Backup directory does not exist: $BACKUP_DIR"
    echo "Run backup-peers.sh first to create backups"
    exit 1
fi

# Count files
json_count=$(find "$BACKUP_DIR" -name "*.json" -type f | wc -l | tr -d ' ')
txt_count=$(find "$BACKUP_DIR" -name "*.txt" -type f | wc -l | tr -d ' ')

if [ "$json_count" -eq 0 ] && [ "$txt_count" -eq 0 ]; then
    echo "No backups found in $BACKUP_DIR"
    exit 0
fi

echo "Peer backups in: $BACKUP_DIR"
echo "Total backups: $json_count JSON, $txt_count TXT"
echo ""

# List JSON backups with details
if [ "$json_count" -gt 0 ]; then
    echo "JSON Backups (with metadata):"
    echo "=========================================="

    for backup_file in $(find "$BACKUP_DIR" -name "*.json" -type f | sort -r); do
        if [ -n "$FILTER" ] && ! echo "$backup_file" | grep -qi "$FILTER"; then
            continue
        fi

        if command -v jq &> /dev/null; then
            compose_file=$(jq -r '.compose_file // "unknown"' "$backup_file" 2>/dev/null)
            rpc_path=$(jq -r '.rpc_path // "unknown"' "$backup_file" 2>/dev/null)
            timestamp=$(jq -r '.timestamp // "unknown"' "$backup_file" 2>/dev/null)
            peer_count=$(jq -r '.peer_count // 0' "$backup_file" 2>/dev/null)
            rpc_url=$(jq -r '.rpc_url // "unknown"' "$backup_file" 2>/dev/null)

            file_size=$(du -h "$backup_file" | cut -f1)
            # stat -f works on macOS/BSD, stat -c on Linux
            file_date=$(stat -f "%Sm" -t "%Y-%m-%d %H:%M:%S" "$backup_file" 2>/dev/null || stat -c "%y" "$backup_file" 2>/dev/null | cut -d'.' -f1)

            echo "File: $(basename "$backup_file")"
            echo "  Compose: $compose_file"
            echo "  Path: $rpc_path"
            echo "  URL: $rpc_url"
            echo "  Peers: $peer_count"
            echo "  Timestamp: $timestamp"
            echo "  Size: $file_size"
            echo "  Date: $file_date"
            echo ""
        else
            # Fallback if jq is not available
            file_size=$(du -h "$backup_file" | cut -f1)
            file_date=$(stat -f "%Sm" -t "%Y-%m-%d %H:%M:%S" "$backup_file" 2>/dev/null || stat -c "%y" "$backup_file" 2>/dev/null | cut -d'.' -f1)
            echo "$(basename "$backup_file") - $file_size - $file_date"
        fi
    done
fi

# Show usage examples
echo ""
echo "To restore a backup, use:"
echo "  ./restore-peers.sh <backup-file> [rpc-url]"
echo ""
echo "Examples:"
if [ "$json_count" -gt 0 ]; then
    latest_json=$(find "$BACKUP_DIR" -name "*.json" -type f | sort -r | head -1)
    if [ -n "$latest_json" ]; then
        echo "  ./restore-peers.sh $latest_json"
    fi
fi
if [ "$txt_count" -gt 0 ]; then
    latest_txt=$(find "$BACKUP_DIR" -name "*.txt" -type f | sort -r | head -1)
    if [ -n "$latest_txt" ]; then
        if command -v jq &> /dev/null && [ -n "$latest_json" ]; then
            rpc_url=$(jq -r '.rpc_url // ""' "$latest_json" 2>/dev/null)
            if [ -n "$rpc_url" ]; then
                echo "  ./restore-peers.sh $latest_txt $rpc_url"
            fi
        fi
    fi
fi
restore-peers.sh (new executable file, 162 lines)
@@ -0,0 +1,162 @@
#!/bin/bash

# Script to restore peers from a backup file
# Usage: ./restore-peers.sh <backup-file> [rpc-url]
#   backup-file: Path to a backup JSON or TXT file
#   rpc-url:     Optional. If not provided, it is read from the JSON backup file

BASEPATH="$(dirname "$0")"
source "$BASEPATH/.env"

if [ $# -lt 1 ]; then
    echo "Usage: $0 <backup-file> [rpc-url]"
    echo ""
    echo "  backup-file: Path to backup JSON or TXT file"
    echo "  rpc-url: Optional. Target RPC URL to restore peers to"
    echo "           If not provided, will use rpc_url from backup file"
    echo ""
    echo "Examples:"
    echo "  $0 peer-backups/ethereum-mainnet__ethereum-mainnet-archive__20240101_120000.json"
    echo "  $0 peer-backups/ethereum-mainnet__ethereum-mainnet-archive__20240101_120000.txt https://domain.com/ethereum-mainnet-archive"
    exit 1
fi

BACKUP_FILE="$1"
TARGET_URL="$2"

if [ ! -f "$BACKUP_FILE" ]; then
    echo "Error: Backup file not found: $BACKUP_FILE" >&2
    exit 1
fi

# Determine file type and extract enodes
if [[ "$BACKUP_FILE" == *.json ]]; then
    # JSON backup file
    if ! command -v jq &> /dev/null; then
        echo "Error: jq is required to parse JSON backup files" >&2
        exit 1
    fi

    # Extract RPC URL from backup if not provided
    if [ -z "$TARGET_URL" ]; then
        TARGET_URL=$(jq -r '.rpc_url // empty' "$BACKUP_FILE" 2>/dev/null)
        if [ -z "$TARGET_URL" ] || [ "$TARGET_URL" = "null" ]; then
            echo "Error: Could not extract rpc_url from backup file and no target URL provided" >&2
            exit 1
        fi
    fi

    # Extract enodes
    enodes=$(jq -r '.peers[]?' "$BACKUP_FILE" 2>/dev/null)

    if [ -z "$enodes" ]; then
        echo "Error: No peers found in backup file" >&2
        exit 1
    fi

    peer_count=$(jq -r '.peer_count // 0' "$BACKUP_FILE" 2>/dev/null)
    compose_file=$(jq -r '.compose_file // "unknown"' "$BACKUP_FILE" 2>/dev/null)
    rpc_path=$(jq -r '.rpc_path // "unknown"' "$BACKUP_FILE" 2>/dev/null)
    timestamp=$(jq -r '.timestamp // "unknown"' "$BACKUP_FILE" 2>/dev/null)

    echo "Restoring peers from backup:"
    echo "  Compose file: $compose_file"
    echo "  RPC path: $rpc_path"
    echo "  Timestamp: $timestamp"
    echo "  Peer count: $peer_count"
    echo "  Target URL: $TARGET_URL"
    echo ""

elif [[ "$BACKUP_FILE" == *.txt ]]; then
    # TXT backup file (one enode per line)
    enodes=$(grep -v '^$' "$BACKUP_FILE" | grep -v '^null$')

    if [ -z "$enodes" ]; then
        echo "Error: No peers found in backup file" >&2
        exit 1
    fi

    peer_count=$(echo "$enodes" | wc -l | tr -d ' ')

    if [ -z "$TARGET_URL" ]; then
        echo "Error: Target RPC URL required for TXT backup files" >&2
        echo "Usage: $0 <backup-file> <rpc-url>" >&2
        exit 1
    fi

    echo "Restoring peers from backup:"
    echo "  Backup file: $BACKUP_FILE"
    echo "  Peer count: $peer_count"
    echo "  Target URL: $TARGET_URL"
    echo ""

else
    echo "Error: Unsupported backup file format. Expected .json or .txt" >&2
    exit 1
fi

# Confirm before proceeding
read -p "Do you want to restore $peer_count peer(s) to $TARGET_URL? (y/N) " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
    echo "Restore cancelled"
    exit 0
fi

# Restore peers
success_count=0
failed_count=0
skipped_count=0

echo ""
echo "Restoring peers..."

while IFS= read -r enode; do
    if [ -z "$enode" ] || [ "$enode" = "null" ]; then
        skipped_count=$((skipped_count + 1))
        continue
    fi

    # Check if peer is reachable (optional, can be slow)
    if [ -f "$BASEPATH/check-enode.sh" ]; then
        if ! "$BASEPATH/check-enode.sh" "$enode" --target "$TARGET_URL" > /dev/null 2>&1; then
            echo "⚠ Skipping unreachable peer: ${enode:0:50}..."
            skipped_count=$((skipped_count + 1))
            continue
        fi
    fi

    echo -n "Adding peer: ${enode:0:50}... "

    result=$(curl --ipv4 -X POST -H "Content-Type: application/json" \
        --silent --max-time 10 \
        --data "{\"jsonrpc\":\"2.0\",\"method\":\"admin_addPeer\",\"params\":[\"${enode}\"],\"id\":1}" \
        "$TARGET_URL" 2>/dev/null | jq -r '.result // .error.message // "unknown error"' 2>/dev/null)

    if [ "$result" = "true" ] || [ "$result" = "null" ]; then
        echo "✓ Success"
        success_count=$((success_count + 1))
    else
        echo "✗ Failed: $result"
        failed_count=$((failed_count + 1))
    fi

    # Small delay to avoid overwhelming the node
    sleep 0.1
done <<< "$enodes"

echo ""
echo "=========================================="
echo "Restore Summary"
echo "=========================================="
echo "Successful: $success_count/$peer_count"
echo "Failed: $failed_count/$peer_count"
echo "Skipped: $skipped_count/$peer_count"
echo ""

if [ "$failed_count" -gt 0 ]; then
    exit 1
fi

exit 0