This commit is contained in: home
178 list-port-forwards.py Normal file
@@ -0,0 +1,178 @@
#!/usr/bin/env python3

import os
import yaml
import re
import pwd
import grp
from pathlib import Path

def load_env_file(env_file_path='.env'):
    """Load environment variables from .env file."""
    env_vars = {}

    if not os.path.exists(env_file_path):
        print(f"Warning: {env_file_path} file not found in current directory")
        return env_vars

    try:
        with open(env_file_path, 'r') as f:
            for line in f:
                line = line.strip()
                # Skip empty lines and comments
                if not line or line.startswith('#'):
                    continue

                # Parse KEY=VALUE format
                if '=' in line:
                    key, value = line.split('=', 1)
                    key = key.strip()
                    value = value.strip()

                    # Remove quotes if present
                    if (value.startswith('"') and value.endswith('"')) or \
                       (value.startswith("'") and value.endswith("'")):
                        value = value[1:-1]

                    env_vars[key] = value

        print(f"Loaded {len(env_vars)} variables from {env_file_path}")
        return env_vars

    except Exception as e:
        print(f"Error reading {env_file_path}: {e}")
        return env_vars

def extract_ports_from_yaml(yaml_file):
    """Extract port mappings from a single YAML file."""
    try:
        if not os.path.exists(yaml_file):
            print(f"Warning: File {yaml_file} not found")
            return []

        with open(yaml_file, 'r') as f:
            data = yaml.safe_load(f)

        if not data or 'services' not in data:
            return []

        port_mappings = []

        for service_name, service_config in data['services'].items():
            if 'ports' in service_config:
                for port_mapping in service_config['ports']:
                    # Handle different port mapping formats
                    if isinstance(port_mapping, str):
                        # Format: "host_port:container_port" or "host_port:container_port/protocol"
                        port_parts = port_mapping.split(':')
                        if len(port_parts) >= 2:
                            host_port = port_parts[0]
                            container_port = port_parts[1]
                            # Remove protocol suffix if present (e.g., "/udp")
                            host_port = re.sub(r'/\w+$', '', host_port)
                            container_port = re.sub(r'/\w+$', '', container_port)
                            port_mappings.append((service_name, host_port))
                    elif isinstance(port_mapping, int):
                        # Single port number
                        port_mappings.append((service_name, str(port_mapping)))
                    elif isinstance(port_mapping, dict):
                        # Long form port mapping
                        if 'published' in port_mapping:
                            port_mappings.append((service_name, str(port_mapping['published'])))
                        elif 'target' in port_mapping:
                            port_mappings.append((service_name, str(port_mapping['target'])))

        return port_mappings

    except Exception as e:
        print(f"Error parsing {yaml_file}: {e}")
        return []

def main():
    # Load .env file from current working directory
    env_vars = load_env_file()

    # Get COMPOSE_FILE from .env file or environment variable as fallback
    compose_file_env = env_vars.get('COMPOSE_FILE') or os.getenv('COMPOSE_FILE', '')

    if not compose_file_env:
        print("COMPOSE_FILE not found in .env file or environment variables")
        return

    print(f"Found COMPOSE_FILE: {compose_file_env[:100]}{'...' if len(compose_file_env) > 100 else ''}")
    print()

    # Split by colon to get individual YAML files
    yaml_files = compose_file_env.split(':')

    # Filter out excluded files
    excluded_files = {'home.yml', 'base.yml', 'rpc.yml'}
    filtered_files = []

    for yaml_file in yaml_files:
        yaml_file = yaml_file.strip()
        if yaml_file and Path(yaml_file).name not in excluded_files:
            filtered_files.append(yaml_file)

    print(f"Processing {len(filtered_files)} YAML files...")
    print("=" * 50)

    # Extract ports from each file
    all_port_mappings = []

    for yaml_file in filtered_files:
        port_mappings = extract_ports_from_yaml(yaml_file)
        all_port_mappings.extend(port_mappings)

        if port_mappings:
            print(f"\n{yaml_file}:")
            for service_name, port in port_mappings:
                print(f"  {service_name} : {port}")

    # Remove duplicates by converting to a set, then back to a list
    unique_port_mappings = list(set(all_port_mappings))

    # Sort the unique mappings
    sorted_mappings = sorted(unique_port_mappings, key=lambda x: (x[0], int(x[1]) if x[1].isdigit() else x[1]))

    # Summary to console
    print("\n" + "=" * 50)
    print("SUMMARY - Unique Port Mappings:")
    print("=" * 50)

    for service_name, port in sorted_mappings:
        print(f"{service_name} : {port}")

    print(f"\nTotal unique port mappings found: {len(sorted_mappings)}")
    print(f"Duplicates removed: {len(all_port_mappings) - len(sorted_mappings)}")

    # Save to file (clean format, no extra whitespace)
    output_file = os.path.expanduser("~payne/port-forward.txt")
    try:
        # Write the file
        with open(output_file, 'w') as f:
            for service_name, port in sorted_mappings:
                f.write(f"{service_name}:{port}\n")

        # Set ownership to payne:payne
        try:
            payne_user = pwd.getpwnam('payne')
            payne_group = grp.getgrnam('payne')
            os.chown(output_file, payne_user.pw_uid, payne_group.gr_gid)
        except KeyError:
            print("Warning: User or group 'payne' not found, skipping ownership change")
        except PermissionError:
            print("Warning: Permission denied setting ownership (you may need to run as root)")

        # Set read permissions for user payne (644: owner read/write, group read, others read)
        os.chmod(output_file, 0o644)

        print(f"\nResults saved to: {output_file}")
        print("File ownership set to payne:payne with read permissions")

    except Exception as e:
        print(f"\nError saving to {output_file}: {e}")
        print("You may need to run with sudo or check file permissions")

if __name__ == "__main__":
    main()
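The extraction logic above handles the three docker-compose port styles: the short "host:container" string, a bare integer, and the long-form mapping with published/target keys. Below is a minimal sketch of how extract_ports_from_yaml could be exercised against a throwaway compose file, assuming a Unix host with PyYAML installed and list-port-forwards.py in the current directory; the demo service name and port numbers are invented for illustration.

#!/usr/bin/env python3
# Quick check of extract_ports_from_yaml with hypothetical demo data.
import importlib.util
import os
import tempfile

# Load list-port-forwards.py despite the hyphen in its filename.
spec = importlib.util.spec_from_file_location("list_port_forwards", "list-port-forwards.py")
lpf = importlib.util.module_from_spec(spec)
spec.loader.exec_module(lpf)

# One service using each supported port style: short string, bare int, long form.
compose_snippet = """
services:
  demo:
    image: nginx
    ports:
      - "8080:80"
      - 9090
      - target: 443
        published: 8443
"""

with tempfile.NamedTemporaryFile("w", suffix=".yml", delete=False) as tmp:
    tmp.write(compose_snippet)
    path = tmp.name

try:
    # Expected: [('demo', '8080'), ('demo', '9090'), ('demo', '8443')]
    print(lpf.extract_ports_from_yaml(path))
finally:
    os.remove(path)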
154 normalize-logs.py Executable file
@@ -0,0 +1,154 @@
#!/usr/bin/env python3
import sys
import json
import datetime
import shlex
import re

def format_timestamp(ts):
    """
    Converts an ISO8601 timestamp (with trailing Z) into the format MM-DD|HH:MM:SS.mmm.
    Example: '2025-03-25T09:41:48.829633093Z' -> '03-25|09:41:48.829'
    """
    try:
        # Remove the trailing "Z" if present and trim fractional seconds to at most
        # six digits (fromisoformat() before Python 3.11 rejects nanosecond precision).
        clean = re.sub(r"(\.\d{1,6})\d*", r"\1", ts.replace("Z", ""))
        dt = datetime.datetime.fromisoformat(clean)
        formatted = dt.strftime("%m-%d|%H:%M:%S.%f")[:-3]
        return formatted
    except Exception:
        return ts

def format_timestamp_t(ts):
    """
    Converts a timestamp from key/value logs (e.g. '2025-03-26T12:06:31+0000')
    into MM-DD|HH:MM:SS.mmm. Note: it fixes the offset format if necessary.
    """
    try:
        # Fix offset if needed: insert a colon before the last two digits.
        if len(ts) >= 5 and (ts[-5] in ['+', '-'] and ts[-3] != ':'):
            ts = ts[:-2] + ':' + ts[-2:]
        dt = datetime.datetime.fromisoformat(ts)
        formatted = dt.strftime("%m-%d|%H:%M:%S.%f")[:-3]
        return formatted
    except Exception:
        return ts

def format_number(num):
    """Formats an integer (or numeric string) with commas."""
    try:
        return f"{int(num):,}"
    except Exception:
        return num

def shorten_hash(h):
    """
    Shortens a hash string by keeping the first 6 and last 5 characters.
    Example: '2544a06d361df25cb565b42b0ad0f7ee6cdaf0d714052a8cd3f0fdc3e3dad04a'
    becomes '2544a0..ad04a'.
    """
    if isinstance(h, str) and len(h) > 12:
        return f"{h[:6]}..{h[-5:]}"
    return h

def process_json_line(obj):
    # Extract main fields.
    severity = obj.get("severity", "INFO").upper()
    timestamp = format_timestamp(obj.get("timestamp", ""))
    message = obj.get("message", "")
    out = f"{severity} [{timestamp}] {message}"

    # These keys are already handled.
    skip_keys = {"severity", "timestamp", "message", "logger", "logging.googleapis.com/labels"}

    # If JSON includes block_number and block_Id, handle specially.
    if "block_number" in obj:
        out += f" number={format_number(obj['block_number'])}"
    if "block_Id" in obj:
        out += f" hash={shorten_hash(obj['block_Id'])}"

    for key in sorted(obj.keys()):
        if key in skip_keys or key in {"block_number", "block_Id"}:
            continue
        value = obj[key]
        if isinstance(value, int):
            value = format_number(value)
        out += f" {key}={value}"
    return out

def process_kv_line(line):
    """
    Parses a key=value style log (like t=, lvl=, msg=, etc.) and outputs
    a standardized log line in the form: "LEVEL [MM-DD|HH:MM:SS.mmm] message additional_keys=values".
    """
    try:
        tokens = shlex.split(line)
    except Exception:
        return line  # if shlex fails, return the original line.

    kv = {}
    for token in tokens:
        if '=' not in token:
            continue
        key, value = token.split("=", 1)
        kv[key] = value

    # Main fields:
    ts = kv.get("t", "")
    timestamp = format_timestamp_t(ts)
    severity = kv.get("lvl", "INFO").upper()
    message = kv.get("msg", "")

    out = f"{severity} [{timestamp}] {message}"

    # Special handling for IDs that are combined values (hash:number).
    # If the "id" field exists and contains a colon, split it into hash and number.
    if "id" in kv and ':' in kv["id"]:
        hash_part, num_part = kv["id"].split(":", 1)
        out += f" hash={shorten_hash(hash_part)} number={format_number(num_part)}"
        # Remove these keys so they don't get printed again.
        kv.pop("id")
    else:
        # If "hash" and "number" exist separately, process them.
        if "hash" in kv:
            out += f" hash={shorten_hash(kv['hash'])}"
            kv.pop("hash")
        if "number" in kv:
            out += f" number={format_number(kv['number'])}"
            kv.pop("number")

    # Keys to skip in extra printing.
    skip_keys = {"t", "lvl", "msg", "id", "hash", "number"}

    for key in sorted(kv.keys()):
        if key in skip_keys:
            continue
        value = kv[key]
        # Optionally, format numbers if the value is numeric.
        try:
            int_val = int(value)
            value = format_number(int_val)
        except Exception:
            pass
        out += f" {key}={value}"
    return out

def main():
    for line in sys.stdin:
        line = line.rstrip("\n")
        # First, try JSON.
        try:
            obj = json.loads(line)
            print(process_json_line(obj))
            continue
        except json.JSONDecodeError:
            pass

        # Next, if the line starts with "t=" or "lvl=", assume key-value style.
        if line.startswith("t=") or line.startswith("lvl="):
            print(process_kv_line(line))
            continue

        # Otherwise, assume it's already in standard plain text.
        print(line)

if __name__ == "__main__":
    main()
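Both parsers can be sanity-checked without piping real container logs by calling the processing functions directly. A minimal sketch, assuming normalize-logs.py is in the current directory; the JSON record and the t=/lvl=/msg= key=value line below are invented samples of the two formats the script recognizes, and the expected outputs follow from the formatting functions above.

#!/usr/bin/env python3
# Run one invented sample of each supported log format through the normalizer.
import importlib.util

spec = importlib.util.spec_from_file_location("normalize_logs", "normalize-logs.py")
nl = importlib.util.module_from_spec(spec)
spec.loader.exec_module(nl)

# JSON-style record (field names mirror the ones handled above; values are made up).
json_record = {
    "severity": "info",
    "timestamp": "2025-03-25T09:41:48.829633093Z",
    "message": "imported new chain segment",
    "block_number": 22041337,
    "block_Id": "2544a06d361df25cb565b42b0ad0f7ee6cdaf0d714052a8cd3f0fdc3e3dad04a",
}
print(nl.process_json_line(json_record))
# -> INFO [03-25|09:41:48.829] imported new chain segment number=22,041,337 hash=2544a0..ad04a

# key=value style line (t=/lvl=/msg= format; values are made up).
kv_line = 't=2025-03-26T12:06:31+0000 lvl=info msg="Looking for peers" peercount=3 tried=25'
print(nl.process_kv_line(kv_line))
# -> INFO [03-26|12:06:31.000] Looking for peers peercount=3 tried=25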
5 update-ip.sh Executable file
@@ -0,0 +1,5 @@
#!/bin/bash

# Refresh the IP entry in .env with the host's current public IP, then redeploy.
cd /root/rpc || exit 1
sed -i.bak "s/IP=.*/IP=$(curl ipinfo.io/ip)/g" .env
docker compose up -d