dev: automated commit - 2025-10-26 10:46:10
This commit is contained in:
parent
211d9e03bd
commit
9462eda5e2
2 changed files with 169 additions and 48 deletions
|
|
@ -149,15 +149,49 @@ def group_connections(connections):
|
|||
|
||||
return folders
|
||||
|
||||
def generate_dbeaver_config(connections):
|
||||
def is_sdm_connection(conn_name):
    """Check if connection name matches SDM pattern (oc-*-*-*)"""
    # SDM names look like "oc-<customer>-<stage>-..." — two dash-free
    # segments after the "oc-" prefix, then a third dash.
    return re.match(r'oc-([^-]+)-([^-]+)-.*', conn_name) is not None
|
||||
|
||||
def load_existing_config(output_file):
    """Load an existing DBeaver data-sources config from *output_file*.

    Returns the parsed JSON object, or a fresh empty config
    ({"folders": {}, "connections": {}}) when the file is missing,
    unreadable, corrupt, or does not contain a JSON object at the top level.
    """
    if not os.path.exists(output_file):
        return {"folders": {}, "connections": {}}

    try:
        with open(output_file, 'r') as f:
            data = json.load(f)
    except (json.JSONDecodeError, OSError):
        # Corrupt or unreadable file: start from a clean slate instead of
        # crashing the whole generation run (OSError was previously uncaught).
        print("⚠️ Existing config file is invalid, starting fresh")
        return {"folders": {}, "connections": {}}

    if not isinstance(data, dict):
        # Valid JSON but not an object (e.g. a bare list) — callers use
        # .get("folders")/.get("connections") and expect a dict.
        print("⚠️ Existing config file is invalid, starting fresh")
        return {"folders": {}, "connections": {}}

    return data
|
||||
|
||||
def generate_dbeaver_config(connections, existing_config):
|
||||
folders = group_connections(connections)
|
||||
|
||||
folders_config = {}
|
||||
# Start with existing folders
|
||||
folders_config = existing_config.get("folders", {}).copy()
|
||||
|
||||
# Add folders for new SDM connections
|
||||
for folder_name in folders.keys():
|
||||
folders_config[folder_name] = {}
|
||||
if folder_name not in folders_config:
|
||||
folders_config[folder_name] = {}
|
||||
|
||||
# Start with existing connections, filtering out SDM connections
|
||||
connections_config = {}
|
||||
existing_connections = existing_config.get("connections", {})
|
||||
for conn_id, conn in existing_connections.items():
|
||||
# Keep non-SDM connections (those that don't start with oc- in their display name)
|
||||
display_name = conn.get("name", "")
|
||||
# Also check configuration host/type for SDM patterns
|
||||
config = conn.get("configuration", {})
|
||||
conn_type = config.get("type", "")
|
||||
|
||||
# Only keep connections that don't match SDM pattern
|
||||
if not is_sdm_connection(display_name) and not is_sdm_connection(conn_type):
|
||||
connections_config[conn_id] = conn
|
||||
|
||||
# Add new SDM connections
|
||||
for conn in connections:
|
||||
connection_id = f"postgres-jdbc-{uuid.uuid4().hex[:8]}-{uuid.uuid4().hex[:8]}"
|
||||
conn_config = create_dbeaver_connection(conn, connection_id)
|
||||
|
|
@ -171,21 +205,32 @@ def generate_dbeaver_config(connections):
|
|||
def main():
|
||||
print("Generating DBeaver data-sources.json from sdm status --json...")
|
||||
|
||||
output_file = os.path.expanduser('~/.local/share/DBeaverData/workspace6/Stuzo/.dbeaver/data-sources.json')
|
||||
|
||||
# Load existing config to preserve non-SDM connections
|
||||
existing_config = load_existing_config(output_file)
|
||||
existing_count = len(existing_config.get("connections", {}))
|
||||
print(f"📁 Found {existing_count} existing connections")
|
||||
|
||||
sdm_data = run_sdm_status()
|
||||
if not sdm_data:
|
||||
return
|
||||
|
||||
connections = parse_postgres_connections(sdm_data)
|
||||
print(f"Found {len(connections)} PostgreSQL connections")
|
||||
print(f"🔍 Found {len(connections)} PostgreSQL connections from SDM")
|
||||
|
||||
folders = group_connections(connections)
|
||||
|
||||
for folder_name, conns in folders.items():
|
||||
print(f" {folder_name}: {len(conns)} connections")
|
||||
|
||||
dbeaver_config = generate_dbeaver_config(connections)
|
||||
dbeaver_config = generate_dbeaver_config(connections, existing_config)
|
||||
|
||||
final_count = len(dbeaver_config["connections"])
|
||||
preserved_count = final_count - len(connections)
|
||||
print(f"💾 Preserved {preserved_count} non-SDM connections")
|
||||
print(f"📊 Total connections: {final_count} ({preserved_count} preserved + {len(connections)} SDM)")
|
||||
|
||||
output_file = os.path.expanduser('~/.local/share/DBeaverData/workspace6/Stuzo/.dbeaver/data-sources.json')
|
||||
try:
|
||||
with open(output_file, 'w') as f:
|
||||
json.dump(dbeaver_config, f, indent='\t')
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ import subprocess
|
|||
import json
|
||||
import re
|
||||
import os
|
||||
import yaml
|
||||
from collections import defaultdict
|
||||
|
||||
def run_sdm_status():
|
||||
|
|
@ -27,32 +28,30 @@ def run_sdm_status():
|
|||
|
||||
def parse_redis_connections(sdm_data):
|
||||
connections = []
|
||||
excluded = ['activate-cache', 'activate-readonly', 'readonly-redis']
|
||||
|
||||
for item in sdm_data:
|
||||
if item.get('type') == 'redis':
|
||||
name = item.get('name', '')
|
||||
address = item.get('address', '')
|
||||
if item.get('type') != 'redis':
|
||||
continue
|
||||
|
||||
if ('activate-cache' in name or 'activate-readonly' in name or
|
||||
'readonly-redis' in name):
|
||||
continue
|
||||
|
||||
if ':' in address:
|
||||
addr, port = address.split(':')
|
||||
port = int(port)
|
||||
else:
|
||||
continue
|
||||
|
||||
env_info = parse_connection_name(name)
|
||||
if env_info:
|
||||
connections.append({
|
||||
'name': name,
|
||||
'addr': addr,
|
||||
'port': port,
|
||||
'environment': env_info['environment'],
|
||||
'customer': env_info['customer'],
|
||||
'stage': env_info['stage']
|
||||
})
|
||||
name = item.get('name', '')
|
||||
if any(x in name for x in excluded):
|
||||
continue
|
||||
|
||||
address = item.get('address', '')
|
||||
if ':' not in address:
|
||||
continue
|
||||
|
||||
addr, port = address.split(':')
|
||||
env_info = parse_connection_name(name)
|
||||
|
||||
if env_info:
|
||||
connections.append({
|
||||
'name': name,
|
||||
'addr': addr,
|
||||
'port': int(port),
|
||||
**env_info
|
||||
})
|
||||
|
||||
return connections
|
||||
|
||||
|
|
@ -81,6 +80,57 @@ def parse_connection_name(name):
|
|||
'stage': stage
|
||||
}
|
||||
|
||||
def is_sdm_connection(conn_name):
    """Check if connection name matches SDM pattern (oc-*-*-*)"""
    # Matches names of the shape "oc-<seg>-<seg>-<anything>".
    sdm_name = re.compile(r'oc-([^-]+)-([^-]+)-.*')
    return bool(sdm_name.match(conn_name))
|
||||
|
||||
def load_existing_connections(output_file):
    """Load existing connections, filtering out SDM connections.

    Reads the TinyRDM YAML file at *output_file* and returns a mapping of
    group name -> list of preserved (non-SDM) connection dicts.  Standalone
    top-level connections are collected under the synthetic 'ungrouped' key.
    Returns {} when the file is missing or cannot be parsed.
    """
    if not os.path.exists(output_file):
        return {}

    try:
        with open(output_file, 'r') as f:
            entries = yaml.safe_load(f) or []

        kept = {}
        for entry in entries:
            if not isinstance(entry, dict):
                continue

            if entry.get("type") == "group":
                # Grouped connections: keep only the ones whose name does
                # not match the SDM pattern.
                survivors = [
                    member for member in entry.get("connections", [])
                    if member.get("name", "") and not is_sdm_connection(member.get("name", ""))
                ]
                if survivors:
                    kept[entry.get("name", "")] = survivors

            elif entry.get("type") != "group" and "addr" in entry and "port" in entry:
                # Standalone top-level connection (not inside any group);
                # funnel preserved ones into a dedicated 'ungrouped' bucket.
                standalone = entry.get("name", "")
                if standalone and not is_sdm_connection(standalone):
                    kept.setdefault('ungrouped', []).append(entry)

        return kept

    except Exception as e:
        # Best-effort: a broken config should not abort generation.
        print(f"⚠️ Error reading existing config: {e}")
        return {}
|
||||
|
||||
def create_connection_yaml(name, addr, port, stage):
|
||||
colors = {
|
||||
'internal': '#4ECF60',
|
||||
|
|
@ -108,7 +158,7 @@ def group_connections(connections):
|
|||
groups = defaultdict(list)
|
||||
|
||||
for conn in connections:
|
||||
# Mirror DBeaver grouping: internal/features -> 'internal', otherwise by customer, else 'other'
|
||||
# Group by customer (internal/features -> 'internal', otherwise by customer)
|
||||
if conn['customer'] == 'internal' or conn['customer'].startswith('feature'):
|
||||
group_name = 'internal'
|
||||
elif conn['stage'] in ['stage', 'production']:
|
||||
|
|
@ -116,43 +166,61 @@ def group_connections(connections):
|
|||
else:
|
||||
group_name = 'other'
|
||||
|
||||
# Keep stage-based color coding
|
||||
if conn['stage'] == 'internal':
|
||||
conn_stage = 'internal'
|
||||
elif conn['stage'] == 'stage':
|
||||
conn_stage = 'stage'
|
||||
elif conn['stage'] == 'production':
|
||||
conn_stage = 'production'
|
||||
else:
|
||||
conn_stage = 'unknown'
|
||||
|
||||
# Use stage directly for color coding
|
||||
conn_yaml = create_connection_yaml(
|
||||
conn['name'],
|
||||
conn['addr'],
|
||||
conn['port'],
|
||||
conn_stage
|
||||
conn['stage']
|
||||
)
|
||||
|
||||
groups[group_name].append(conn_yaml)
|
||||
|
||||
return groups
|
||||
|
||||
def generate_yaml_content(groups):
|
||||
def format_connection_for_yaml(conn):
    """Format a connection dict as YAML string with proper indentation"""
    # NOTE(review): the literal indent widths in the f-strings below are
    # reproduced from the original; confirm they match the file's group
    # indentation if output looks mis-aligned.
    rendered = yaml.dump(conn, default_flow_style=False, sort_keys=False)
    pieces = []
    for idx, row in enumerate(rendered.split('\n')):
        if not row.strip():
            continue
        if idx == 0:
            # Leading line carries the YAML list-item marker.
            pieces.append(f" - {row.strip()}")
        else:
            pieces.append(f" {row}")
    return '\n'.join(pieces)
|
||||
|
||||
def generate_yaml_content(groups, preserved_connections):
|
||||
"""Generate YAML content from SDM groups and preserved connections"""
|
||||
yaml_lines = []
|
||||
|
||||
# Prefer 'internal' first, then alphabetical by customer
|
||||
ordered_groups = []
|
||||
if 'internal' in groups:
|
||||
ordered_groups.append('internal')
|
||||
ordered_groups.extend(sorted([g for g in groups.keys() if g != 'internal']))
|
||||
|
||||
# Add preserved groups
|
||||
for group_name in preserved_connections.keys():
|
||||
if group_name not in ordered_groups:
|
||||
ordered_groups.append(group_name)
|
||||
|
||||
for group_name in ordered_groups:
|
||||
yaml_lines.append(f"- name: {group_name}")
|
||||
yaml_lines.append(" last_db: 0")
|
||||
yaml_lines.append(" type: group")
|
||||
yaml_lines.append(" connections:")
|
||||
|
||||
for conn_yaml in groups[group_name]:
|
||||
yaml_lines.append(conn_yaml)
|
||||
# Add connections (preserved first, then SDM)
|
||||
if group_name in preserved_connections:
|
||||
for conn_dict in preserved_connections[group_name]:
|
||||
yaml_lines.append(format_connection_for_yaml(conn_dict))
|
||||
if group_name in groups:
|
||||
for conn_yaml in groups[group_name]:
|
||||
yaml_lines.append(conn_yaml)
|
||||
|
||||
yaml_lines.append("")
|
||||
|
||||
|
|
@ -161,25 +229,33 @@ def generate_yaml_content(groups):
|
|||
def main():
|
||||
print("Generating TinyRDM connections.yaml from sdm status --json...")
|
||||
|
||||
output_file = os.path.expanduser('~/.config/TinyRDM/connections.yaml')
|
||||
|
||||
# Load existing connections to preserve non-SDM connections
|
||||
preserved_connections = load_existing_connections(output_file)
|
||||
preserved_count = sum(len(conns) for conns in preserved_connections.values())
|
||||
if preserved_count > 0:
|
||||
print(f"📁 Found {preserved_count} existing non-SDM connections")
|
||||
|
||||
sdm_data = run_sdm_status()
|
||||
if not sdm_data:
|
||||
return
|
||||
|
||||
connections = parse_redis_connections(sdm_data)
|
||||
print(f"Found {len(connections)} Redis connections")
|
||||
print(f"🔍 Found {len(connections)} Redis connections from SDM")
|
||||
|
||||
groups = group_connections(connections)
|
||||
|
||||
for group_name, conns in groups.items():
|
||||
print(f" {group_name}: {len(conns)} connections")
|
||||
|
||||
yaml_content = generate_yaml_content(groups)
|
||||
yaml_content = generate_yaml_content(groups, preserved_connections)
|
||||
|
||||
output_file = os.path.expanduser('~/.config/TinyRDM/connections.yaml')
|
||||
try:
|
||||
with open(output_file, 'w') as f:
|
||||
f.write(yaml_content)
|
||||
print(f"✅ Successfully generated {output_file}")
|
||||
if preserved_count > 0:
|
||||
print(f"💾 Preserved {preserved_count} non-SDM connections")
|
||||
except Exception as e:
|
||||
print(f"❌ Error writing file: {e}")
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue