dev: automated commit - 2025-10-19 12:15:20

Mariano Z. 2025-10-19 12:15:20 -03:00
parent 6b67d64cef
commit b40e30baef
6 changed files with 404 additions and 34 deletions

@@ -0,0 +1,198 @@
#!/usr/bin/env python3
"""
Generate DBeaver data-sources.json from sdm status --json output
Organizes PostgreSQL connections by customer folders
Excludes readonly connections; includes activate connections
"""
import subprocess
import json
import re
import uuid
import os
from collections import defaultdict
def run_sdm_status():
    """Run `sdm status --json` and return the parsed JSON, or None on error."""
try:
result = subprocess.run(['sdm', 'status', '--json'], capture_output=True, text=True)
if result.returncode != 0:
print(f"Error running sdm status: {result.stderr}")
return None
return json.loads(result.stdout)
except FileNotFoundError:
print("Error: sdm command not found")
return None
except json.JSONDecodeError as e:
print(f"Error parsing JSON: {e}")
return None
def parse_postgres_connections(sdm_data):
    """Extract non-readonly PostgreSQL entries from the sdm status data."""
connections = []
for item in sdm_data:
if item.get('type') == 'postgres':
name = item.get('name', '')
address = item.get('address', '')
if 'readonly' in name:
continue
            if ':' in address:
                # rsplit keeps the port even if the host part contains extra colons
                addr, port = address.rsplit(':', 1)
                port = int(port)
            else:
                continue
env_info = parse_connection_name(name)
if env_info:
connections.append({
'name': name,
'addr': addr,
'port': port,
'environment': env_info['environment'],
'customer': env_info['customer'],
'stage': env_info['stage']
})
return connections
def parse_connection_name(name):
    """Split an oc-<environment>-<customer>-... name into environment, customer, and stage."""
pattern = r'oc-([^-]+)-([^-]+)-.*'
match = re.match(pattern, name)
if not match:
return None
environment = match.group(1)
customer = match.group(2)
if environment in ['dev', 'nextrc', 'nextrc2']:
stage = 'internal'
elif environment in ['stage', 'stage2', 'uat']:
stage = 'stage'
elif environment in ['prod', 'prod2']:
stage = 'production'
else:
stage = 'unknown'
return {
'environment': environment,
'customer': customer,
'stage': stage
}
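# Worked examples of the mapping above (hypothetical connection names):
#   parse_connection_name("oc-dev-internal-rds") -> {'environment': 'dev', 'customer': 'internal', 'stage': 'internal'}
#   parse_connection_name("oc-prod-acme-rds")    -> {'environment': 'prod', 'customer': 'acme', 'stage': 'production'}
# Names that do not match the oc-<env>-<customer>-... pattern return None and are skipped.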
def create_dbeaver_connection(conn, connection_id):
    """Build a DBeaver data-sources.json entry for a parsed connection (connection_id is currently unused)."""
if conn['customer'] == 'internal' or conn['customer'].startswith('feature'):
folder = 'internal'
elif conn['stage'] == 'stage':
folder = conn['customer']
elif conn['stage'] == 'production':
folder = conn['customer']
else:
folder = 'other'
    display_name = conn['name'].replace('oc-', '').replace('-rds', '').replace('-activate', '')
    if 'activate' in conn['name']:
        display_name = f"activate-{display_name}"
        default_db = "postgres"
    else:
        default_db = "member_dossier"
    db_url = f"jdbc:postgresql://{conn['addr']}:{conn['port']}/{default_db}"
return {
"provider": "postgresql",
"driver": "postgres-jdbc",
"name": display_name,
"save-password": True,
"folder": folder,
"configuration": {
"host": conn['addr'],
"port": str(conn['port']),
"database": default_db,
"url": db_url,
"configurationType": "MANUAL",
"home": "/bin",
"type": conn['environment'],
"closeIdleConnection": False,
"provider-properties": {
"@dbeaver-show-non-default-db@": "true",
"@dbeaver-show-template-db@": "true",
"@dbeaver-show-unavailable-db@": "true",
"show-database-statistics": "false",
"@dbeaver-read-all-data-types-db@": "false",
"@dbeaver-use-prepared-statements-db@": "false",
"postgresql.dd.plain.string": "false",
"postgresql.dd.tag.string": "false"
},
"auth-model": "native"
}
}
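# Hypothetical end-to-end example for one parsed connection (name and address are illustrative):
#   {'name': 'oc-prod-acme-rds', 'addr': '127.0.0.1', 'port': 15432,
#    'environment': 'prod', 'customer': 'acme', 'stage': 'production'}
#   -> display name "prod-acme", folder "acme", database "member_dossier",
#      url "jdbc:postgresql://127.0.0.1:15432/member_dossier"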
def group_connections(connections):
    """Group parsed connections by the folder they will appear under in DBeaver."""
folders = defaultdict(list)
for conn in connections:
if conn['customer'] == 'internal' or conn['customer'].startswith('feature'):
folder = 'internal'
elif conn['stage'] == 'stage':
folder = conn['customer']
elif conn['stage'] == 'production':
folder = conn['customer']
else:
folder = 'other'
folders[folder].append(conn)
return folders
def generate_dbeaver_config(connections):
    """Assemble the folders and connections sections of data-sources.json."""
folders = group_connections(connections)
folders_config = {}
for folder_name in folders.keys():
folders_config[folder_name] = {}
connections_config = {}
for conn in connections:
connection_id = f"postgres-jdbc-{uuid.uuid4().hex[:8]}-{uuid.uuid4().hex[:8]}"
conn_config = create_dbeaver_connection(conn, connection_id)
connections_config[connection_id] = conn_config
return {
"folders": folders_config,
"connections": connections_config
}
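# Sketch of the generated data-sources.json shape (connection IDs are random per run;
# entries abridged here for illustration):
#   {"folders": {"internal": {}, "acme": {}},
#    "connections": {"postgres-jdbc-1a2b3c4d-5e6f7a8b": {"provider": "postgresql", ...}}}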
def main():
print("Generating DBeaver data-sources.json from sdm status --json...")
sdm_data = run_sdm_status()
if not sdm_data:
return
connections = parse_postgres_connections(sdm_data)
print(f"Found {len(connections)} PostgreSQL connections")
folders = group_connections(connections)
for folder_name, conns in folders.items():
print(f" {folder_name}: {len(conns)} connections")
dbeaver_config = generate_dbeaver_config(connections)
output_file = os.path.expanduser('~/.local/share/DBeaverData/workspace6/Stuzo/.dbeaver/data-sources.json')
try:
with open(output_file, 'w') as f:
json.dump(dbeaver_config, f, indent='\t')
print(f"✅ Successfully generated {output_file}")
print("📝 Note: You may need to restart DBeaver to see the new connections")
except Exception as e:
print(f"❌ Error writing file: {e}")
if __name__ == '__main__':
main()

@@ -0,0 +1,178 @@
#!/usr/bin/env python3
"""
Generate TinyRDM connections.yaml from sdm status --json output
Organizes Redis connections into Internal, Stage, and Production groups
Excludes activate and readonly connections
"""
import subprocess
import json
import re
import os
from collections import defaultdict
def run_sdm_status():
    """Run `sdm status --json` and return the parsed JSON, or None on error."""
try:
result = subprocess.run(['sdm', 'status', '--json'], capture_output=True, text=True)
if result.returncode != 0:
print(f"Error running sdm status: {result.stderr}")
return None
return json.loads(result.stdout)
except FileNotFoundError:
print("Error: sdm command not found")
return None
except json.JSONDecodeError as e:
print(f"Error parsing JSON: {e}")
return None
def parse_redis_connections(sdm_data):
    """Extract Redis entries from the sdm status data, skipping activate and readonly connections."""
connections = []
for item in sdm_data:
if item.get('type') == 'redis':
name = item.get('name', '')
address = item.get('address', '')
if ('activate-cache' in name or 'activate-readonly' in name or
'readonly-redis' in name):
continue
            if ':' in address:
                # rsplit keeps the port even if the host part contains extra colons
                addr, port = address.rsplit(':', 1)
                port = int(port)
            else:
                continue
env_info = parse_connection_name(name)
if env_info:
connections.append({
'name': name,
'addr': addr,
'port': port,
'environment': env_info['environment'],
'customer': env_info['customer'],
'stage': env_info['stage']
})
return connections
def parse_connection_name(name):
    """Split an oc-<environment>-<customer>-... name into environment, customer, and stage."""
pattern = r'oc-([^-]+)-([^-]+)-.*'
match = re.match(pattern, name)
if not match:
return None
environment = match.group(1)
customer = match.group(2)
if environment in ['dev', 'nextrc', 'nextrc2']:
stage = 'internal'
elif environment in ['stage', 'stage2', 'uat']:
stage = 'stage'
elif environment in ['prod', 'prod2']:
stage = 'production'
else:
stage = 'unknown'
return {
'environment': environment,
'customer': customer,
'stage': stage
}
def create_connection_yaml(name, addr, port, stage):
    """Render a single TinyRDM connection block, color-coded by stage."""
colors = {
'internal': '#4ECF60',
'stage': '#FFA500',
'production': '#FF0000'
}
color = colors.get(stage, '#808080')
return f""" - name: {name}
last_db: 0
network: tcp
addr: {addr}
port: {port}
default_filter: '*'
key_separator: ':'
conn_timeout: 60
exec_timeout: 60
db_filter_type: none
load_size: 10000
mark_color: '{color}'
refresh_interval: 5"""
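# Hypothetical example: create_connection_yaml("oc-dev-internal-redis", "127.0.0.1", 16379, "internal")
# renders one "- name: oc-dev-internal-redis" block with mark_color '#4ECF60' (green for internal),
# indented so that generate_yaml_content() can nest it under a group's "connections:" key.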
def group_connections(connections):
    """Render each connection and group it under Internal, Stage, or Production."""
groups = defaultdict(list)
for conn in connections:
if conn['customer'] == 'internal':
group_name = 'Internal'
conn_stage = 'internal'
        elif conn['stage'] == 'stage':
            group_name = 'Stage'
            conn_stage = 'stage'
        elif conn['stage'] == 'production':
            group_name = 'Production'
            conn_stage = 'production'
else:
continue
conn_yaml = create_connection_yaml(
conn['name'],
conn['addr'],
conn['port'],
conn_stage
)
groups[group_name].append(conn_yaml)
return groups
def generate_yaml_content(groups):
    """Join the rendered groups into the final connections.yaml content."""
yaml_lines = []
group_order = ['Internal', 'Stage', 'Production']
for group_name in group_order:
if group_name in groups:
yaml_lines.append(f"- name: {group_name}")
yaml_lines.append(" last_db: 0")
yaml_lines.append(" type: group")
yaml_lines.append(" connections:")
for conn_yaml in groups[group_name]:
yaml_lines.append(conn_yaml)
yaml_lines.append("")
return '\n'.join(yaml_lines)
def main():
print("Generating TinyRDM connections.yaml from sdm status --json...")
sdm_data = run_sdm_status()
if not sdm_data:
return
connections = parse_redis_connections(sdm_data)
print(f"Found {len(connections)} Redis connections")
groups = group_connections(connections)
for group_name, conns in groups.items():
print(f" {group_name}: {len(conns)} connections")
yaml_content = generate_yaml_content(groups)
output_file = os.path.expanduser('~/.config/TinyRDM/connections.yaml')
try:
with open(output_file, 'w') as f:
f.write(yaml_content)
print(f"✅ Successfully generated {output_file}")
except Exception as e:
print(f"❌ Error writing file: {e}")
if __name__ == '__main__':
main()