dev: automated commit - 2025-10-19 12:15:20

This commit is contained in:
Mariano Z. 2025-10-19 12:15:20 -03:00
parent 6b67d64cef
commit b40e30baef
6 changed files with 404 additions and 34 deletions

View file

@ -0,0 +1,198 @@
#!/usr/bin/env python3
"""
Generate DBeaver data-sources.json from sdm status --json output
Organizes PostgreSQL connections by customer folders
Excludes readonly connections; includes "activate" connections
"""
import subprocess
import json
import re
import uuid
import os
from collections import defaultdict
def run_sdm_status():
    """Run `sdm status --json` and return the parsed JSON payload.

    Returns:
        The decoded JSON object on success, or None when the sdm binary is
        missing, exits non-zero, or emits malformed JSON. Failures are
        reported on stdout rather than raised.
    """
    try:
        proc = subprocess.run(['sdm', 'status', '--json'],
                              capture_output=True, text=True)
    except FileNotFoundError:
        # sdm is not installed / not on PATH
        print("Error: sdm command not found")
        return None
    if proc.returncode != 0:
        print(f"Error running sdm status: {proc.stderr}")
        return None
    try:
        return json.loads(proc.stdout)
    except json.JSONDecodeError as e:
        print(f"Error parsing JSON: {e}")
        return None
def parse_postgres_connections(sdm_data):
    """Extract PostgreSQL connection records from parsed `sdm status` data.

    Skips readonly datasources, entries whose address has no parseable
    port, and names that do not match the oc-<env>-<customer>-... pattern.

    Args:
        sdm_data: list of dicts as produced by `sdm status --json`.

    Returns:
        list of dicts with keys: name, addr, port, environment,
        customer, stage.
    """
    connections = []
    for item in sdm_data:
        if item.get('type') != 'postgres':
            continue
        name = item.get('name', '')
        address = item.get('address', '')
        # readonly datasources are intentionally excluded
        if 'readonly' in name:
            continue
        if ':' not in address:
            continue
        # rpartition splits on the LAST colon, so hosts containing extra
        # colons (e.g. IPv6 literals) no longer crash split(':') unpacking.
        addr, _, port_str = address.rpartition(':')
        try:
            port = int(port_str)
        except ValueError:
            continue  # malformed port — skip the entry rather than crash
        env_info = parse_connection_name(name)
        if env_info:
            connections.append({
                'name': name,
                'addr': addr,
                'port': port,
                'environment': env_info['environment'],
                'customer': env_info['customer'],
                'stage': env_info['stage'],
            })
    return connections
def parse_connection_name(name):
    """Decompose an oc-<environment>-<customer>-... connection name.

    Returns:
        dict with 'environment', 'customer', and the derived 'stage'
        ('internal', 'stage', 'production', or 'unknown'), or None when
        the name does not match the expected pattern.
    """
    match = re.match(r'oc-([^-]+)-([^-]+)-.*', name)
    if match is None:
        return None
    environment, customer = match.groups()
    # Environment name → deployment stage lookup; anything unrecognized
    # is reported as 'unknown'.
    stage_by_env = {
        'dev': 'internal', 'nextrc': 'internal', 'nextrc2': 'internal',
        'stage': 'stage', 'stage2': 'stage', 'uat': 'stage',
        'prod': 'production', 'prod2': 'production',
    }
    return {
        'environment': environment,
        'customer': customer,
        'stage': stage_by_env.get(environment, 'unknown'),
    }
def create_dbeaver_connection(conn, connection_id):
    """Build one DBeaver data-source entry for a parsed sdm connection.

    Args:
        conn: dict produced by parse_postgres_connections().
        connection_id: accepted for signature compatibility; the returned
            mapping does not embed it (the caller uses it as the dict key).

    Returns:
        dict in DBeaver data-sources.json connection format.
    """
    # Folder assignment: internal/feature customers are pooled under
    # 'internal'; stage and production keep per-customer folders.
    customer = conn['customer']
    if customer == 'internal' or customer.startswith('feature'):
        folder = 'internal'
    elif conn['stage'] in ('stage', 'production'):
        folder = customer
    else:
        folder = 'other'

    display_name = (conn['name']
                    .replace('oc-', '')
                    .replace('-rds', '')
                    .replace('-activate', ''))
    is_activate = 'activate' in conn['name']
    if is_activate:
        display_name = f"activate-{display_name}"

    # Activate datasources default to the postgres DB; everything else
    # points at member_dossier.
    default_db = "postgres" if is_activate else "member_dossier"
    db_url = f"jdbc:postgresql://{conn['addr']}:{conn['port']}/{default_db}"

    return {
        "provider": "postgresql",
        "driver": "postgres-jdbc",
        "name": display_name,
        "save-password": True,
        "folder": folder,
        "configuration": {
            "host": conn['addr'],
            "port": str(conn['port']),
            "database": default_db,
            "url": db_url,
            "configurationType": "MANUAL",
            "home": "/bin",
            "type": conn['environment'],
            "closeIdleConnection": False,
            "provider-properties": {
                "@dbeaver-show-non-default-db@": "true",
                "@dbeaver-show-template-db@": "true",
                "@dbeaver-show-unavailable-db@": "true",
                "show-database-statistics": "false",
                "@dbeaver-read-all-data-types-db@": "false",
                "@dbeaver-use-prepared-statements-db@": "false",
                "postgresql.dd.plain.string": "false",
                "postgresql.dd.tag.string": "false"
            },
            "auth-model": "native"
        }
    }
def group_connections(connections):
    """Bucket parsed connections by DBeaver folder name.

    Uses the same folder rules as create_dbeaver_connection: internal and
    feature* customers pool under 'internal', stage/production keep their
    customer name, everything else lands in 'other'.

    Returns:
        defaultdict(list) mapping folder name -> list of connection dicts.
    """
    folders = defaultdict(list)
    for conn in connections:
        customer = conn['customer']
        if customer == 'internal' or customer.startswith('feature'):
            bucket = 'internal'
        elif conn['stage'] in ('stage', 'production'):
            bucket = customer
        else:
            bucket = 'other'
        folders[bucket].append(conn)
    return folders
def generate_dbeaver_config(connections):
    """Assemble the complete data-sources.json structure.

    Returns:
        dict with 'folders' (empty folder stubs) and 'connections'
        (connection-id -> DBeaver connection config).
    """
    folder_sections = {name: {} for name in group_connections(connections)}
    conn_sections = {}
    for conn in connections:
        # DBeaver ids are free-form; two random hex chunks keep them unique.
        conn_id = f"postgres-jdbc-{uuid.uuid4().hex[:8]}-{uuid.uuid4().hex[:8]}"
        conn_sections[conn_id] = create_dbeaver_connection(conn, conn_id)
    return {
        "folders": folder_sections,
        "connections": conn_sections
    }
def main():
    """Generate the DBeaver data-sources.json from `sdm status --json`.

    Side effects: prints progress to stdout and overwrites the DBeaver
    workspace data-sources.json file. Aborts silently (after run_sdm_status
    has printed its own error) when sdm output is unavailable.
    """
    print("Generating DBeaver data-sources.json from sdm status --json...")
    sdm_data = run_sdm_status()
    if not sdm_data:
        return
    connections = parse_postgres_connections(sdm_data)
    print(f"Found {len(connections)} PostgreSQL connections")
    folders = group_connections(connections)
    for folder_name, conns in folders.items():
        print(f" {folder_name}: {len(conns)} connections")
    dbeaver_config = generate_dbeaver_config(connections)
    output_file = os.path.expanduser(
        '~/.local/share/DBeaverData/workspace6/Stuzo/.dbeaver/data-sources.json')
    try:
        # Fix: create the workspace directory if missing (fresh machine /
        # first run) instead of failing on open().
        os.makedirs(os.path.dirname(output_file), exist_ok=True)
        with open(output_file, 'w') as f:
            # DBeaver writes this file tab-indented; match that.
            json.dump(dbeaver_config, f, indent='\t')
        print(f"✅ Successfully generated {output_file}")
        print("📝 Note: You may need to restart DBeaver to see the new connections")
    except Exception as e:
        print(f"❌ Error writing file: {e}")
if __name__ == '__main__':
main()

View file

@ -0,0 +1,178 @@
#!/usr/bin/env python3
"""
Generate TinyRDM connections.yaml from sdm status --json output
Organizes Redis connections into Internal, Stage, and Production groups
Excludes activate and readonly connections
"""
import subprocess
import json
import re
import os
from collections import defaultdict
def run_sdm_status():
    """Run `sdm status --json` and return the parsed JSON payload.

    Returns:
        The decoded JSON object on success, or None when the sdm binary is
        missing, exits non-zero, or emits malformed JSON. Failures are
        reported on stdout rather than raised.
    """
    try:
        proc = subprocess.run(['sdm', 'status', '--json'],
                              capture_output=True, text=True)
    except FileNotFoundError:
        # sdm is not installed / not on PATH
        print("Error: sdm command not found")
        return None
    if proc.returncode != 0:
        print(f"Error running sdm status: {proc.stderr}")
        return None
    try:
        return json.loads(proc.stdout)
    except json.JSONDecodeError as e:
        print(f"Error parsing JSON: {e}")
        return None
def parse_redis_connections(sdm_data):
    """Extract Redis connection records from parsed `sdm status` data.

    Skips activate-cache / activate-readonly / readonly-redis entries,
    addresses with no parseable port, and names that do not match the
    oc-<env>-<customer>-... pattern.

    Args:
        sdm_data: list of dicts as produced by `sdm status --json`.

    Returns:
        list of dicts with keys: name, addr, port, environment,
        customer, stage.
    """
    connections = []
    for item in sdm_data:
        if item.get('type') != 'redis':
            continue
        name = item.get('name', '')
        address = item.get('address', '')
        # cache/readonly variants are intentionally excluded
        if ('activate-cache' in name or 'activate-readonly' in name or
                'readonly-redis' in name):
            continue
        if ':' not in address:
            continue
        # rpartition splits on the LAST colon, so hosts containing extra
        # colons (e.g. IPv6 literals) no longer crash split(':') unpacking.
        addr, _, port_str = address.rpartition(':')
        try:
            port = int(port_str)
        except ValueError:
            continue  # malformed port — skip the entry rather than crash
        env_info = parse_connection_name(name)
        if env_info:
            connections.append({
                'name': name,
                'addr': addr,
                'port': port,
                'environment': env_info['environment'],
                'customer': env_info['customer'],
                'stage': env_info['stage'],
            })
    return connections
def parse_connection_name(name):
    """Decompose an oc-<environment>-<customer>-... connection name.

    Returns:
        dict with 'environment', 'customer', and the derived 'stage'
        ('internal', 'stage', 'production', or 'unknown'), or None when
        the name does not match the expected pattern.
    """
    match = re.match(r'oc-([^-]+)-([^-]+)-.*', name)
    if match is None:
        return None
    environment, customer = match.groups()
    # Environment name → deployment stage lookup; anything unrecognized
    # is reported as 'unknown'.
    stage_by_env = {
        'dev': 'internal', 'nextrc': 'internal', 'nextrc2': 'internal',
        'stage': 'stage', 'stage2': 'stage', 'uat': 'stage',
        'prod': 'production', 'prod2': 'production',
    }
    return {
        'environment': environment,
        'customer': customer,
        'stage': stage_by_env.get(environment, 'unknown'),
    }
def create_connection_yaml(name, addr, port, stage):
    """Render one TinyRDM connection entry as a YAML snippet.

    Args:
        name: sdm connection name, used verbatim as the entry name.
        addr: host to connect to.
        port: TCP port.
        stage: 'internal' | 'stage' | 'production' — selects mark_color.

    Returns:
        A multi-line YAML fragment intended to nest under a group's
        `connections:` key in generate_yaml_content().

    NOTE(review): the leading whitespace inside this literal IS the YAML
    nesting; the diff this file was recovered from stripped indentation,
    so the indents below are reconstructed — verify against a known-good
    TinyRDM connections.yaml before relying on them.
    """
    # Stage → marker colour (green=internal, orange=stage, red=production);
    # unknown stages fall back to grey.
    colors = {
        'internal': '#4ECF60',
        'stage': '#FFA500',
        'production': '#FF0000'
    }
    color = colors.get(stage, '#808080')
    return f""" - name: {name}
   last_db: 0
   network: tcp
   addr: {addr}
   port: {port}
   default_filter: '*'
   key_separator: ':'
   conn_timeout: 60
   exec_timeout: 60
   db_filter_type: none
   load_size: 10000
   mark_color: '{color}'
   refresh_interval: 5"""
def group_connections(connections):
    """Render each connection to YAML and bucket it under its UI group.

    Internal customers always land in 'Internal'; otherwise the stage
    drives the group. Connections with an unrecognized stage are dropped.

    Returns:
        defaultdict(list) mapping group name -> list of YAML snippets.
    """
    groups = defaultdict(list)
    for conn in connections:
        if conn['customer'] == 'internal':
            group_name, conn_stage = 'Internal', 'internal'
        elif conn['stage'] == 'stage':
            group_name, conn_stage = 'Stage', 'stage'
        elif conn['stage'] == 'production':
            group_name, conn_stage = 'Production', 'production'
        else:
            # unknown stage → not shown in TinyRDM at all
            continue
        snippet = create_connection_yaml(
            conn['name'], conn['addr'], conn['port'], conn_stage)
        groups[group_name].append(snippet)
    return groups
def generate_yaml_content(groups):
    """Serialize grouped connection snippets into the final YAML document.

    Args:
        groups: mapping of group name -> list of YAML connection snippets
            (as built by group_connections()).

    Returns:
        The full YAML text, groups emitted in fixed Internal/Stage/
        Production order; groups absent from the mapping are skipped.

    NOTE(review): the spaces inside these literals define YAML nesting;
    the diff this file was recovered from may have altered whitespace —
    confirm the generated file parses in TinyRDM.
    """
    yaml_lines = []
    group_order = ['Internal', 'Stage', 'Production']
    for group_name in group_order:
        if group_name in groups:
            yaml_lines.append(f"- name: {group_name}")
            yaml_lines.append("  last_db: 0")
            yaml_lines.append("  type: group")
            yaml_lines.append("  connections:")
            for conn_yaml in groups[group_name]:
                yaml_lines.append(conn_yaml)
            # blank separator line between groups
            yaml_lines.append("")
    return '\n'.join(yaml_lines)
def main():
    """Generate TinyRDM's connections.yaml from `sdm status --json`.

    Side effects: prints progress to stdout and overwrites
    ~/.config/TinyRDM/connections.yaml. Aborts silently (after
    run_sdm_status has printed its own error) when sdm output is
    unavailable.
    """
    print("Generating TinyRDM connections.yaml from sdm status --json...")
    sdm_data = run_sdm_status()
    if not sdm_data:
        return
    connections = parse_redis_connections(sdm_data)
    print(f"Found {len(connections)} Redis connections")
    groups = group_connections(connections)
    for group_name, conns in groups.items():
        print(f" {group_name}: {len(conns)} connections")
    yaml_content = generate_yaml_content(groups)
    output_file = os.path.expanduser('~/.config/TinyRDM/connections.yaml')
    try:
        # Fix: create the TinyRDM config directory if missing (first run)
        # instead of failing on open().
        os.makedirs(os.path.dirname(output_file), exist_ok=True)
        with open(output_file, 'w') as f:
            f.write(yaml_content)
        print(f"✅ Successfully generated {output_file}")
    except Exception as e:
        print(f"❌ Error writing file: {e}")
if __name__ == '__main__':
main()

View file

@ -2,38 +2,31 @@
mkcert = "latest" mkcert = "latest"
usage = "latest" usage = "latest"
"ubi:a-h/templ" = "latest"
"ubi:stern/stern" = "latest"
#Some optional utilities
"ubi:BurntSushi/ripgrep" = { version = "latest", exe = "rg" }
"ubi:sharkdp/fd" = "latest"
"ubi:sharkdp/bat" = "latest"
"ubi:cli/cli" = { version = "latest", exe = "gh" }
"ubi:jdx/mise" = "latest"
[settings] [settings]
# plugins can read the versions files used by other version managers (if enabled by the plugin) legacy_version_file = true
# for example, .nvmrc in the case of node's nvm always_keep_download = false
legacy_version_file = true # enabled by default (unlike asdf) always_keep_install = false
plugin_autoupdate_last_check_duration = '1 week'
# configure `mise install` to always keep the downloaded archive
always_keep_download = false # deleted after install by default
always_keep_install = false # deleted on failure by default
# configure how frequently (in minutes) to fetch updated plugin repository changes
# this is updated whenever a new runtime is installed
# (note: this isn't currently implemented but there are plans to add it: https://github.com/jdx/mise/issues/128)
plugin_autoupdate_last_check_duration = '1 week' # set to 0 to disable updates
# config files with these prefixes will be trusted by default
trusted_config_paths = [] trusted_config_paths = []
verbose = false
verbose = false # set to true to see full installation output, see `MISE_VERBOSE` asdf_compat = false
asdf_compat = false # set to true to ensure .tool-versions will be compatible with asdf, see `MISE_ASDF_COMPAT` jobs = 4
#http_timeout = 30 # set the timeout for http requests in seconds, see `MISE_HTTP_TIMEOUT` raw = false
jobs = 4 # number of plugins or runtimes to install in parallel. The default is `4`. yes = false
raw = false # set to true to directly pipe plugins to stdin/stdout/stderr not_found_auto_install = true
yes = false # set to true to automatically answer yes to all prompts task_output = "prefix"
paranoid = false
not_found_auto_install = true # see MISE_NOT_FOUND_AUTO_INSTALL env_file = ".env"
task_output = "prefix" # see Tasks Runner for more information experimental = true
paranoid = false # see MISE_PARANOID
env_file = '.env' # load env vars from a dotenv file, see `MISE_ENV_FILE`
experimental = true # enable experimental features
# configure messages displayed when entering directories with config files
status = { missing_tools = "if_other_versions_installed", show_env = false, show_tools = false } status = { missing_tools = "if_other_versions_installed", show_env = false, show_tools = false }
idiomatic_version_file_enable_tools = ["node"] idiomatic_version_file_enable_tools = ["node"]

View file

@ -22,6 +22,7 @@ bindsym {
$mod+o exec ~/.local/bin/launch-or-focus obsidian "cd /home/forbi/Documents/Vault && $term --class obsidian nvim" $mod+o exec ~/.local/bin/launch-or-focus obsidian "cd /home/forbi/Documents/Vault && $term --class obsidian nvim"
$mod+e exec pcmanfm $mod+e exec pcmanfm
$mod+y exec clapboard $mod+y exec clapboard
$mod+b exec rofi-rbw --action copy
} }
# Window Management # Window Management

View file

@ -84,7 +84,7 @@ set -g @plugin 'tmux-plugins/tpm'
set -g @plugin 'tmux-plugins/tmux-sensible' set -g @plugin 'tmux-plugins/tmux-sensible'
set -g @plugin 'tmux-plugins/tmux-yank' set -g @plugin 'tmux-plugins/tmux-yank'
set -g @plugin 'catppuccin/tmux#v0.2.0' set -g @plugin 'catppuccin/tmux#v0.2.0'
set -g @plugin 'tmux-plugins/tmux-battery' set -g @plugin 'tmux-plugins/tmux-battery' # Re-enabled with our fixes
set -g @plugin 'MaximilianGaedig/tmux-filter' set -g @plugin 'MaximilianGaedig/tmux-filter'
# Plugin Settings # Plugin Settings

View file

@ -1,5 +1,3 @@
# Efficient PATH management
function update_path() { function update_path() {
# Define directories to add to PATH # Define directories to add to PATH
local dirs=( local dirs=(
@ -10,6 +8,8 @@ function update_path() {
"$HOME/.local/share/npm/bin" "$HOME/.local/share/npm/bin"
"$HOME/.local/share/cargo/bin" "$HOME/.local/share/cargo/bin"
"$HOME/.local/share/go/bin" "$HOME/.local/share/go/bin"
"$HOME/.local/share/flatpak/exports/share"
"/var/lib/flatpak/exports/share"
) )
# Prepare new PATH variable # Prepare new PATH variable