Production sync - 2025-09-05
@@ -1,56 +1,50 @@
# Network AI Orchestrator Configuration
elasticsearch:
  host: "192.168.100.85:9200"
  index: "netflow-*"
  verify_certs: false
  timeout: 30

ai:
  max_retries: 3
  request_timeout: 120
analysis:
  interval_minutes: 60
  window_hours: 168
  min_traffic_bytes: 1000000

pr_creation:
  enabled: true
  frequency: "smart"  # Options: weekly, daily, manual, smart
  triggers:
    - high_traffic anomaly  # Create PR if traffic spike
    - security_event  # Create PR if security issue
    - scheduled: "weekly"
  thresholds:
    traffic_spike: 200  # 200% increase triggers PR
    new_hosts: 10  # 10+ new IPs triggers PR
  day_of_week: "saturday"  # 0=Monday, 6=Sunday
  hour_of_day: 22  # 24-hour format (9 = 9 AM)
  skip_if_pending: true  # Don't create if PR already open
  min_days_between: 7  # Minimum days between PRs

  window_hours: 168
elasticsearch:
  host: 192.168.100.85:9200
  index: netflow-*
  timeout: 30
  verify_certs: false
gitea:
  url: "https://git.salmutt.dev"
  repo: "netops/srx-config"
  branch: main
  labels:
    - ai-generated
    - auto-config
    - pending-review
  repo: netops/srx-config
  token: "${GITEA_TOKEN}"
  branch: "main"
  labels: ["ai-generated", "auto-config", "pending-review"]

srx:
  host: "192.168.100.1"
  port: 830
  username: "netops"
  ssh_key: "/home/netops/.ssh/srx_key"

  url: https://git.salmutt.dev
logging:
  level: INFO
  max_file_size: 100MB
  retention_days: 30
pr_creation:
  day_of_week: saturday
  enabled: true
  frequency: smart
  hour_of_day: 22
  min_days_between: 1
  skip_if_pending: true
  thresholds:
    new_hosts: 10
    traffic_spike: 200
  triggers:
    - high_traffic anomaly
    - security_event
    - scheduled: weekly
shared_storage:
  path: "/shared/ai-gitops"

  path: /shared/ai-gitops
srx:
  host: 192.168.100.1
  port: 830
  ssh_key: /home/netops/.ssh/srx_key
  username: netops
state_tracking:
  enabled: true
  state_file: '/shared/ai-gitops/state/orchestrator_state.json'
  state_file: /shared/ai-gitops/state/orchestrator_state.json
  track_pr_history: true

ai:
  request_timeout: 120
  max_retries: 3

logging:
  level: "INFO"
  max_file_size: "100MB"
  retention_days: 30

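The second copy of each section above (alphabetized keys, unquoted scalars, no comments) is consistent with the config having been round-tripped through yaml.safe_load and yaml.dump, which sorts keys and drops comments by default. A minimal sketch of loading the file follows; the /shared/ai-gitops/config.yaml path and the ${GITEA_TOKEN} environment-variable expansion step are assumptions for illustration, not taken from this commit:

# Illustrative only: load the orchestrator config and expand ${GITEA_TOKEN}.
# The path and the env-var expansion are assumed, not confirmed by this diff.
import os
import yaml

def load_config(path="/shared/ai-gitops/config.yaml"):
    with open(path) as f:
        raw = f.read()
    expanded = os.path.expandvars(raw)  # expands ${GITEA_TOKEN} and similar references
    return yaml.safe_load(expanded)

config = load_config()
print(config["gitea"]["repo"], config["pr_creation"]["frequency"])
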
@@ -45,6 +45,29 @@ def main():
    # Extract details
    suggestions = pr_data.get('suggestions', '')

    # Handle if suggestions is a list of dicts or strings
    if isinstance(suggestions, list):

    # Format suggestions properly
    if isinstance(suggestions, list) and suggestions:
        if isinstance(suggestions[0], dict):
            # Extract just the config lines
            formatted_lines = []
            for item in suggestions:
                if isinstance(item, dict) and 'config' in item:
                    formatted_lines.append(item['config'])
            suggestions_preview = formatted_lines
        else:
            suggestions_preview = suggestions
    else:
        suggestions_preview = suggestions.split('\n') if isinstance(suggestions, str) else suggestions
        # If list of dicts with 'config' key
        if suggestions and isinstance(suggestions[0], dict):
            suggestions = '\n'.join([s.get('config', '') for s in suggestions if s.get('config')])
        else:
            # If list of strings
            suggestions = '\n'.join(suggestions)
    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M")

    # Show preview

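The new preview logic above handles three shapes of suggestions: a list of dicts carrying a 'config' key, a list of strings, and a plain string. The same normalization as a standalone helper, written purely for illustration (the function name is hypothetical, not part of the commit):

# Hypothetical helper mirroring the preview logic in the hunk above.
def build_preview(suggestions):
    """Return preview lines from a str, list[str], or list[dict] input."""
    if isinstance(suggestions, list) and suggestions:
        if isinstance(suggestions[0], dict):
            # Keep only the 'config' field from each suggestion dict.
            return [item['config'] for item in suggestions
                    if isinstance(item, dict) and 'config' in item]
        return suggestions
    if isinstance(suggestions, str):
        return suggestions.split('\n')
    return suggestions

# build_preview([{'config': 'set security policies ...'}]) -> ['set security policies ...']
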
@@ -518,7 +518,7 @@ Include comments explaining each change."""
            return False

        # Check for existing pending PR
        if self.state.get('pending_pr'):
        if self.state.get('pending_pr') is not None and self.state.get('pending_pr') != False:
            logger.info(f"Skipping PR creation - pending PR exists: {self.state['pending_pr']}")
            return False

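The two guards above agree for None and False but diverge for other falsy values such as an empty dict or empty string: the truthy check skips them, while the explicit is-not-None / != False form treats them as a pending PR. A standalone illustration, not from the commit:

# How the two guards diverge when pending_pr holds a falsy-but-present value.
state = {'pending_pr': {}}  # e.g. an empty dict left behind in the state file
truthy_guard = bool(state.get('pending_pr'))  # False -> would allow a new PR
explicit_guard = state.get('pending_pr') is not None and state.get('pending_pr') != False  # True -> would skip
print(truthy_guard, explicit_guard)
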
@@ -534,11 +534,13 @@ Include comments explaining each change."""
            return False

        # Create the PR
        pr_info = gitea.create_pr_with_config(
            srx_config=srx_config,
            title=f"AI Network Configuration Suggestions - {datetime.now().strftime('%B %d, %Y')}",
            description=None  # Will auto-generate
        # Use working PR creation pipeline
        import subprocess
        result = subprocess.run(
            '/home/netops/orchestrator/safe_pr_creation.sh',
            shell=True, capture_output=True, text=True
        )
        pr_info = {'number': 'auto-created'} if 'SUCCESS' in result.stdout else None

        if pr_info:
            # Update state with PR information
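The replacement shells out to safe_pr_creation.sh and infers success from the literal 'SUCCESS' in stdout. A sketch of the same call that also checks the exit code and logs stderr on failure; the return-code handling is an added assumption, not something in this commit:

# Sketch only: same shell-out as above, plus an exit-code check.
import logging
import subprocess

logger = logging.getLogger(__name__)

result = subprocess.run(
    '/home/netops/orchestrator/safe_pr_creation.sh',
    shell=True, capture_output=True, text=True
)
if result.returncode == 0 and 'SUCCESS' in result.stdout:
    pr_info = {'number': 'auto-created'}
else:
    logger.error("safe_pr_creation.sh failed (rc=%s): %s", result.returncode, result.stderr.strip())
    pr_info = None
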
@@ -608,7 +610,7 @@ Include comments explaining each change."""
        # If PR is closed or merged, clear the pending_pr flag
        if pr_status['state'] == 'closed':
            logger.info(f"PR #{pr_status['number']} has been closed")
            self.state['pending_pr'] = None
            del self.state['pending_pr']
            self.state['last_pr_status'] = 'closed'
            self.state['last_pr_closed'] = datetime.now().isoformat()

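Whichever of the two cleanup lines above is the surviving version, assignment to None and deletion read the same through .get(): both make self.state.get('pending_pr') return None, so the pending-PR guard earlier in the file behaves identically; only the persisted state file differs (key kept with a null value versus key removed). A standalone illustration:

# Clearing the flag by assignment vs. deletion: same .get() result either way.
state = {'pending_pr': 42}
state['pending_pr'] = None
print(state.get('pending_pr'))  # None; key still present when the state is saved

state = {'pending_pr': 42}
del state['pending_pr']
print(state.get('pending_pr'))  # None; key removed entirely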