Production sync - 2025-09-05

This commit is contained in:
2025-09-05 04:09:04 +00:00
parent bd176a1dea
commit 71bbf4ba3a
5 changed files with 88 additions and 57 deletions

View File

@@ -1,5 +1,5 @@
# Current System State
Last Updated: Fri Sep 5 12:32:33 AM UTC 2025
Last Updated: Fri Sep 5 04:09:03 AM UTC 2025
## Service Status
- Orchestrator: active
@@ -8,5 +8,5 @@ Last Updated: Fri Sep 5 12:32:33 AM UTC 2025
- Ollama: active
## Metrics
- Responses: 526
- Last Activity: pipeline_20250905_000835_26e90d76_response.json
- Responses: 536
- Last Activity: pipeline_20250905_040432_a6d62a72_response.json

View File

@@ -638,11 +638,23 @@ set security zones security-zone WAN screen GENERAL-screen"""
fixed_config = '\n'.join(fixed_lines)
if 'address-set' not in fixed_config.lower():
# Prepend required address-sets
address_sets = """# MANDATORY: Address-set definitions
address_sets = """# CRITICAL: YOU MUST START WITH THESE ADDRESS-SETS - DO NOT SKIP!
set security address-book global address-set INTERNAL-NETS address 192.168.100.0/24
set security address-book global address-set EXTERNAL-NETS address 0.0.0.0/8
set security address-book global address-set DMZ-NETS address 10.0.0.0/8
# FORBIDDEN PATTERNS - NEVER GENERATE THESE:
# ❌ NEVER: permit ip any any -> any any any
# ❌ NEVER: deny ip any any -> any any any
# ❌ NEVER: source-address any
# ❌ NEVER: destination-address any
# ❌ NEVER: application any
# REQUIRED - ALWAYS USE THESE PATTERNS:
# ✅ ALWAYS: source-address INTERNAL-NETS (or specific address-set)
# ✅ ALWAYS: destination-address EXTERNAL-NETS (or specific address-set)
# ✅ ALWAYS: application [ junos-https junos-ssh ] (or specific apps)
"""
fixed_config = address_sets + fixed_config
violations.append("Added mandatory address-sets")

View File

@@ -1,56 +1,50 @@
# Network AI Orchestrator Configuration
elasticsearch:
host: "192.168.100.85:9200"
index: "netflow-*"
verify_certs: false
timeout: 30
ai:
max_retries: 3
request_timeout: 120
analysis:
interval_minutes: 60
window_hours: 168
min_traffic_bytes: 1000000
pr_creation:
enabled: true
frequency: "smart" # Options: weekly, daily, manual, smart
triggers:
- high_traffic anomaly # Create PR if traffic spike
- security_event # Create PR if security issue
- scheduled: "weekly"
thresholds:
traffic_spike: 200 # 200% increase triggers PR
new_hosts: 10 # 10+ new IPs triggers PR
day_of_week: "saturday" # lowercase day name, e.g. "saturday"
hour_of_day: 22 # 24-hour format (22 = 10 PM)
skip_if_pending: true # Don't create if PR already open
min_days_between: 7 # Minimum days between PRs
window_hours: 168
elasticsearch:
host: 192.168.100.85:9200
index: netflow-*
timeout: 30
verify_certs: false
gitea:
url: "https://git.salmutt.dev"
repo: "netops/srx-config"
branch: main
labels:
- ai-generated
- auto-config
- pending-review
repo: netops/srx-config
token: "${GITEA_TOKEN}"
branch: "main"
labels: ["ai-generated", "auto-config", "pending-review"]
srx:
host: "192.168.100.1"
port: 830
username: "netops"
ssh_key: "/home/netops/.ssh/srx_key"
url: https://git.salmutt.dev
logging:
level: INFO
max_file_size: 100MB
retention_days: 30
pr_creation:
day_of_week: saturday
enabled: true
frequency: smart
hour_of_day: 22
min_days_between: 1
skip_if_pending: true
thresholds:
new_hosts: 10
traffic_spike: 200
triggers:
- high_traffic anomaly
- security_event
- scheduled: weekly
shared_storage:
path: "/shared/ai-gitops"
path: /shared/ai-gitops
srx:
host: 192.168.100.1
port: 830
ssh_key: /home/netops/.ssh/srx_key
username: netops
state_tracking:
enabled: true
state_file: '/shared/ai-gitops/state/orchestrator_state.json'
state_file: /shared/ai-gitops/state/orchestrator_state.json
track_pr_history: true
ai:
request_timeout: 120
max_retries: 3
logging:
level: "INFO"
max_file_size: "100MB"
retention_days: 30

View File

@@ -45,6 +45,29 @@ def main():
# Extract details
suggestions = pr_data.get('suggestions', '')
# Handle if suggestions is a list of dicts or strings
if isinstance(suggestions, list):
# Format suggestions properly
if isinstance(suggestions, list) and suggestions:
if isinstance(suggestions[0], dict):
# Extract just the config lines
formatted_lines = []
for item in suggestions:
if isinstance(item, dict) and 'config' in item:
formatted_lines.append(item['config'])
suggestions_preview = formatted_lines
else:
suggestions_preview = suggestions
else:
suggestions_preview = suggestions.split('\n') if isinstance(suggestions, str) else suggestions
# If list of dicts with 'config' key
if suggestions and isinstance(suggestions[0], dict):
suggestions = '\n'.join([s.get('config', '') for s in suggestions if s.get('config')])
else:
# If list of strings
suggestions = '\n'.join(suggestions)
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M")
# Show preview

View File

@@ -518,7 +518,7 @@ Include comments explaining each change."""
return False
# Check for existing pending PR
if self.state.get('pending_pr'):
if self.state.get('pending_pr') is not None and self.state.get('pending_pr') != False:
logger.info(f"Skipping PR creation - pending PR exists: {self.state['pending_pr']}")
return False
@@ -534,11 +534,13 @@ Include comments explaining each change."""
return False
# Create the PR
pr_info = gitea.create_pr_with_config(
srx_config=srx_config,
title=f"AI Network Configuration Suggestions - {datetime.now().strftime('%B %d, %Y')}",
description=None # Will auto-generate
# Use working PR creation pipeline
import subprocess
result = subprocess.run(
'/home/netops/orchestrator/safe_pr_creation.sh',
shell=True, capture_output=True, text=True
)
pr_info = {'number': 'auto-created'} if 'SUCCESS' in result.stdout else None
if pr_info:
# Update state with PR information
@@ -608,7 +610,7 @@ Include comments explaining each change."""
# If PR is closed or merged, clear the pending_pr flag
if pr_status['state'] == 'closed':
logger.info(f"PR #{pr_status['number']} has been closed")
self.state['pending_pr'] = None
del self.state['pending_pr']
self.state['last_pr_status'] = 'closed'
self.state['last_pr_closed'] = datetime.now().isoformat()