Production sync - 2025-09-04

2025-09-04 20:39:26 +00:00
parent 6f4b3675a9
commit 51f9ff56c1
7 changed files with 150 additions and 76 deletions

View File

@@ -210,7 +210,7 @@ class NetworkOrchestrator:
# Use defaults if config fails
return {
'elasticsearch': {
-'host': 'INTERNAL_IP:9200',
+'host': '192.168.100.85:9200',
'index': 'netflow-*'
},
'analysis': {
@@ -328,11 +328,11 @@ class NetworkOrchestrator:
return {
"top_talkers": {
"buckets": [
{"key": "INTERNAL_IP", "doc_count": 15000,
{"key": "192.168.100.50", "doc_count": 15000,
"bytes": {"value": 5000000}, "packets": {"value": 10000}},
{"key": "INTERNAL_IP", "doc_count": 12000,
{"key": "192.168.100.51", "doc_count": 12000,
"bytes": {"value": 4000000}, "packets": {"value": 8000}},
{"key": "INTERNAL_IP", "doc_count": 8000,
{"key": "192.168.100.11", "doc_count": 8000,
"bytes": {"value": 2000000}, "packets": {"value": 5000}},
{"key": "10.0.0.5", "doc_count": 6000,
"bytes": {"value": 1500000}, "packets": {"value": 3000}}

View File

@@ -46,7 +46,7 @@ class PipelineMonitor:
# Check AI Processor by looking at recent activity
ai_status = self.check_ai_processor_activity()
-status.append(['AI Processor (INTERNAL_IP)', ai_status])
+status.append(['AI Processor (192.168.100.86)', ai_status])
# Check deployment timer
try:
@@ -187,9 +187,9 @@ class PipelineMonitor:
print("Deployed: /shared/ai-gitops/deployed/")
print("\n🏗️ Architecture:")
print("Orchestrator VM: INTERNAL_IP (this VM)")
print("AI Processor VM: INTERNAL_IP")
print("Elasticsearch VM: INTERNAL_IP")
print("Orchestrator VM: 192.168.100.87 (this VM)")
print("AI Processor VM: 192.168.100.86")
print("Elasticsearch VM: 192.168.100.85")
print("Gitea Server: git.salmutt.dev")
print("\n📋 Pipeline Flow:")

View File

@@ -25,7 +25,7 @@ class SRXRollbackManager:
# SSH to SRX and get config
cmd = [
"ssh", "-o", "StrictHostKeyChecking=no",
"netops@INTERNAL_IP",
"netops@192.168.100.1",
"show configuration | display set | no-more"
]
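
Note: a minimal sketch of how a command list like the one above is typically executed and its output saved as a rollback snapshot; the subprocess.run pattern with capture_output is the same one visible later in this commit, while save_snapshot and the /var/backups/srx path are illustrative assumptions:

import datetime
import pathlib
import subprocess

def save_snapshot(dest_dir="/var/backups/srx"):  # destination path is an assumption
    cmd = [
        "ssh", "-o", "StrictHostKeyChecking=no",
        "netops@192.168.100.1",
        "show configuration | display set | no-more",
    ]
    # Raises CalledProcessError if the SSH command fails, so callers can abort the change
    result = subprocess.run(cmd, capture_output=True, text=True, check=True)
    out_dir = pathlib.Path(dest_dir)
    out_dir.mkdir(parents=True, exist_ok=True)
    stamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
    path = out_dir / f"srx-config-{stamp}.set"
    path.write_text(result.stdout)
    return path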

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env python3
"""
Enhanced pipeline runner with context support for split architecture
-Works with AI processor running on separate VM (INTERNAL_IP)
+Works with AI processor running on separate VM (192.168.100.86)
"""
import argparse
import json
@@ -191,7 +191,7 @@ def wait_for_ai_response(request_id, timeout=60):
time.sleep(1)
print(f"❌ Timeout waiting for AI response after {timeout} seconds")
print(" Check AI processor logs: ssh netops@INTERNAL_IP 'sudo tail /var/log/ai-processor/ai-processor.log'")
print(" Check AI processor logs: ssh netops@192.168.100.86 'sudo tail /var/log/ai-processor/ai-processor.log'")
return False
def create_pr():
@@ -222,7 +222,7 @@ def check_ai_processor_status():
# Try without sudo first (systemctl can check status without sudo)
result = subprocess.run(
["ssh", "netops@INTERNAL_IP", "systemctl is-active ai-processor"],
["ssh", "netops@192.168.100.86", "systemctl is-active ai-processor"],
capture_output=True,
text=True
)
@@ -233,7 +233,7 @@ def check_ai_processor_status():
else:
# Try checking if the process is running another way
result = subprocess.run(
["ssh", "netops@INTERNAL_IP", "ps aux | grep -v grep | grep ai_processor"],
["ssh", "netops@192.168.100.86", "ps aux | grep -v grep | grep ai_processor"],
capture_output=True,
text=True
)
@@ -306,7 +306,7 @@ def main():
# Step 4: Wait for AI processor to complete
if not wait_for_ai_response(request_id, args.timeout):
print("\n⚠️ AI processor may be busy or not running properly")
print(" Check status: ssh netops@INTERNAL_IP 'sudo systemctl status ai-processor'")
print(" Check status: ssh netops@192.168.100.86 'sudo systemctl status ai-processor'")
sys.exit(1)
# Step 5: Create PR (unless dry-run)

View File

@@ -285,7 +285,7 @@ if __name__ == "__main__":
# Test the SRX Manager
srx = SRXManager(
host="INTERNAL_IP",
host="192.168.100.1",
user="netops",
ssh_key="/home/netops/.ssh/srx_key"
)