Skip to content

Commit cea6f14

Browse files
committed
update
1 parent c712833 commit cea6f14

16 files changed

+922
-0
lines changed
Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,49 @@
1+
from flask import Blueprint, request, jsonify, Response
2+
from backend.core.cortex_engine import CortexEngine
3+
import threading
4+
5+
cortex_bp = Blueprint('cortex', __name__)
6+
7+
# Singleton Cortex Instance
8+
cortex = CortexEngine()
9+
10+
@cortex_bp.route('/cortex/execute', methods=['POST'])
def execute_job():
    """
    Executes a manufacturing profile.

    JSON Body: { "profile": {...}, "optimize": true }
    Returns a streamed text/plain response, one engine event per line.
    """
    # Fix: request.json is None for a missing or non-JSON body, which made
    # data.get(...) raise AttributeError. get_json(silent=True) never raises.
    data = request.get_json(silent=True) or {}
    profile = data.get("profile")
    if profile is None:
        return jsonify({"error": "Missing 'profile' in request body"}), 400
    optimize = data.get("optimize", True)

    def generate():
        # Pass each engine event through as its own newline-terminated line.
        for event in cortex.execute_job(profile, optimize):
            yield f"{event}\n"

    return Response(generate(), mimetype='text/plain')
25+
26+
@cortex_bp.route('/cortex/evolve', methods=['POST'])
def evolve_job():
    """
    Just runs the discovery engine and returns the optimized profile as JSON.
    """
    # Fix: request.json is None for a missing or non-JSON body, which made
    # data.get(...) raise AttributeError. get_json(silent=True) never raises.
    data = request.get_json(silent=True) or {}
    profile = data.get("profile")
    if profile is None:
        return jsonify({"error": "Missing 'profile' in request body"}), 400
    best_mutant = cortex.optimizer.evolve_profile(profile)
    return jsonify(best_mutant)
35+
36+
@cortex_bp.route('/cortex/status', methods=['GET'])
def status():
    """
    Returns the current Covalent Spectrum state (Hex Trace).
    """
    # Snapshot the spectrum, then emit a dummy trace (event id 999) for it.
    # NOTE(review): this reads cortex.streamer.hex_logger, while CortexEngine
    # also owns a separate `tracer` (HexTraceLogger) — confirm which is intended.
    snapshot = cortex.streamer.spectrum.get_state()
    hex_trace = cortex.streamer.hex_logger.log_trace(snapshot, 999)
    payload = {
        "status": "ONLINE",
        "spectrum": str(snapshot),
        "hex_trace": hex_trace,
        "mode": "CORTEX_ACTIVE",
    }
    return jsonify(payload)
Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
from flask import Blueprint, request, jsonify, Response, stream_with_context
2+
import logging
3+
from backend.cms.services.dopamine_engine import DopamineEngine
4+
from backend.integrations.synapse.gcode_streamer import GCodeStreamer, MockRepo
5+
6+
synapse_bp = Blueprint('synapse', __name__)
7+
logger = logging.getLogger(__name__)
8+
9+
# Initialize Engine (Mock Repo for now, or real one)
10+
repo = MockRepo()
11+
engine = DopamineEngine(repo)
12+
streamer = GCodeStreamer(engine)
13+
14+
@synapse_bp.route('/stream', methods=['POST'])
def stream_gcode():
    """
    Endpoint to stream G-Code.
    Expects textual G-Code in body.
    Returns a streamed response of processed G-Code lines.
    """
    source_lines = request.data.decode('utf-8').splitlines()

    def emit():
        # Forward each processed line, newline-terminated.
        for processed in streamer.stream(source_lines):
            yield f"{processed}\n"

    return Response(stream_with_context(emit()), mimetype='text/plain')
29+
30+
@synapse_bp.route('/status', methods=['GET'])
def get_status():
    """
    Returns the current status of the Synapse.
    """
    status_payload = {
        "safety_interdictions": streamer.safety_interdictions,
        "is_paused": streamer.is_paused,
        "active_machine": streamer.machine_id,
    }
    return jsonify(status_payload)
Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
#!/bin/bash
# Cortex stress test: generate synthetic CPU load with sysbench, then poll
# the Cortex status endpoint while the machine is under load.
echo "Starting Cortex Stress Test..."

# 1. Start Sysbench (CPU Load) in background
echo "Generating Synthetic Load (Sysbench)..."
sysbench cpu --cpu-max-prime=10000 --threads=4 run > sysbench_output.log &
SYSBENCH_PID=$!

# 2. Query Cortex Status while under load
echo "Querying Cortex API under load..."
for i in {1..5}; do
    # Fix: curl -s emits the JSON body without a trailing newline, so the
    # five responses ran together; the bare echo separates them.
    curl -s http://localhost:5000/cortex/status
    echo
    sleep 1
done

# 3. Cleanup
# Fix: quote the PID and `wait` on it so the background job is reaped
# (no zombie) and its exit noise is suppressed.
echo "Stopping Load..."
kill "$SYSBENCH_PID"
wait "$SYSBENCH_PID" 2>/dev/null
echo "Test Complete. Check sysbench_output.log for raw metrics."
Lines changed: 62 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,62 @@
1+
import logging
2+
from typing import Dict, Any, Generator
3+
4+
# Sub-Engines
5+
try:
6+
from backend.cms.services.dopamine_engine import DopamineEngine
7+
except ImportError:
8+
class DopamineEngine: # Mock
9+
pass
10+
11+
from backend.core.parallel_streamer import ParallelStreamer
12+
from backend.core.evolutionary_optimizer import EvolutionaryOptimizer
13+
from backend.core.hex_logger import HexTraceLogger
14+
15+
class CortexEngine:
    """
    The 'Unified Engine' that orchestrates the entire 'Open Mechanic' architecture.
    Integrates:
    - Parallel Execution (Streamer)
    - Discovery (Evolutionary Optimizer)
    - Tracing (Hex Logger)
    - Biological Logic (Dopamine Engine)
    """
    def __init__(self):
        self.logger = logging.getLogger("CortexEngine")

        # 1. Initialize Core Engines
        # NOTE(review): elsewhere in this commit DopamineEngine is constructed
        # with a repo argument (DopamineEngine(repo)) — confirm the no-arg call
        # is valid for the non-mock implementation.
        self.dopamine = DopamineEngine()
        self.streamer = ParallelStreamer(self.dopamine)
        self.optimizer = EvolutionaryOptimizer()
        self.tracer = HexTraceLogger()

        self.logger.info("Cortex Engine Initialized. Architecture Extended.")

    def execute_job(self, profile: Dict[str, Any], optimize: bool = True) -> Generator[Dict, None, None]:
        """
        Executes a manufacturing job.

        Args:
            profile: The manufacturing profile to run.
            optimize: When True, run the evolutionary optimizer first.

        Yields:
            Events emitted by the parallel streamer during execution.
        """
        # Lazy %-style args avoid formatting work when the level is disabled.
        self.logger.info("Received Job. Optimize=%s", optimize)

        # 2. Optimization Phase (Discovery)
        final_profile = profile
        if optimize:
            self.logger.info("Running Evolutionary Optimizer...")
            final_profile = self.optimizer.evolve_profile(profile)
            self.logger.info("Optimization Complete. Best Mutant Selected.")

        # 3. Execution Phase (Parallel Streamer) — pass events straight
        # through; `yield from` is the idiomatic form of the pass-through loop.
        yield from self.streamer.execute_profile(final_profile)

    def run_nightly_discovery(self, profiles: list[Dict]) -> list[Dict]:
        """
        Runs the optimizer on a batch of profiles to find new efficiencies overnight.
        The 'Best Functionalities' you don't know about yet.
        """
        return [self.optimizer.evolve_profile(p) for p in profiles]
Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,49 @@
1+
import numpy as np
2+
import threading
3+
import logging
4+
5+
class TensorSpectrum:
    """
    The 'Covalent' Part.
    A shared memory spectrum (Tensor) that represents the machine's holistic state.
    Parallel pools (Neural, Grid) inject their 'Electron Pairs' (Data) into this spectrum.

    Indices:
    0: Cortisol (Biological Stress)
    1: Vibration (Physical Instability)
    2: Grid Risk (Collision Probability)
    3: Thermal Load (Temperature)
    """
    def __init__(self):
        self.lock = threading.Lock()
        self.spectrum = np.zeros(4, dtype=np.float32)
        self.logger = logging.getLogger("Covalent.Spectrum")

    def bond(self, index: int, value: float):
        """
        Injects a value into the spectrum (forming a bond).
        Thread-safe.
        """
        with self.lock:
            # Max-hold blending: keep the strongest signal seen so far.
            self.spectrum[index] = max(self.spectrum[index], value)

    def dissolve(self):
        """
        Decays the spectrum over time (Entropy).
        """
        with self.lock:
            self.spectrum *= 0.95  # Decay factor

    def get_state(self) -> np.ndarray:
        """Returns a copy of the spectrum taken under the lock."""
        with self.lock:
            return self.spectrum.copy()

    def analyze_stability(self) -> str:
        """
        Determines if the covalent bond is stable.
        """
        # Fix: sum a locked snapshot (via get_state). The original summed
        # self.spectrum without the lock while bond()/dissolve() could mutate
        # it concurrently, yielding an inconsistent total.
        total_energy = float(np.sum(self.get_state()))
        if total_energy > 2.0:
            return "UNSTABLE"
        return "STABLE"
Lines changed: 97 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,97 @@
1+
import random
2+
import copy
3+
import logging
4+
from typing import Dict, List
5+
from backend.integrations.grid_interface import GridInterface
6+
7+
class EvolutionaryOptimizer:
    """
    The 'Discovery Engine'.
    Uses Genetic Algorithms to explore the parameter space and find
    'Functionalities you don't know about' (Novel Optimizations).
    """
    def __init__(self):
        self.grid = GridInterface()
        self.logger = logging.getLogger("EvoOptimizer")
        self.population_size = 10
        self.generations = 5

    def evolve_profile(self, profile: Dict) -> Dict:
        """
        Takes a base profile and evolves it to maximize efficiency/stability.
        Returns the 'Best Mutant'.
        """
        self.logger.info("Starting Evolutionary Discovery...")

        # 1. Initialize Population (Mutants)
        population = [self._mutate(profile) for _ in range(self.population_size)]

        # Fix: the original referenced `fitness_scores` after the loop, which
        # raised NameError whenever self.generations == 0.
        fitness_scores: List = []
        for gen in range(self.generations):
            # 2. Evaluate Fitness via Simulation
            fitness_scores = [(self._evaluate_fitness(m), m) for m in population]

            # 3. Selection (Survival of the Fittest)
            fitness_scores.sort(key=lambda x: x[0], reverse=True)
            top_performers = [x[1] for x in fitness_scores[:int(self.population_size / 2)]]

            self.logger.info("Generation %s: Best Score = %s", gen, fitness_scores[0][0])

            # 4. Reproduction (Breeding/Mutation) — elitism: survivors carry
            # over unchanged, children are mutated copies of random survivors.
            new_population = top_performers[:]
            while len(new_population) < self.population_size:
                parent = random.choice(top_performers)
                new_population.append(self._mutate(parent))

            population = new_population

        if not fitness_scores:
            # Zero generations configured: return an unevaluated mutant.
            return population[0]
        return fitness_scores[0][1]

    def _mutate(self, profile: Dict) -> Dict:
        """
        Randomly alters profile parameters (Feed Rate, Strategy).
        Returns a deep copy; the input profile is never modified.
        """
        mutant = copy.deepcopy(profile)
        for segment in mutant.get("segments", []):
            # Mutation: Random Feed Rate Adjustment (30% chance, 0.8x-1.5x)
            if random.random() < 0.3:
                feed = segment.get("optimized_feed", 1000)
                segment["optimized_feed"] = int(feed * random.uniform(0.8, 1.5))

            # Mutation: Strategy Switch (Novelty, 10% chance)
            if random.random() < 0.1:
                segment["strategy"] = random.choice(["trochoidal", "adaptive", "plunge"])

        return mutant

    def _evaluate_fitness(self, profile: Dict) -> float:
        """
        Simulates the profile and assigns a score.
        Score = Speed - Risk - Energy
        """
        total_time = 0.0
        risk_penalty = 0

        for segment in profile.get("segments", []):
            # Simulation
            status = self.grid.simulate_segment(segment)
            if status == "COLLISION":
                return -1000.0  # Instant Death
            elif status == "RISK":
                risk_penalty += 50

            # Efficiency
            feed = segment.get("optimized_feed", 1000)
            if feed <= 0:
                # Fix: a zero/negative feed rate is non-viable motion; the
                # original divided by it and raised ZeroDivisionError.
                return -1000.0
            dist = 10  # Mock distance — TODO confirm real segment length source
            total_time += dist / feed

        # Fix: an empty segment list left total_time == 0 and the original
        # raised ZeroDivisionError in 1.0 / total_time.
        if total_time == 0:
            return 0.0

        # Fitness Function: reward speed, subtract accumulated risk.
        return (1.0 / total_time) * 1000 - risk_penalty
Lines changed: 56 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,56 @@
1+
import logging
2+
import numpy as np
3+
import binascii
4+
import struct
5+
import platform
6+
import os
7+
8+
class HexTraceLogger:
    """
    Advanced Log Composer that creates a Hexadecimal Trace of system links.
    Captures Covalent State (Numpy), System Info (Conda/Docker), and timestamps.
    """
    def __init__(self):
        self.logger = logging.getLogger("HexTrace")
        self.system_fingerprint = self._get_system_fingerprint()

    def _get_system_fingerprint(self) -> bytes:
        """
        Captures static system info (Platform, Node) as bytes.
        """
        u = platform.uname()
        raw = f"{u.system}-{u.release}-{u.node}".encode('utf-8')
        # Exactly 16 bytes: truncate, then NUL-pad on the right.
        return raw[:16].ljust(16, b'\0')

    def compose_trace(self, spectrum: np.ndarray, event_id: int) -> str:
        """
        Composes a Hexadecimal Trace String from the current state.
        Format: [EventID][SystemFP][SpectrumData][CheckSum]
        """
        parts = [
            struct.pack('>I', event_id),   # 1. Event ID (4 bytes, big-endian)
            self.system_fingerprint,       # 2. System Fingerprint (16 bytes)
            spectrum.tobytes(),            # 3. Spectrum Data (raw array bytes)
        ]
        payload = b"".join(parts)

        # 4./5. Append CRC32 of the payload as 4 big-endian bytes.
        trailer = struct.pack('>I', binascii.crc32(payload))

        # 6. Final uppercase hex string.
        return binascii.hexlify(payload + trailer).decode('ascii').upper()

    def log_trace(self, spectrum: np.ndarray, event_id: int):
        trace = self.compose_trace(spectrum, event_id)
        self.logger.info(f"TRACE_HEX: {trace}")
        return trace

0 commit comments

Comments
 (0)