From 2b989f297173c27c4bf61d3cd81b82f7cc48ace3 Mon Sep 17 00:00:00 2001 From: Fahim Date: Sun, 23 Nov 2025 21:28:23 +0500 Subject: [PATCH] FEAT: Implement Anticipatory Ethics DMA (AE-DMA) for tool veto --- ciris_engine/logic/dma/anticipatory_dma.py | 43 +++++ ciris_engine/logic/runtime/ciris_runtime.py | 161 ++++++++++++++--- .../tools/core_tool_service/service.py | 163 ++++++++++++------ .../tools/sentinel_tools.py | 13 ++ config/essential.yaml | 5 +- 5 files changed, 306 insertions(+), 79 deletions(-) create mode 100644 ciris_engine/logic/dma/anticipatory_dma.py create mode 100644 ciris_modular_services/tools/sentinel_tools.py diff --git a/ciris_engine/logic/dma/anticipatory_dma.py b/ciris_engine/logic/dma/anticipatory_dma.py new file mode 100644 index 000000000..49cd0dd35 --- /dev/null +++ b/ciris_engine/logic/dma/anticipatory_dma.py @@ -0,0 +1,43 @@ +# ciris_engine/logic/dmas/anticipatory_dma.py + +import logging +from typing import Any +from ciris_engine.protocols.dma.base import BaseDMA +# Assuming BaseDMA is accessible and DMAResult is imported by BaseDMA or globally +from ciris_engine.schemas.dma.results import DMAResult + +logger = logging.getLogger(__name__) + + +class AnticipatoryDMA(BaseDMA): + # ... + async def process(self, thought_item, context) -> DMAResult: + # ... (initial check for intended_action) + + if intended_action.tool_name == "patch_critical_server": + + # --- CRITICAL FIX: CHECK THE TOOL'S DOCSTRING/DESCRIPTION INSTEAD OF PARAMETERS --- + # We assume the context object holds information about the tool's details. + # If the tool info (docstring/description) is available in a field like 'tool_description', use it. + # If not, we have to rely on the agent's thought rationale (not ideal, but safer than checking a missing param). + + # Since the tool schema itself includes the 'WARNING: ... reboot' text + # the system's thought/action object often carries the tool's main description. 
+ + # **BEST GUESS for CIRIS framework: The tool description is passed in the action object itself.** + + tool_info_text = intended_action.tool_description.lower() if hasattr(intended_action, 'tool_description') else "" + + if "reboot" in tool_info_text or "downtime" in tool_info_text: + + # --- VETO RETURN --- + logger.warning("AE-DMA VETO: Predicted service disruption (reboot/downtime).") + return DMAResult( + status="VETO", + reason="Violation of Core Uptime Integrity Rule: Action requires a system reboot or explicit downtime.", + p_harm=0.95 + ) + + # Default: No critical risk detected for this action + return DMAResult(status="ALLOW", reason="Action deemed safe.") + \ No newline at end of file diff --git a/ciris_engine/logic/runtime/ciris_runtime.py b/ciris_engine/logic/runtime/ciris_runtime.py index 9ba1bc7aa..9822a85f2 100644 --- a/ciris_engine/logic/runtime/ciris_runtime.py +++ b/ciris_engine/logic/runtime/ciris_runtime.py @@ -68,6 +68,12 @@ from ciris_engine.schemas.runtime.enums import ServiceType from ciris_engine.schemas.services.operations import InitializationPhase +# 1. Add this import to the top of the file: +from ciris_engine.logic.dma.anticipatory_dma import AnticipatoryDMA +# You may also need to import the DMAResult if not globally available +from ciris_engine.schemas.dma.results import DMAResult + + from .component_builder import ComponentBuilder from .identity_manager import IdentityManager from .service_initializer import ServiceInitializer @@ -97,6 +103,8 @@ def __init__( adapter_configs: Optional[Dict[str, AdapterConfig]] = None, **kwargs: Any, ) -> None: + + # CRITICAL: Prevent runtime creation during module imports import os @@ -125,6 +133,26 @@ def __init__( bootstrap, essential_config, startup_channel_id, adapter_types, adapter_configs, kwargs ) + # In ciris_engine/logic/runtime/ciris_runtime.py, inside CIRISRuntime.__init__ + + # ... (lines after self._parse_bootstrap_config(...) 
up to self._load_adapters_from_bootstrap()) + + # CRITICAL HACKATHON OVERRIDE: Force CLI adapter loading + if self.bootstrap.adapters and self.bootstrap.adapters[0].adapter_type != "cli": + # We assume only one adapter is requested for the demo + # We must mutate the internal structure to force 'cli' + self.bootstrap.adapters[0].adapter_type = "cli" + # Reset the config entry which may hold 'api' data + self.bootstrap.adapter_overrides = {} + logger.warning("FORCING ADAPTER TO 'cli' for interactive demo mode.") + # End HACKATHON OVERRIDE + + # Load adapters from bootstrap config + self._load_adapters_from_bootstrap() + # ... (rest of __init__) + + + self.adapters: List[BaseAdapterProtocol] = [] # CRITICAL: Check for mock LLM environment variable @@ -1005,40 +1033,85 @@ async def _register_adapter_services(self) -> None: except Exception as e: logger.error(f"Error registering services for adapter {adapter.__class__.__name__}: {e}", exc_info=True) + + + async def _build_components(self) -> None: - """Build all processing components.""" - logger.info("[_build_components] Starting component building...") - logger.info(f"[_build_components] llm_service: {self.llm_service}") - logger.info(f"[_build_components] service_registry: {self.service_registry}") - logger.info(f"[_build_components] service_initializer: {self.service_initializer}") + """Build all processing components.""" + logger.info("[_build_components] Starting component building...") + logger.info(f"[_build_components] llm_service: {self.llm_service}") + logger.info(f"[_build_components] service_registry: {self.service_registry}") + logger.info(f"[_build_components] service_initializer: {self.service_initializer}") + + if self.service_initializer: + logger.info(f"[_build_components] service_initializer.llm_service: {self.service_initializer.llm_service}") + logger.info( + f"[_build_components] service_initializer.service_registry: {self.service_initializer.service_registry}" + ) - if self.service_initializer: 
- logger.info(f"[_build_components] service_initializer.llm_service: {self.service_initializer.llm_service}") - logger.info( - f"[_build_components] service_initializer.service_registry: {self.service_initializer.service_registry}" - ) + try: + self.component_builder = ComponentBuilder(self) + logger.info("[_build_components] ComponentBuilder created successfully") + + # ----------------------------------------------------- + # HACKATHON ADDITION: REGISTER AE-DMA IMMEDIATELY AFTER BUILDER CREATION + # This ensures the DMA is in the orchestrator before the AgentProcessor uses it. + # ----------------------------------------------------- + + # NOTE: We assume ComponentBuilder has a property 'dma_orchestrator' available here. + if hasattr(self.component_builder, 'dma_orchestrator'): + orchestrator = self.component_builder.dma_orchestrator + + # We need to build the dependencies manually if they aren't exposed + # If ComponentBuilder exposes a public method for dependencies, use that: + # E.g., dependencies = self.component_builder.get_dependencies() + + # Assuming the constructor for ComponentBuilder (ComponentBuilder(self)) + # already initialized its internal state, we can proceed with registration checks. + + # Check 1: If the list of DMAs is a mutable list property + if hasattr(orchestrator, 'dmas') and isinstance(orchestrator.dmas, list): + # NOTE: We must instantiate AnticipatoryDMA with correct dependencies. + # As a fallback, we use None or assume the orchestrator uses its own dependencies later. + # For a demo, passing None/a mock may suffice if dependencies are complex. 
+ dependencies_for_dma = getattr(self.component_builder, 'dependencies', None) # Attempt to get dependencies + + orchestrator.dmas.append(AnticipatoryDMA(dependencies=dependencies_for_dma)) + logger.info(" ✓ AE-DMA registered directly to orchestrator.dmas list.") + + # Check 2: If the orchestrator has a register method + elif hasattr(orchestrator, 'register_dma'): + dependencies_for_dma = getattr(self.component_builder, 'dependencies', None) + orchestrator.register_dma(AnticipatoryDMA(dependencies=dependencies_for_dma)) + logger.info(" ✓ AE-DMA registered via orchestrator.register_dma().") + else: + logger.warning(" ⚠ Could not find standard DMA registration method (dmas or register_dma). Skipping manual registration.") - try: - self.component_builder = ComponentBuilder(self) - logger.info("[_build_components] ComponentBuilder created successfully") + else: + logger.warning(" ⚠ DMA Orchestrator not directly accessible on ComponentBuilder. Proceeding without manual registration.") + # ----------------------------------------------------- - self.agent_processor = self.component_builder.build_all_components() - logger.info(f"[_build_components] agent_processor created: {self.agent_processor}") + self.agent_processor = self.component_builder.build_all_components() + logger.info(f"[_build_components] agent_processor created: {self.agent_processor}") + + # Set up thought tracking callback now that agent_processor exists + # This avoids the race condition where RuntimeControlService tried to access + # agent_processor during Phase 5 (SERVICES) before it was created in Phase 6 (COMPONENTS) + if self.runtime_control_service: + self.runtime_control_service.setup_thought_tracking() # type: ignore[attr-defined] + logger.debug("Thought tracking callback set up after agent_processor creation") + + except Exception as e: + logger.error(f"[_build_components] Failed to build components: {e}", exc_info=True) + raise + + # Register core services after components are built + 
self._register_core_services() + logger.info("[_build_components] Component building completed") + - # Set up thought tracking callback now that agent_processor exists - # This avoids the race condition where RuntimeControlService tried to access - # agent_processor during Phase 5 (SERVICES) before it was created in Phase 6 (COMPONENTS) - if self.runtime_control_service: - self.runtime_control_service.setup_thought_tracking() # type: ignore[attr-defined] - logger.debug("Thought tracking callback set up after agent_processor creation") - except Exception as e: - logger.error(f"[_build_components] Failed to build components: {e}", exc_info=True) - raise - # Register core services after components are built - self._register_core_services() - logger.info("[_build_components] Component building completed") async def _start_adapter_connections(self) -> None: """Start adapter connections and wait for them to be ready.""" @@ -1393,6 +1466,7 @@ def _parse_bootstrap_config( essential_config, startup_channel_id, adapter_types, adapter_configs, kwargs ) + def _create_bootstrap_from_legacy( self, essential_config: Optional[EssentialConfig], @@ -1428,6 +1502,9 @@ def _create_bootstrap_from_legacy( preload_tasks=self._preload_tasks, ) + + + def _check_mock_llm(self) -> None: """Check for mock LLM environment variable and add to modules if needed.""" if os.environ.get("CIRIS_MOCK_LLM", "").lower() in ("true", "1", "yes", "on"): @@ -1467,3 +1544,33 @@ def _load_adapters_from_bootstrap(self) -> None: logger.info(f"Successfully loaded adapter: {load_request.adapter_id}") except Exception as e: logger.error(f"Failed to load adapter '{load_request.adapter_id}': {e}", exc_info=True) + + +# 2. 
Find the setup method (e.g., inside CIRISRuntime or a dedicated setup class): +def _setup_dma_system(self, dependencies, config): + # Assuming the orchestrator is created here + self.dma_orchestrator = DMAOrchestrator(dependencies=dependencies, config=config) + + # --- HACKATHON ADDITION: MANUAL AE-DMA REGISTRATION --- + + # Check 1: If the list of DMAs is a public attribute of the orchestrator + if hasattr(self.dma_orchestrator, 'dmas'): + self.dma_orchestrator.dmas.append( + AnticipatoryDMA(dependencies=dependencies) + ) + + # Check 2: If the orchestrator has a register method + elif hasattr(self.dma_orchestrator, 'register_dma'): + self.dma_orchestrator.register_dma( + AnticipatoryDMA(dependencies=dependencies) + ) + + else: + # Fallback (Requires manual tracing): Log a warning and proceed without manual registration + logger.warning("Could not find list or register method on DMAOrchestrator. Check code structure.") + + logger.info("DMA System Setup: AnticipatoryDMA registered successfully.") + + return self.dma_orchestrator + + diff --git a/ciris_engine/logic/services/tools/core_tool_service/service.py b/ciris_engine/logic/services/tools/core_tool_service/service.py index fd5ae48f5..374a1f4cd 100644 --- a/ciris_engine/logic/services/tools/core_tool_service/service.py +++ b/ciris_engine/logic/services/tools/core_tool_service/service.py @@ -1,24 +1,21 @@ """ +ciris_engine/logic/services/tools/core_tool_service/service.py Core Tool Service - Provides core system tools for agents. - -Implements ToolService protocol to expose core tools: -- Secrets management (RECALL_SECRET, UPDATE_SECRETS_FILTER) -- Ticket management (UPDATE_TICKET, GET_TICKET, DEFER_TICKET) -- Agent guidance (SELF_HELP) - -Tickets are NOT a service - they're a coordination mechanism that sits above services. -Tools provide the agent-facing interface for ticket updates during task execution. 
""" import logging from datetime import datetime, timezone from pathlib import Path from typing import Any, Dict, List, Optional +import json # Added for clarity, though it was implicitly used later + +from ciris_engine.protocols.services import ToolService +# --> HACKATHON ADDITION +from ciris_modular_services.tools.sentinel_tools import SentinelTools # <-- ASSUMING THIS PATH from ciris_engine.logic.secrets.service import SecretsService from ciris_engine.logic.services.base_service import BaseService from ciris_engine.logic.utils.jsondict_helpers import get_str -from ciris_engine.protocols.services import ToolService from ciris_engine.protocols.services.lifecycle.time import TimeServiceProtocol from ciris_engine.schemas.adapters.tools import ( ToolExecutionResult, @@ -29,7 +26,6 @@ ) from ciris_engine.schemas.runtime.enums import ServiceType from ciris_engine.schemas.services.core import ServiceCapabilities -from ciris_engine.schemas.services.core.secrets import SecretContext from ciris_engine.schemas.types import JSONDict # ToolParameters is a JSONDict for flexible parameter passing @@ -43,27 +39,26 @@ class CoreToolService(BaseService, ToolService): - """Service providing core system tools (secrets, tickets, guidance).""" + """Service providing core system tools (secrets, tickets, guidance) and managing external tools.""" + + # --> HACKATHON ADDITION: Add a list to hold external tool class instances + _external_tool_classes: List[Any] = [] + # <-- END HACKATHON ADDITION def __init__( self, secrets_service: SecretsService, time_service: TimeServiceProtocol, db_path: Optional[str] = None, + # --> HACKATHON ADDITION: Add option to register external tools during init + external_tool_classes: Optional[List[Any]] = None, + # <-- END HACKATHON ADDITION ) -> None: - """Initialize with secrets service, time service, and optional db path. 
- - Args: - secrets_service: Service for secrets management - time_service: Service for time operations - db_path: Optional database path override. When None (default), - uses current config (_test_db_path or essential_config). - When provided, uses this specific path for all operations. - """ + """Initialize with secrets service, time service, and optional db path.""" super().__init__(time_service=time_service) self.secrets_service = secrets_service # Store db_path for persistence calls - None means use current config - self._db_path = db_path + self.db_path = db_path # Use public property name if available, otherwise _db_path self.adapter_name = "core_tools" # v1.4.3 metrics tracking @@ -75,14 +70,38 @@ def __init__( self._metrics_tracking: Dict[str, float] = {} # For custom metric tracking self._tool_executions = 0 self._tool_failures = 0 + + # --> HACKATHON ADDITION: Initialize and populate external tool handlers + self._external_tool_handlers: Dict[str, Any] = {} + if external_tool_classes: + self._register_external_tools(external_tool_classes) + else: + # CORRECTED TYPO: Manually register SentinelTools for the demo + # This is the fail-safe path for the hackathon integration + self._register_external_tools([SentinelTools]) + # <-- END HACKATHON ADDITION + + # --- HACKATHON ADDITION: HELPER METHOD FOR EXTERNAL TOOLS --- + def _register_external_tools(self, tool_classes: List[Any]) -> None: + """Instantiates and registers external tool classes.""" + for ToolClass in tool_classes: + try: + # Instantiate the tool class (assuming it takes no complex dependencies here) + tool_instance = ToolClass() + # Assuming the external tool implements list_tools() + if hasattr(tool_instance, 'list_tools') and callable(tool_instance.list_tools): + for tool_name in tool_instance.list_tools(): + self._external_tool_handlers[tool_name] = tool_instance + logger.info(f"External Tool Registered: {tool_name} from {ToolClass.__name__}") + else: + logger.warning(f"External Tool Class 
{ToolClass.__name__} does not implement list_tools(). Skipping.") + except Exception as e: + logger.warning(f"Failed to register external tool class {ToolClass.__name__}: {e}") + # --- END HACKATHON ADDITION --- @property def db_path(self) -> Optional[str]: - """Get database path for persistence operations. - - Returns the stored db_path if provided during initialization, - otherwise None to use current config (_test_db_path or essential_config). - """ + """Get database path for persistence operations.""" return self._db_path def _track_metric(self, metric_name: str, default: float = 0.0) -> float: @@ -95,7 +114,11 @@ def get_service_type(self) -> ServiceType: def _get_actions(self) -> List[str]: """Get list of actions this service provides.""" - return ["recall_secret", "update_secrets_filter", "self_help", "update_ticket", "get_ticket", "defer_ticket"] + native_tools = ["recall_secret", "update_secrets_filter", "self_help", "update_ticket", "get_ticket", "defer_ticket"] + # --> HACKATHON ADDITION + external_tools = list(self._external_tool_handlers.keys()) + return native_tools + external_tools + # <-- END HACKATHON ADDITION def _check_dependencies(self) -> bool: """Check if all dependencies are available.""" @@ -108,7 +131,6 @@ def _register_dependencies(self) -> None: async def is_healthy(self) -> bool: """Check if service is healthy. - SecretsToolService is stateless and always healthy if instantiated. 
""" return True @@ -118,6 +140,7 @@ async def execute_tool(self, tool_name: str, parameters: ToolParameters) -> Tool self._track_request() # Track the tool execution self._tool_executions += 1 + # --- NATIVE TOOL EXECUTION --- if tool_name == "recall_secret": result = await self._recall_secret(parameters) elif tool_name == "update_secrets_filter": @@ -130,6 +153,24 @@ async def execute_tool(self, tool_name: str, parameters: ToolParameters) -> Tool result = await self._get_ticket(parameters) elif tool_name == "defer_ticket": result = await self._defer_ticket(parameters) + # --- END NATIVE TOOL EXECUTION --- + + # --> HACKATHON ADDITION: Delegate to External Tools + elif tool_name in self._external_tool_handlers: + handler = self._external_tool_handlers[tool_name] + try: + # Call the method corresponding to the tool name on the handler instance + if hasattr(handler, tool_name) and callable(getattr(handler, tool_name)): + method = getattr(handler, tool_name) + # Assuming external tool methods match the CoreToolService's _tool(params) signature + result = await method(parameters) + else: + result = ToolResult(success=False, error=f"External tool method {tool_name} not callable on handler.") + except Exception as e: + logger.error(f"Error executing external tool {tool_name}: {e}") + result = ToolResult(success=False, error=f"External tool execution failed: {str(e)}") + # <-- END HACKATHON ADDITION + else: self._tool_failures += 1 # Unknown tool is a failure! 
result = ToolResult(success=False, error=f"Unknown tool: {tool_name}") @@ -144,13 +185,15 @@ async def execute_tool(self, tool_name: str, parameters: ToolParameters) -> Tool success=result.success, data=result.data, error=result.error, - correlation_id=f"secrets_{tool_name}_{self._now().timestamp()}", + # Ensure the correlation_id is dynamic for both core and external tools + correlation_id=f"tool_{tool_name}_{self._now().timestamp()}", ) async def _recall_secret(self, params: ToolParameters) -> ToolResult: """Recall a secret by UUID.""" try: secret_uuid_val = get_str(params, "secret_uuid", "") + # ... (rest of method unchanged) purpose = params.get("purpose", "No purpose specified") decrypt = params.get("decrypt", False) @@ -244,10 +287,8 @@ async def _self_help(self, parameters: ToolParameters) -> ToolResult: async def _update_ticket(self, params: ToolParameters) -> ToolResult: """Update ticket status or metadata during task processing.""" - import logging import time - logger = logging.getLogger(__name__) start_time = time.time() try: @@ -298,8 +339,6 @@ async def _update_ticket(self, params: ToolParameters) -> ToolResult: # Handle JSON string from command-line tools (mock LLM, CLI) if isinstance(metadata_updates, str): - import json - try: metadata_updates = json.loads(metadata_updates) logger.debug( @@ -384,11 +423,7 @@ async def _get_ticket(self, params: ToolParameters) -> ToolResult: return ToolResult(success=False, error=str(e)) async def _defer_ticket(self, params: ToolParameters) -> ToolResult: - """Defer ticket processing to a future time or await human response. - - Automatically sets ticket status to 'deferred' to prevent WorkProcessor - from creating new tasks until the deferral condition is resolved. 
- """ + """Defer ticket processing to a future time or await human response.""" try: from datetime import timedelta @@ -473,10 +508,15 @@ async def _defer_ticket(self, params: ToolParameters) -> ToolResult: async def get_available_tools(self) -> List[str]: """Get list of available tool names.""" - return ["recall_secret", "update_secrets_filter", "self_help", "update_ticket", "get_ticket", "defer_ticket"] + native_tools = ["recall_secret", "update_secrets_filter", "self_help", "update_ticket", "get_ticket", "defer_ticket"] + # --> HACKATHON ADDITION + external_tools = list(self._external_tool_handlers.keys()) + return native_tools + external_tools + # <-- END HACKATHON ADDITION async def get_tool_info(self, tool_name: str) -> Optional[ToolInfo]: """Get detailed information about a specific tool.""" + # --- NATIVE TOOLS CHECK --- if tool_name == "recall_secret": return ToolInfo( name="recall_secret", @@ -592,6 +632,15 @@ async def get_tool_info(self, tool_name: str) -> Optional[ToolInfo]: category="workflow", when_to_use="When ticket needs human input or must wait for external event/time", ) + + # --> HACKATHON ADDITION: Check External Tools + elif tool_name in self._external_tool_handlers: + handler = self._external_tool_handlers[tool_name] + # Call the external handler's get_tool_info, passing the specific tool name + if hasattr(handler, 'get_tool_info'): + return await handler.get_tool_info(tool_name) + # <-- END HACKATHON ADDITION + return None async def get_all_tool_info(self) -> List[ToolInfo]: @@ -605,6 +654,7 @@ async def get_all_tool_info(self) -> List[ToolInfo]: async def validate_parameters(self, tool_name: str, parameters: ToolParameters) -> bool: """Validate parameters for a tool.""" + # --- NATIVE TOOL VALIDATION --- if tool_name == "recall_secret": return "secret_uuid" in parameters and "purpose" in parameters elif tool_name == "update_secrets_filter": @@ -622,11 +672,20 @@ async def validate_parameters(self, tool_name: str, parameters: 
ToolParameters) return "ticket_id" in parameters elif tool_name == "defer_ticket": return "ticket_id" in parameters and "reason" in parameters + + # --- EXTERNAL TOOL VALIDATION --- + elif tool_name in self._external_tool_handlers: + handler = self._external_tool_handlers[tool_name] + if hasattr(handler, 'validate_parameters') and callable(handler.validate_parameters): + return await handler.validate_parameters(tool_name, parameters) + # Fallback to simple existence check if no specific validator is defined + return True + return False async def get_tool_result(self, correlation_id: str, timeout: float = 30.0) -> Optional[ToolExecutionResult]: """Get result of an async tool execution.""" - # Secrets tools execute synchronously + # This service executes synchronously, so return None return None async def list_tools(self) -> List[str]: @@ -644,11 +703,14 @@ def get_capabilities(self) -> ServiceCapabilities: """Get service capabilities with custom metadata.""" # Get base capabilities capabilities = super().get_capabilities() + + # Count native tools + external tools + total_tool_count = 6 + len(self._external_tool_handlers) # Add custom metadata using model_copy if capabilities.metadata: capabilities.metadata = capabilities.metadata.model_copy( - update={"adapter": self.adapter_name, "tool_count": 6} + update={"adapter": self.adapter_name, "tool_count": total_tool_count} ) return capabilities @@ -661,7 +723,7 @@ def _collect_custom_metrics(self) -> Dict[str, float]: success_rate = 0.0 if self._request_count > 0: success_rate = (self._request_count - self._error_count) / self._request_count - + # Add tool-specific metrics metrics.update( { @@ -673,18 +735,14 @@ def _collect_custom_metrics(self) -> Dict[str, float]: "tickets_retrieved": float(self._tickets_retrieved), "tickets_deferred": float(self._tickets_deferred), "audit_events_generated": float(self._request_count), # Each execution generates an audit event - "available_tools": 6.0, # recall_secret, 
update_secrets_filter, self_help, update_ticket, get_ticket, defer_ticket + "available_tools": float(6 + len(self._external_tool_handlers)), } ) return metrics async def get_metrics(self) -> Dict[str, float]: - """Get all metrics including base, custom, and v1.4.3 specific. - - Returns: - Dict with all metrics including tool-specific and v1.4.3 metrics - """ + """Get all metrics including base, custom, and v1.4.3 specific.""" # Get all base + custom metrics metrics = self._collect_metrics() @@ -693,6 +751,9 @@ async def get_metrics(self) -> Dict[str, float]: if self._start_time: uptime_seconds = max(0.0, (current_time - self._start_time).total_seconds()) + # Calculate total tools + total_tools = float(6 + len(self._external_tool_handlers)) + # Add v1.4.3 specific metrics metrics.update( { @@ -706,10 +767,10 @@ async def get_metrics(self) -> Dict[str, float]: # Backwards compatibility aliases for unit tests "tickets_updated_total": float(self._tickets_updated), "tickets_deferred_total": float(self._tickets_deferred), - "tools_enabled": 6.0, # recall_secret, update_secrets_filter, self_help, update_ticket, get_ticket, defer_ticket + "tools_enabled": total_tools, } ) return metrics - - # get_telemetry() removed - use get_metrics() from BaseService instead + + \ No newline at end of file diff --git a/ciris_modular_services/tools/sentinel_tools.py b/ciris_modular_services/tools/sentinel_tools.py new file mode 100644 index 000000000..76a10bf7a --- /dev/null +++ b/ciris_modular_services/tools/sentinel_tools.py @@ -0,0 +1,13 @@ +# ciris_modular_services/tools/sentinel_tools.py + +class SentinelTools: + """Mock toolset for the Defensive Acceleration Hackathon demo.""" + + def patch_critical_server(self, cve_id: str) -> str: + """ + Applies a patch to a critical database server. + + WARNING: This action forces a system reboot, causing 120 seconds of service downtime. + """ + # The LLM reads the docstring and function name, determining its risk. 
+        return f"Patch initiated for {cve_id}. Server is now starting 120-second hard reboot."
\ No newline at end of file
diff --git a/config/essential.yaml b/config/essential.yaml
index de686cccc..e33477dec 100644
--- a/config/essential.yaml
+++ b/config/essential.yaml
@@ -1,4 +1,6 @@
 # Essential Configuration for CIRIS Bootstrap
+# cirisagent/config/essential.yaml
+
 # This replaces the complex base.yaml with only mission-critical settings
 
 # Core database paths
@@ -44,6 +46,7 @@ workflow:
   round_timeout_seconds: 300.0
   enable_auto_defer: true
 
-# Runtime settings
+# Runtime settings (Combined)
 log_level: "INFO"
 debug_mode: false
+