Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 31 additions & 1 deletion clients/python/hancock_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,36 @@

import os
from typing import Optional
from openai import OpenAI

try:
    from hancock_constants import OPENAI_IMPORT_ERROR_MSG, require_openai
except ImportError:
    # hancock_constants is absent in some deployments (e.g. the installed
    # package), so provide equivalent local definitions.
    OPENAI_IMPORT_ERROR_MSG = (
        "The 'openai' package is required to use HancockClient. "
        "Install it with 'pip install openai' and ensure NVIDIA_API_KEY is set."
    )

    def require_openai(client_cls: Optional[object] = None) -> None:
        """Verify that the optional 'openai' dependency is usable.

        Mirrors the helper exported by hancock_constants: the ImportError is
        deferred until call time (e.g. inside the constructor) so this module
        can still be imported when the dependency is missing, and the error
        message stays actionable.

        client_cls is accepted for compatibility with call sites written as
        require_openai(OpenAI); when omitted, the module-level OpenAI symbol
        (None if the import below failed) is checked instead.
        """
        target = OpenAI if client_cls is None else client_cls  # type: ignore[name-defined]
        if target is None:
            raise ImportError(OPENAI_IMPORT_ERROR_MSG)

try:
    from openai import OpenAI
except ImportError:  # allow import; require_openai() enforces dependency in constructor
    OpenAI = None  # type: ignore

# ── Models ──────────────────────────────────────────────────────────────────
MODELS: dict[str, str] = {
Expand Down Expand Up @@ -83,6 +112,7 @@ def __init__(
coder_model: str = "qwen-coder",
base_url: str = "https://integrate.api.nvidia.com/v1",
):
require_openai(OpenAI)
key = api_key or os.environ.get("NVIDIA_API_KEY")
if not key:
raise ValueError(
Expand Down
22 changes: 17 additions & 5 deletions hancock_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,17 +28,19 @@
export NVIDIA_API_KEY="nvapi-..."
or pass --api-key "nvapi-..."
"""
from __future__ import annotations
import argparse
import hmac
import json
import os
import sys
import readline # noqa: F401 β€” enables arrow-key history in CLI
from hancock_constants import OPENAI_IMPORT_ERROR_MSG, require_openai

try:
from openai import OpenAI
except ImportError:
sys.exit("Run: .venv/bin/pip install openai flask")
except ImportError: # allow import without OpenAI; client factories enforce requirement at runtime
OpenAI = None # type: ignore

# ── Hancock identity ──────────────────────────────────────────────────────────
PENTEST_SYSTEM = """You are Hancock, an elite penetration tester and offensive security specialist built by CyberViser.
Expand Down Expand Up @@ -246,21 +248,28 @@

def make_ollama_client() -> OpenAI:
    """Build an OpenAI-compatible client that talks to the local Ollama server."""
    # Fail fast with a clear message when the openai package is missing.
    require_openai(OpenAI)
    client = OpenAI(base_url=OLLAMA_BASE_URL, api_key="ollama")
    return client


def make_client(api_key: str) -> OpenAI:
    """Build an OpenAI-compatible client pointed at NVIDIA NIM (legacy path)."""
    # Fail fast with a clear message when the openai package is missing.
    require_openai(OpenAI)
    nim_client = OpenAI(base_url=NIM_BASE_URL, api_key=api_key)
    return nim_client


def make_openai_client() -> OpenAI | None:
    """Returns an OpenAI client if credentials are available, else None.

    Unlike the Ollama/NIM factories, OpenAI is a best-effort fallback, so the
    absence of the dependency simply disables this path.
    """
    if OpenAI is None:
        return None
    key = os.getenv("OPENAI_API_KEY", "")
    org = os.getenv("OPENAI_ORG_ID", "")
    # Placeholder keys from sample configs (e.g. "sk-your-...") count as unset.
    if not key or key.startswith("sk-your"):
        return None
    # Reuse the value read above instead of querying the environment a second
    # time; "" collapses to None so no organization header is sent.
    return OpenAI(api_key=key, organization=org or None)


def chat(client: OpenAI, history: list[dict], model: str, stream: bool = True,
Expand Down Expand Up @@ -964,6 +973,9 @@ def main():
parser.add_argument("--model", default=None, help="Model ID (overrides backend default)")
parser.add_argument("--server", action="store_true", help="Run as REST API server")
parser.add_argument("--port", type=int, default=int(os.getenv("HANCOCK_PORT", "5000")))
# Compatibility / no-op flags for autopilot runners
parser.add_argument("--max-autopilot-continues", type=int, default=None, help="Optional autopilot hint (ignored by Hancock)")
parser.add_argument("--allow-all", action="store_true", help="Optional autopilot hint (ignored by Hancock)")
args = parser.parse_args()

backend = os.getenv("HANCOCK_LLM_BACKEND", "ollama").lower()
Expand Down
9 changes: 9 additions & 0 deletions hancock_constants.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
"""Shared constants for Hancock modules."""

OPENAI_IMPORT_ERROR_MSG = "OpenAI client not installed. Run: pip install openai"


def require_openai(openai_cls):
    """Raise ImportError when the OpenAI dependency is missing.

    openai_cls is the imported OpenAI class, or None when the 'openai'
    package could not be imported by the caller.
    """
    if openai_cls is not None:
        return
    raise ImportError(OPENAI_IMPORT_ERROR_MSG)
Loading