Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
73 changes: 47 additions & 26 deletions dev-tools/mcp-mock-server/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,16 +6,19 @@
useful for validating that Lightspeed Core Stack correctly sends auth headers
to MCP servers.

The server runs HTTP and optionally HTTPS on consecutive ports.
Set MCP_HTTP_ONLY=true to disable HTTPS (useful when openssl is unavailable).

Usage:
python server.py [http_port]

Example:
python server.py 3000 # HTTP on 3000, HTTPS on 3001
MCP_HTTP_ONLY=true python server.py 3000 # HTTP only on 3000
"""

import json
import os
import ssl
import subprocess
import sys
Expand Down Expand Up @@ -268,61 +271,79 @@ def run_https_server(port: int, httpd: HTTPServer) -> None:


def main() -> None:
    """Start the mock MCP server with HTTP and optionally HTTPS.

    Reads the HTTP port from argv[1] (default 3000); HTTPS, when enabled,
    uses the next consecutive port. Setting MCP_HTTP_ONLY=true/1/yes skips
    HTTPS entirely, and any failure while generating or loading the
    self-signed certificate degrades gracefully to HTTP-only instead of
    crashing (useful on hosts without openssl).
    """
    http_port = int(sys.argv[1]) if len(sys.argv) > 1 else 3000
    http_only = os.environ.get("MCP_HTTP_ONLY", "").lower() in ("true", "1", "yes")

    # Create HTTP server
    http_server = HTTPServer(("", http_port), MCPMockHandler)

    https_server = None
    if not http_only:
        try:
            https_port = http_port + 1
            https_server = HTTPServer(("", https_port), MCPMockHandler)

            # Generate or load self-signed certificate
            script_dir = Path(__file__).parent
            cert_dir = script_dir / ".certs"
            cert_file, key_file = generate_self_signed_cert(cert_dir)

            # Wrap socket with SSL
            context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
            context.load_cert_chain(cert_file, key_file)
            https_server.socket = context.wrap_socket(
                https_server.socket, server_side=True
            )
        except (subprocess.CalledProcessError, FileNotFoundError, OSError) as e:
            # Cert generation shells out (CalledProcessError/FileNotFoundError
            # when openssl is missing); OSError covers bind/SSL failures.
            # Fall back to HTTP-only rather than aborting the mock server.
            print(f"HTTPS setup failed ({e}), running HTTP only")
            https_server = None

    print("=" * 70)
    if https_server:
        print("MCP Mock Server starting with HTTP and HTTPS")
    else:
        print("MCP Mock Server starting (HTTP only)")
    print("=" * 70)
    print(f"HTTP: http://localhost:{http_port}")
    if https_server:
        print(f"HTTPS: https://localhost:{https_port}")
    print("=" * 70)
    print("Debug endpoints:")
    print(" • /debug/headers - View captured headers")
    print(" • /debug/requests - View request log")
    print("MCP endpoint:")
    print(" • POST to any path (e.g., / or /mcp/v1/list_tools)")
    print("=" * 70)
    if https_server:
        print("Note: HTTPS uses a self-signed certificate (for testing only)")
    print("Press Ctrl+C to stop")
    print()

    # Start HTTP server in a thread (daemon=True so Ctrl+C in the main
    # thread can terminate the process without joining workers).
    http_thread = threading.Thread(
        target=run_http_server, args=(http_port, http_server), daemon=True
    )
    http_thread.start()

    # Start HTTPS server if available
    https_thread = None
    if https_server:
        https_thread = threading.Thread(
            target=run_https_server, args=(https_port, https_server), daemon=True
        )
        https_thread.start()

    try:
        # Keep main thread alive
        http_thread.join()
        if https_thread:
            https_thread.join()
    except KeyboardInterrupt:
        print("\nShutting down mock servers...")
        http_server.shutdown()
        if https_server:
            https_server.shutdown()


if __name__ == "__main__":
Expand Down
31 changes: 31 additions & 0 deletions tests/e2e-prow/rhoai/configs/lightspeed-stack-auth-noop-token.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
name: Lightspeed Core Service (LCS)
service:
host: 0.0.0.0
port: 8080
auth_enabled: false
workers: 1
color_log: true
access_log: true
llama_stack:
# Uses a remote llama-stack service
# The instance would have already been started with a llama-stack-run.yaml file
use_as_library_client: false
# Alternative for "as library use"
# use_as_library_client: true
# library_client_config_path: <path-to-llama-stack-run.yaml-file>
url: http://${env.E2E_LLAMA_HOSTNAME}:8321
api_key: xyzzy
user_data_collection:
feedback_enabled: true
feedback_storage: "/tmp/data/feedback"
transcripts_enabled: true
transcripts_storage: "/tmp/data/transcripts"

# Conversation cache for storing Q&A history
conversation_cache:
type: "sqlite"
sqlite:
db_path: "/tmp/data/conversation-cache.db"

authentication:
module: "noop-with-token"
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
name: Lightspeed Core Service (LCS) - RH Identity Auth
service:
host: 0.0.0.0
port: 8080
auth_enabled: true
workers: 1
color_log: true
access_log: true
llama_stack:
use_as_library_client: false
url: http://${env.E2E_LLAMA_HOSTNAME}:8321
api_key: xyzzy
user_data_collection:
feedback_enabled: true
feedback_storage: "/tmp/data/feedback"
transcripts_enabled: true
transcripts_storage: "/tmp/data/transcripts"
conversation_cache:
type: "sqlite"
sqlite:
db_path: "/tmp/data/conversation-cache.db"
authentication:
module: "rh-identity"
rh_identity_config:
required_entitlements: ["rhel"]
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
name: Lightspeed Core Service (LCS)
service:
host: 0.0.0.0
port: 8080
auth_enabled: false
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

auth_enabled: false likely makes the authentication block a no-op — clarify intent.

When auth is disabled, the configured noop-with-token module is never invoked. If this config is only meant to exercise the invalid-storage-path error path and does not require token-based requests, the authentication block is dead config and can be removed. If token parsing is still needed (e.g., for user-tracking), the intent should be documented with a comment.

Also applies to: 24-25

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@tests/e2e-prow/rhoai/configs/lightspeed-stack-invalid-feedback-storage.yaml`
at line 5, The config currently sets auth_enabled: false while still defining an
authentication block with the noop-with-token module, which is dead config;
either remove the entire authentication block (including the noop-with-token
entry) if you only intend to exercise invalid-storage-path without token
parsing, or set auth_enabled: true and keep the noop-with-token entry and add a
brief comment explaining why token parsing is required for user-tracking; update
the file so auth_enabled and the authentication block are consistent and
document the intent.

workers: 1
color_log: true
access_log: true
llama_stack:
# Uses a remote llama-stack service
# The instance would have already been started with a llama-stack-run.yaml file
use_as_library_client: false
# Alternative for "as library use"
# use_as_library_client: true
# library_client_config_path: <path-to-llama-stack-run.yaml-file>
url: http://${env.E2E_LLAMA_HOSTNAME}:8321
api_key: xyzzy
user_data_collection:
feedback_enabled: true
feedback_storage: "/invalid"
transcripts_enabled: true
transcripts_storage: "/tmp/data/transcripts"

authentication:
# NOTE(review): auth_enabled is false in this config, so the noop-with-token
# module presumably never runs — confirm intent (token parsing for
# user-tracking?) or remove this block to avoid dead config.
module: "noop-with-token"
27 changes: 27 additions & 0 deletions tests/e2e-prow/rhoai/configs/lightspeed-stack-no-cache.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
name: Lightspeed Core Service (LCS)
service:
host: 0.0.0.0
port: 8080
auth_enabled: false
workers: 1
color_log: true
access_log: true
llama_stack:
# Uses a remote llama-stack service
# The instance would have already been started with a llama-stack-run.yaml file
use_as_library_client: false
# Alternative for "as library use"
# use_as_library_client: true
# library_client_config_path: <path-to-llama-stack-run.yaml-file>
url: http://${env.E2E_LLAMA_HOSTNAME}:8321
api_key: xyzzy
user_data_collection:
feedback_enabled: true
feedback_storage: "/tmp/data/feedback"
transcripts_enabled: true
transcripts_storage: "/tmp/data/transcripts"

# NO conversation_cache configured - for testing error handling

authentication:
module: "noop-with-token"
94 changes: 94 additions & 0 deletions tests/e2e-prow/rhoai/configs/lightspeed-stack-rbac.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
name: Lightspeed Core Service (RBAC E2E Tests)
service:
host: 0.0.0.0
port: 8080
auth_enabled: true
workers: 1
color_log: true
access_log: true

llama_stack:
use_as_library_client: false
url: http://${env.E2E_LLAMA_HOSTNAME}:8321
api_key: xyzzy

user_data_collection:
feedback_enabled: true
feedback_storage: "/tmp/data/feedback"
transcripts_enabled: true
transcripts_storage: "/tmp/data/transcripts"

# Conversation cache for storing Q&A history
conversation_cache:
type: "sqlite"
sqlite:
db_path: "/tmp/data/conversation-cache.db"

# JWK token authentication with role extraction
authentication:
module: "jwk-token"
jwk_config:
url: "http://mock-jwks:8000/.well-known/jwks.json"
jwt_configuration:
user_id_claim: "sub"
username_claim: "name"
# Role rules: extract roles from JWT claims
role_rules:
# Grant 'admin' role to users with admin=true in JWT
- jsonpath: "$.admin"
operator: "equals"
value: [true]
roles: ["admin"]
# Grant 'user' role to users with role=user in JWT
- jsonpath: "$.role"
operator: "equals"
value: ["user"]
roles: ["user"]
# Grant 'viewer' role to users with role=viewer in JWT
- jsonpath: "$.role"
operator: "equals"
value: ["viewer"]
roles: ["viewer"]
# Grant 'query_only' role based on permissions array containing 'query'
- jsonpath: "$.permissions[*]"
operator: "contains"
value: "query"
roles: ["query_only"]

# Authorization: map roles to actions
authorization:
access_rules:
# Admin role gets full access
- role: "admin"
actions: ["admin"]
# User role can query, access conversations, and provide feedback
- role: "user"
actions:
- "query"
- "streaming_query"
- "get_conversation"
- "list_conversations"
- "delete_conversation"
- "update_conversation"
- "feedback"
- "get_models"
- "get_tools"
- "info"
- "model_override"
# Viewer role can only read (no mutations)
- role: "viewer"
actions:
- "get_conversation"
- "list_conversations"
- "get_models"
- "get_tools"
- "info"
# Query-only role can only query (no model_override - must use defaults)
- role: "query_only"
actions:
- "query"
- "streaming_query"
# Everyone (*) role gets basic info access
- role: "*"
actions:
- "info"
18 changes: 18 additions & 0 deletions tests/e2e-prow/rhoai/configs/lightspeed-stack.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -23,3 +23,21 @@ user_data_collection:

authentication:
module: "noop"

mcp_servers:
# Mock server with client-provided auth - should appear in mcp-auth/client-options response
- name: "github-api"
provider_id: "model-context-protocol"
url: "http://mcp-mock-server:3000"
authorization_headers:
Authorization: "client"
# Mock server with client-provided auth (different header) - should appear in response
- name: "gitlab-api"
provider_id: "model-context-protocol"
url: "http://mcp-mock-server:3000"
authorization_headers:
X-API-Token: "client"
# Mock server with no auth - should NOT appear in response
- name: "public-api"
provider_id: "model-context-protocol"
url: "http://mcp-mock-server:3000"
Loading
Loading