
Commit 3726ac7

Support dot env file (#72)
Signed-off-by: Manabu McCloskey <[email protected]>
1 parent e697f35 commit 3726ac7

File tree

9 files changed, +271 -59 lines changed


.env.example

Lines changed: 11 additions & 11 deletions
@@ -1,15 +1,15 @@
 # Spark History Server MCP Configuration

 # MCP Server Settings
-MCP_PORT=18888
-MCP_DEBUG=false
+SHS_MCP_PORT=18888 # Port for MCP server (default: 18888)
+SHS_MCP_DEBUG=true # Enable debug mode (default: false)
+SHS_MCP_ADDRESS=0.0.0.0 # Address for MCP server (default: localhost)
+SHS_MCP_TRANSPORT=streamable-http

-# Spark Authentication (Optional)
-# SPARK_USERNAME=your_spark_username
-# SPARK_PASSWORD=your_spark_password
-# SPARK_TOKEN=your_spark_token
-
-# Example for production:
-# SPARK_USERNAME=prod_user
-# SPARK_PASSWORD=secure_password_here
-# SPARK_TOKEN=jwt_token_here
+# Spark History Server Settings
+# SHS_SERVERS_*_URL - URL for a specific server
+# SHS_SERVERS_*_AUTH_USERNAME - Username for a specific server
+# SHS_SERVERS_*_AUTH_PASSWORD - Password for a specific server
+# SHS_SERVERS_*_AUTH_TOKEN - Token for a specific server
+# SHS_SERVERS_*_VERIFY_SSL - Whether to verify SSL for a specific server (true/false)
+# SHS_SERVERS_*_EMR_CLUSTER_ARN - EMR cluster ARN for a specific server
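
The `*` in the SHS_SERVERS_*_... variables stands for a server key defined under `servers:` in config.yaml. As a minimal sketch (not part of this commit), assuming a server key named `local` and placeholder credential values, a .env file could look like:

SHS_MCP_PORT=18888
SHS_SERVERS_LOCAL_URL=http://localhost:18080
SHS_SERVERS_LOCAL_AUTH_USERNAME=spark_user   # placeholder
SHS_SERVERS_LOCAL_AUTH_PASSWORD=change_me    # placeholder
SHS_SERVERS_LOCAL_VERIFY_SSL=false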

README.md

Lines changed: 11 additions & 7 deletions
@@ -262,13 +262,17 @@ servers:
 ```

 ### 🔐 Environment Variables
-```bash
-SHS_SPARK_USERNAME=your_username
-SHS_SPARK_PASSWORD=your_password
-SHS_SPARK_TOKEN=your_jwt_token
-SHS_MCP_PORT=18888
-SHS_MCP_DEBUG=false
-SHS_MCP_ADDRESS=0.0.0.0
+```
+SHS_MCP_PORT - Port for MCP server (default: 18888)
+SHS_MCP_DEBUG - Enable debug mode (default: false)
+SHS_MCP_ADDRESS - Address for MCP server (default: localhost)
+SHS_MCP_TRANSPORT - MCP transport mode (default: streamable-http)
+SHS_SERVERS_*_URL - URL for a specific server
+SHS_SERVERS_*_AUTH_USERNAME - Username for a specific server
+SHS_SERVERS_*_AUTH_PASSWORD - Password for a specific server
+SHS_SERVERS_*_AUTH_TOKEN - Token for a specific server
+SHS_SERVERS_*_VERIFY_SSL - Whether to verify SSL for a specific server (true/false)
+SHS_SERVERS_*_EMR_CLUSTER_ARN - EMR cluster ARN for a specific server
 ```

 ## 🤖 AI Agent Integration

config.yaml

Lines changed: 16 additions & 10 deletions
@@ -5,8 +5,8 @@ servers:
 url: "http://localhost:18080"
 # Optional authentication (can also use environment variables).
 # auth:
-# username: ${SHS_SPARK_USERNAME}
-# password: ${SHS_SPARK_PASSWORD}
+# username: ${SHS_SERVERS_LOCAL_AUTH_USERNAME}
+# password: ${SHS_SERVERS_LOCAL_AUTH_PASSWORD}
 # token: ${SHS_SPARK_TOKEN}

 # Production server example
@@ -15,17 +15,17 @@
 # verify_ssl: true
 # auth:
 # Use environment variables for production
-# username: ${SHS_SPARK_USERNAME}
-# password: ${SHS_SPARK_PASSWORD}
+# username: ${SHS_SERVERS_PRODUCTION_AUTH_USERNAME}
+# password: ${SHS_SERVERS_PRODUCTION_AUTH_PASSWORD}
 # token: ${SHS_SPARK_TOKEN}

 # Staging server example
 # staging:
 # url: "https://staging-spark-history.company.com:18080"
 # verify_ssl: true
 # auth:
-# username: ${SHS_SPARK_USERNAME}
-# token: ${SHS_SPARK_TOKEN}
+# username: ${SHS_SERVERS_STAGING_AUTH_USERNAME}
+# token: ${SHS_SERVERS_STAGING_AUTH_TOKEN}

 # AWS Glue Spark History Server example
 # glue_ec2:
@@ -43,9 +43,15 @@ mcp:
 debug: true
 address: localhost

-# Environment Variables:
-# SHS_SPARK_USERNAME - Default username for authentication
-# SHS_SPARK_PASSWORD - Default password for authentication
-# SHS_SPARK_TOKEN - Default token for authentication
+
+# Available Environment Variables:
 # SHS_MCP_PORT - Port for MCP server (default: 18888)
 # SHS_MCP_DEBUG - Enable debug mode (default: false)
+# SHS_MCP_ADDRESS - Address for MCP server (default: localhost)
+# SHS_MCP_TRANSPORT - MCP transport mode (default: streamable-http)
+# SHS_SERVERS_*_URL - URL for a specific server
+# SHS_SERVERS_*_AUTH_USERNAME - Username for a specific server
+# SHS_SERVERS_*_AUTH_PASSWORD - Password for a specific server
+# SHS_SERVERS_*_AUTH_TOKEN - Token for a specific server
+# SHS_SERVERS_*_VERIFY_SSL - Whether to verify SSL for a specific server (true/false)
+# SHS_SERVERS_*_EMR_CLUSTER_ARN - EMR cluster ARN for a specific server

pyproject.toml

Lines changed: 1 addition & 0 deletions
@@ -25,6 +25,7 @@ dependencies = [
 "requests~=2.32.4",
 "pydantic~=2.4",
 "boto3~=1.34",
+"pydantic-settings>=2.9.1",
 ]

 [build-system]

src/spark_history_mcp/config/config.py

Lines changed: 37 additions & 24 deletions
@@ -2,53 +2,55 @@
 from typing import Dict, List, Literal, Optional

 import yaml
-from pydantic import BaseModel, Field
+from pydantic import Field
+from pydantic_settings import (
+    BaseSettings,
+    PydanticBaseSettingsSource,
+    SettingsConfigDict,
+)


-class AuthConfig(BaseModel):
+class AuthConfig(BaseSettings):
     """Authentication configuration for the Spark server."""

-    username: str = Field(None, alias="username")
-    password: str = Field(None, alias="password")
-    token: str = Field(None, alias="token")
+    username: Optional[str] = Field(None)
+    password: Optional[str] = Field(None)
+    token: Optional[str] = Field(None)

-    def __init__(self, **data):
-        # Support environment variables for sensitive data
-        if not data.get("username"):
-            data["username"] = os.getenv("SHS_SPARK_USERNAME")
-        if not data.get("password"):
-            data["password"] = os.getenv("SHS_SPARK_PASSWORD")
-        if not data.get("token"):
-            data["token"] = os.getenv("SHS_SPARK_TOKEN")
-        super().__init__(**data)

-
-class ServerConfig(BaseModel):
+class ServerConfig(BaseSettings):
     """Server configuration for the Spark server."""

     url: Optional[str] = None
-    auth: AuthConfig = Field(None, alias="auth")
-    default: bool = Field(False, alias="default")
-    verify_ssl: bool = Field(True, alias="verify_ssl")
+    auth: AuthConfig = Field(default_factory=AuthConfig, exclude=True)
+    default: bool = False
+    verify_ssl: bool = True
     emr_cluster_arn: Optional[str] = None # EMR specific field


-class McpConfig(BaseModel):
+class McpConfig(BaseSettings):
     """Configuration for the MCP server."""

     transports: List[Literal["stdio", "sse", "streamable-http"]] = Field(
         default_factory=list
     )
-    address: str = Field(default="localhost")
-    port: str = Field(default="18888")
-    debug: bool = Field(default=False)
+    address: Optional[str] = "localhost"
+    port: Optional[int | str] = "18888"
+    debug: Optional[bool] = False
+    model_config = SettingsConfigDict(extra="ignore")


-class Config(BaseModel):
+class Config(BaseSettings):
     """Configuration for the Spark client."""

     servers: Dict[str, ServerConfig]
     mcp: Optional[McpConfig] = None
+    model_config = SettingsConfigDict(
+        env_prefix="SHS_",
+        env_nested_delimiter="_",
+        env_file=".env",
+        env_file_encoding="utf-8",
+    )

     @classmethod
     def from_file(cls, file_path: str) -> "Config":
@@ -60,3 +62,14 @@ def from_file(cls, file_path: str) -> "Config":
             config_data = yaml.safe_load(f)

         return cls.model_validate(config_data)
+
+    @classmethod
+    def settings_customise_sources(
+        cls,
+        settings_cls: type[BaseSettings],
+        init_settings: PydanticBaseSettingsSource,
+        env_settings: PydanticBaseSettingsSource,
+        dotenv_settings: PydanticBaseSettingsSource,
+        file_secret_settings: PydanticBaseSettingsSource,
+    ) -> tuple[PydanticBaseSettingsSource, ...]:
+        return env_settings, dotenv_settings, init_settings, file_secret_settings
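
For orientation, here is a minimal sketch (not part of this commit) of how the SHS_ prefix and the "_" nested delimiter are expected to map environment variables onto the new Config model. The server key "local" and all values are hypothetical; per settings_customise_sources, real environment variables take precedence over .env values, which take precedence over init kwargs.

import os

from spark_history_mcp.config.config import Config

# Hypothetical overrides; pydantic-settings reads them (plus any .env file)
# when the model is instantiated.
os.environ["SHS_MCP_DEBUG"] = "true"                            # -> config.mcp.debug
os.environ["SHS_SERVERS_LOCAL_URL"] = "http://localhost:18080"  # -> config.servers["local"].url

config = Config()
print(config.mcp.debug, config.servers["local"].url)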

src/spark_history_mcp/core/app.py

Lines changed: 4 additions & 6 deletions
@@ -65,12 +65,10 @@ async def app_lifespan(server: FastMCP) -> AsyncIterator[AppContext]:
     yield AppContext(clients=clients, default_client=default_client)


-def run():
-    config = Config.from_file("config.yaml")
-
-    mcp.settings.host = os.getenv("SHS_MCP_ADDRESS", config.mcp.address)
-    mcp.settings.port = int(os.getenv("SHS_MCP_PORT", config.mcp.port))
-    mcp.settings.debug = bool(os.getenv("SHS_MCP_DEBUG", config.mcp.debug))
+def run(config: Config):
+    mcp.settings.host = config.mcp.address
+    mcp.settings.port = int(config.mcp.port)
+    mcp.settings.debug = bool(config.mcp.debug)
     mcp.run(transport=os.getenv("SHS_MCP_TRANSPORT", config.mcp.transports[0]))

src/spark_history_mcp/core/main.py

Lines changed: 7 additions & 1 deletion
@@ -1,8 +1,10 @@
 """Main entry point for Spark History Server MCP."""

+import json
 import logging
 import sys

+from spark_history_mcp.config.config import Config
 from spark_history_mcp.core import app

 # Configure logging
@@ -16,7 +18,11 @@ def main():
     """Main entry point."""
     try:
         logger.info("Starting Spark History Server MCP...")
-        app.run()
+        config = Config.from_file("config.yaml")
+        if config.mcp.debug:
+            logger.setLevel(logging.DEBUG)
+        logger.debug(json.dumps(json.loads(config.model_dump_json()), indent=4))
+        app.run(config)
     except Exception as e:
         logger.error(f"Failed to start MCP server: {e}")
         sys.exit(1)
