Skip to content

Commit ff6a940

Browse files
committed
Update config file approach
1 parent a0e727b commit ff6a940

File tree

3 files changed

+60
-4
lines changed

3 files changed

+60
-4
lines changed

src/spark_history_mcp/config/config.py

Lines changed: 49 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,50 @@
11
import os
2-
from typing import Dict, List, Literal, Optional
2+
from typing import Any, Dict, List, Literal, Optional, Tuple
33

44
import yaml
55
from pydantic import Field
6+
from pydantic.fields import FieldInfo
67
from pydantic_settings import (
78
BaseSettings,
89
PydanticBaseSettingsSource,
910
SettingsConfigDict,
1011
)
1112

1213

14+
class YamlConfigSettingsSource(PydanticBaseSettingsSource):
    """Custom settings source that loads configuration from a YAML file.

    The file path is determined by the SHS_MCP_CONFIG environment variable,
    defaulting to 'config.yaml' if not set.
    """

    def get_field_value(
        self, field: FieldInfo, field_name: str
    ) -> Tuple[Any, str, bool]:
        # Per-field lookup is not used by this source; __call__ supplies the
        # whole mapping at once instead.
        return None, field_name, False

    def __call__(self) -> Dict[str, Any]:
        """Load and return the YAML configuration data.

        Returns:
            The parsed top-level mapping, or an empty dict when the default
            config file is absent or the file is empty.

        Raises:
            FileNotFoundError: if SHS_MCP_CONFIG points to a missing file.
            ValueError: if the YAML root is not a mapping.
        """
        config_path = os.getenv("SHS_MCP_CONFIG", "config.yaml")
        is_explicitly_set = "SHS_MCP_CONFIG" in os.environ

        if not os.path.exists(config_path):
            # If the config file was explicitly specified but doesn't exist, fail fast
            if is_explicitly_set:
                raise FileNotFoundError(
                    f"Config file not found: {config_path}\n"
                    f"Specified via: SHS_MCP_CONFIG environment variable"
                )
            # If using default and it doesn't exist, return empty (will use defaults)
            return {}

        with open(config_path, "r") as f:
            config_data = yaml.safe_load(f)

        # An empty file parses to None — treat it as "no overrides".
        if config_data is None:
            return {}

        # Fail fast on a non-mapping root (e.g. a YAML list or bare scalar)
        # instead of handing pydantic-settings an unusable structure, which
        # would surface as a confusing validation error later.
        if not isinstance(config_data, dict):
            raise ValueError(
                f"Config file must contain a YAML mapping at the top level, "
                f"got {type(config_data).__name__}: {config_path}"
            )

        return config_data
46+
47+
1348
class AuthConfig(BaseSettings):
1449
"""Authentication configuration for the Spark server."""
1550

@@ -77,4 +112,16 @@ def settings_customise_sources(
77112
dotenv_settings: PydanticBaseSettingsSource,
78113
file_secret_settings: PydanticBaseSettingsSource,
79114
) -> tuple[PydanticBaseSettingsSource, ...]:
80-
return env_settings, dotenv_settings, init_settings, file_secret_settings
115+
# Precedence order (highest to lowest):
116+
# 1. Environment variables
117+
# 2. .env file
118+
# 3. YAML config file (from SHS_MCP_CONFIG)
119+
# 4. Init settings (constructor arguments)
120+
# 5. File secrets
121+
return (
122+
env_settings,
123+
dotenv_settings,
124+
YamlConfigSettingsSource(settings_cls),
125+
init_settings,
126+
file_secret_settings,
127+
)

src/spark_history_mcp/core/app.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,8 @@ def default(self, obj):
3333

3434
@asynccontextmanager
3535
async def app_lifespan(server: FastMCP) -> AsyncIterator[AppContext]:
36-
config = Config.from_file("config.yaml")
36+
# Config() automatically loads from SHS_MCP_CONFIG env var (set in main.py)
37+
config = Config()
3738

3839
clients: dict[str, SparkRestClient] = {}
3940
default_client = None

src/spark_history_mcp/core/main.py

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,11 +30,19 @@ def main():
3030
try:
3131
logger.info("Starting Spark History Server MCP...")
3232
logger.info(f"Using config file: {args.config}")
33-
config = Config.from_file(args.config)
33+
34+
# Set the config file path in environment for Pydantic Settings
35+
os.environ["SHS_MCP_CONFIG"] = args.config
36+
37+
# Now Config() will automatically load from the specified YAML file
38+
config = Config()
3439
if config.mcp.debug:
3540
logger.setLevel(logging.DEBUG)
3641
logger.debug(json.dumps(json.loads(config.model_dump_json()), indent=4))
3742
app.run(config)
43+
except FileNotFoundError as e:
44+
logger.error(f"Configuration error: {e}")
45+
sys.exit(1)
3846
except Exception as e:
3947
logger.error(f"Failed to start MCP server: {e}")
4048
sys.exit(1)

0 commit comments

Comments
 (0)