Skip to content

Commit 6005a8d

Browse files
authored
Merge pull request #559 from sunbeam-labs/558-rollback-logging
Rollback logging
2 parents 8a3c214 + db98ea3 commit 6005a8d

File tree

5 files changed

+49
-91
lines changed

5 files changed

+49
-91
lines changed

.github/workflows/integration-tests.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -56,4 +56,4 @@ jobs:
5656
run: |
5757
sunbeam init --data_fp tests/data/reads/ --profile slurm test/
5858
cat test/config.yaml
59-
sunbeam run test --default-resources slurm_account=runner --profile test/
59+
sunbeam run test --default-resources slurm_account=runner --profile test/ --show-failed-logs

sunbeam/logging.py

Lines changed: 8 additions & 57 deletions
Original file line numberDiff line numberDiff line change
@@ -2,68 +2,19 @@
22
from pathlib import Path
33

44

5-
class ConditionalLevelFormatter(logging.Formatter):
6-
def format(self, record):
7-
# For WARNING and above, include "LEVELNAME: message"
8-
if record.levelno >= logging.WARNING:
9-
return f"{record.levelname}: {record.getMessage()}"
10-
# For lower levels like INFO, just return the message
11-
return record.getMessage()
5+
logging.basicConfig(
6+
level=logging.INFO,
7+
format="%(message)s",
8+
)
129

1310

1411
def get_sunbeam_logger() -> logging.Logger:
15-
"""Basic logger for general library output."""
16-
logger = logging.getLogger("sunbeam")
17-
logger.setLevel(logging.INFO)
12+
return logging.getLogger()
1813

19-
if not logger.handlers:
20-
ch = logging.StreamHandler()
21-
ch.setLevel(logging.INFO)
22-
ch.setFormatter(ConditionalLevelFormatter())
23-
logger.addHandler(ch)
2414

25-
return logger
26-
27-
28-
def get_pipeline_logger(log_fp: Path = None) -> logging.Logger:
29-
"""Sets up logging for the main pipeline entry point."""
30-
logger = logging.getLogger("sunbeam.pipeline")
31-
logger.setLevel(logging.DEBUG)
32-
logger.propagate = False
33-
34-
if not logger.handlers:
35-
# Console handler
36-
ch = logging.StreamHandler()
37-
ch.setLevel(logging.INFO)
38-
ch.setFormatter(ConditionalLevelFormatter())
39-
40-
# File handler
41-
if log_fp is None:
42-
raise ValueError(
43-
"log_fp is None but the logger hasn't been initialized with a file handler yet"
44-
)
45-
fh = logging.FileHandler(log_fp, mode="w")
46-
fh.setLevel(logging.DEBUG)
47-
fh.setFormatter(logging.Formatter("[%(asctime)s] %(levelname)s: %(message)s"))
48-
49-
logger.addHandler(ch)
50-
logger.addHandler(fh)
51-
52-
return logger
53-
54-
55-
class ExtensionLoggerAdapter(logging.LoggerAdapter):
56-
def process(self, msg, kwargs):
57-
return f"[{self.extra['ext'].upper()}] {msg}", kwargs
15+
def get_pipeline_logger(log_fp: Path | None = None) -> logging.Logger:
16+
return logging.getLogger()
5817

5918

6019
def get_extension_logger(name: str) -> logging.Logger:
61-
"""Returns a logger for a specific extension under the pipeline."""
62-
logger = logging.getLogger(f"sunbeam.pipeline.extensions.{name}")
63-
logger.setLevel(logging.DEBUG)
64-
65-
# Remove any direct handlers — let it propagate to sunbeam.pipeline
66-
logger.handlers.clear()
67-
logger.propagate = True
68-
69-
return ExtensionLoggerAdapter(logger, {"ext": name})
20+
return logging.getLogger()

sunbeam/scripts/run.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,13 +18,12 @@ def main(argv: list[str] = sys.argv):
1818

1919
sc = SunbeamConfig.from_file(configfile)
2020
Cfg = sc.resolved_paths()
21-
log_fp = Cfg["all"]["log_fp"]
2221

2322
# From here on everything is considered part of the "pipeline"
2423
# This means all logs are handled by the pipeline logger (or pipeline extension loggers)
2524
# You could argue it would make more sense to start this at the actual snakemake call
2625
# but this way we can log some relevant setup information that might be useful on post-mortem analysis
27-
logger = get_pipeline_logger(log_fp)
26+
logger = get_pipeline_logger()
2827

2928
snakefile = Path(__file__).parent.parent / "workflow" / "Snakefile"
3029
if not snakefile.exists():

sunbeam/workflow/Snakefile

Lines changed: 21 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ MIN_RUNTIME = int(os.getenv("SUNBEAM_MIN_RUNTIME", 15))
2020

2121
sc = SunbeamConfig(config)
2222
Cfg = sc.resolved_paths()
23-
logger = get_pipeline_logger(Cfg["all"]["log_fp"])
23+
logger = get_pipeline_logger()
2424
logger.debug(f"Sunbeam configuration: {Cfg}")
2525

2626
# Load extensions
@@ -144,49 +144,39 @@ for sbx_name, sbx_fp in sbxs.items():
144144
include: rule_fp
145145

146146

147-
# ---- Add standard features to rules
148-
### NOT FUNCTIONAL ###
149-
# This is here for future reference but as-is does not do anything
150-
# Effectful lines are commented out to avoid errors/unintended behavior
151-
for rule_obj in workflow.rules:
152-
# Determine wildcard string
153-
wildcards_str = ""
154-
if rule_obj.wildcard_names:
155-
wildcards_str = "_".join(f"{{{w}}}" for w in rule_obj.wildcard_names)
156-
157-
# Add sunbeam log file
158-
log_name = (
159-
f"{rule_obj.name}_{wildcards_str}.log" if wildcards_str else f"{rule_obj.name}.log"
160-
)
161-
# rule_obj.log.update({"main": str(LOG_FP / log_name)})
162-
163-
# Add benchmarks
164-
wildcards_str = "_".join(f"{{{w}}}" for w in rule_obj.wildcard_names)
165-
benchmark_name = (
166-
f"{rule_obj.name}_{wildcards_str}.tsv" if wildcards_str else f"{rule_obj.name}.tsv"
167-
)
168-
169-
# try:
170-
# rule_obj.benchmark = str(BENCHMARK_FP / benchmark_name)
171-
# except AssertionError:
172-
# logger.warning(f"Skipping benchmark assignment for rule '{rule_obj.name}' (unsupported)")
173-
# logger.debug(rule_obj.benchmark)
174-
### END NOT FUNCTIONAL ###
175-
176-
177147
# ---- Rule all: run all targets
178148
rule all:
179149
input:
180150
TARGET_ALL,
181151

182152

153+
rule _test_shell:
154+
output:
155+
QC_FP / ".test_shell",
156+
shell:
157+
"""
158+
echo "Test shell rule executed successfully." > {output}
159+
"""
160+
161+
162+
rule _test_script:
163+
output:
164+
QC_FP / ".test_script",
165+
log:
166+
QC_FP / ".test_script.log",
167+
script:
168+
"scripts/_test_script.py"
169+
170+
183171
rule test:
184172
input:
185173
samples=expand(
186174
QC_FP / "00_samples" / "{sample}_{rp}.fastq.gz",
187175
sample=Samples.keys(),
188176
rp=Pairs,
189177
),
178+
test_shell=QC_FP / ".test_shell",
179+
test_script=QC_FP / ".test_script",
190180
run:
191181
logger.info([x for x in input.samples])
192182

sunbeam/workflow/scripts/_test_script.py

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
from pathlib import Path
2+
3+
4+
l = snakemake.log[0]
5+
o = Path(snakemake.output[0])
6+
7+
with open(l, "w") as log:
8+
log.write("HERE\n")
9+
10+
from sunbeam.logging import get_pipeline_logger, get_extension_logger
11+
from sunbeam.project import SampleList, SunbeamConfig, output_subdir
12+
13+
logger = get_pipeline_logger()
14+
logger.info("This works!")
15+
16+
SampleList()
17+
18+
o.write_text("This is a test output file.\n")

0 commit comments

Comments
 (0)