
Commit aff43bf

chore(ruff): Unsafe fixes for Python 3.10+
This commit applies unsafe ruff fixes from --unsafe-fixes, including:

- Logging format improvements: convert f-string logging statements to % formatting for better logging performance (G004 fix)
- Type annotation modernization

All tests passing after changes.
1 parent 14d852a commit aff43bf
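Why the G004 rewrite helps: an f-string builds the message before log.info() is even called, while %-style arguments are only interpolated if the record is actually emitted. A minimal sketch of the difference, using only the standard library (timings are illustrative and will vary by machine):

```python
import logging
import timeit

log = logging.getLogger("demo")
log.setLevel(logging.WARNING)  # INFO records are discarded below this level

payload = "x" * 10_000

# f-string: the message is built eagerly, even though the record is dropped.
eager = timeit.timeit(lambda: log.info(f"payload: {payload!r}"), number=10_000)

# %-style: formatting is deferred until a handler renders the record,
# which never happens here, so no string is built at all.
lazy = timeit.timeit(lambda: log.info("payload: %r", payload), number=10_000)

print(f"eager={eager:.3f}s  lazy={lazy:.3f}s")
```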

File tree: 2 files changed (+9, -9 lines)

src/unihan_etl/core.py

Lines changed: 8 additions & 8 deletions
@@ -229,13 +229,13 @@ def has_valid_zip(zip_path: StrPath) -> bool:
     zip_path = pathlib.Path(zip_path)

     if not zip_path.is_file():
-        log.info(f"Exists, but is not a file: {zip_path}")
+        log.info("Exists, but is not a file: %s", zip_path)
         return False

     if zipfile.is_zipfile(zip_path):
-        log.info(f"Exists, is valid zip: {zip_path}")
+        log.info("Exists, is valid zip: %s", zip_path)
         return True
-    log.info(f"Not a valid zip: {zip_path}")
+    log.info("Not a valid zip: %s", zip_path)
     return False


@@ -296,7 +296,7 @@ def not_downloaded() -> bool:

     if (no_unihan_files_exist() and not_downloaded()) or not cache:
         log.info("Downloading Unihan.zip...")
-        log.info(f"{url} to {dest}")
+        log.info("%s to %s", url, dest)
         if pathlib.Path(url).is_file():
             shutil.copy(url, dest)
         else:
@@ -345,7 +345,7 @@ def extract_zip(zip_path: pathlib.Path, dest_dir: pathlib.Path) -> zipfile.ZipFile:
         The extracted zip.
     """
     z = zipfile.ZipFile(zip_path)
-    log.info(f"extract_zip dest dir: {dest_dir}")
+    log.info("extract_zip dest dir: %s", dest_dir)
     z.extractall(dest_dir)

     return z
@@ -446,14 +446,14 @@ def export_csv(
     with pathlib.Path(destination).open("w", encoding="utf-8") as f:
         csvwriter = csv.writer(f)
         csvwriter.writerows(listified_data)
-    log.info(f"Saved output to: {destination}")
+    log.info("Saved output to: %s", destination)


 def export_json(data: UntypedNormalizedData, destination: StrPath) -> None:
     """Export UNIHAN in JSON format."""
     with codecs.open(str(destination), "w", encoding="utf-8") as f:
         json.dump(data, f, indent=2, ensure_ascii=False)
-    log.info(f"Saved output to: {destination}")
+    log.info("Saved output to: %s", destination)


 def export_yaml(data: UntypedNormalizedData, destination: StrPath) -> None:
@@ -462,7 +462,7 @@ def export_yaml(data: UntypedNormalizedData, destination: StrPath) -> None:

     with codecs.open(str(destination), "w", encoding="utf-8") as f:
         yaml.safe_dump(data, stream=f, allow_unicode=True, default_flow_style=False)
-    log.info(f"Saved output to: {destination}")
+    log.info("Saved output to: %s", destination)


 def is_default_option(field_name: str, val: t.Any) -> bool:
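For reference, the rewritten calls render exactly the same text as the f-string versions; only the moment of formatting changes. A small illustrative check (the destination path is made up for the example):

```python
import logging
import pathlib

logging.basicConfig(level=logging.INFO, format="%(message)s")
log = logging.getLogger(__name__)

destination = pathlib.Path("unihan.csv")  # hypothetical path, for illustration only

log.info(f"Saved output to: {destination}")   # old style: formatted before the call
log.info("Saved output to: %s", destination)  # new style: formatted by the handler
# Both lines print: Saved output to: unihan.csv
```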

type_stubs/pygments/lexer.pyi

Lines changed: 1 addition & 1 deletion
@@ -19,4 +19,4 @@ class RegexLexer(Lexer): ...

 def bygroups(
     *args: Lexer | _TokenType,
-) -> Generator[None, Lexer | _TokenType, None]: ...
+) -> Generator[None, Lexer | _TokenType]: ...
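The stub change is the "type annotation modernization" half of the commit: Generator's send and return type parameters now have defaults of None (PEP 696 type-parameter defaults, available to type checkers via typeshed and in typing as of Python 3.13), so the trailing None return type can be dropped while the send type stays spelled out. A small illustrative sketch, unrelated to the pygments stub itself and assuming a type checker that understands these defaults:

```python
from collections.abc import Generator


# Fully spelled out: yield type, send type, return type.
def countdown_old(n: int) -> Generator[int, None, None]:
    while n > 0:
        yield n
        n -= 1


# With default type parameters (send=None, return=None), trailing
# defaults may be omitted from the annotation.
def countdown_new(n: int) -> Generator[int]:
    while n > 0:
        yield n
        n -= 1


print(list(countdown_new(3)))  # [3, 2, 1]
```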
