from __future__ import annotations
import os
+from concurrent.futures import ProcessPoolExecutor
import sys
import logging
import subprocess as sp
PYVER,
)
-    outputs = []
-    for fpin in opt.inputs:
-        fpout = fpin.parent / fpin.name.replace("_async", "")
-        outputs.append(str(fpout))
-        logger.info("converting %s", fpin)
-        with fpin.open() as f:
-            source = f.read()
+    if opt.jobs == 1:
+        logger.debug("multi-processing disabled")
+        # Collect the converted paths here too: `check` below consumes
+        # `outputs`, which would otherwise be unbound in this branch.
+        outputs = [convert(fpin) for fpin in opt.inputs]
+    else:
+        # max_workers=None lets the executor default to the CPU count,
+        # matching the --jobs help text.
+        with ProcessPoolExecutor(max_workers=opt.jobs) as executor:
+            outputs = executor.map(convert, opt.inputs)
-        tree = ast.parse(source, filename=str(fpin))
-        tree = async_to_sync(tree, filepath=fpin)
-        output = tree_to_str(tree, fpin)
+    if opt.check:
+        return check([str(o) for o in outputs])
-        with fpout.open("w") as f:
-            print(output, file=f)
+    return 0
-        sp.check_call(["black", "-q", str(fpout)])
-        if opt.check:
-            return check(outputs)
-    return 0
+def convert(fpin: Path) -> Path:
+    """Convert one ``*_async`` source file to its sync counterpart.
+
+    Reads *fpin*, rewrites its AST via ``async_to_sync``, writes the
+    result next to the input (same name minus the ``_async`` suffix),
+    runs ``black`` on it, and returns the output path.
+
+    Module-level so it is picklable by ProcessPoolExecutor workers.
+    """
+    fpout = fpin.parent / fpin.name.replace("_async", "")
+    logger.info("converting %s", fpin)
+    with fpin.open() as f:
+        source = f.read()
+
+    tree = ast.parse(source, filename=str(fpin))
+    tree = async_to_sync(tree, filepath=fpin)
+    output = tree_to_str(tree, fpin)
+
+    with fpout.open("w") as f:
+        print(output, file=f)
+
+    # Keep the generated file auto-formatted; -q suppresses black's output.
+    # Raises CalledProcessError if black fails, aborting the conversion.
+    sp.check_call(["black", "-q", str(fpout)])
+
+    return fpout
def check(outputs: list[str]) -> int:
parser.add_argument(
"--all", action="store_true", help="process all the files of the project"
)
+ parser.add_argument(
+ "-j",
+ "--jobs",
+ type=int,
+ metavar="N",
+ help=(
+ "process files concurrently using at most N workers; "
+ "if unspecified, the number of processors on the machine will be used"
+ ),
+ )
container = parser.add_mutually_exclusive_group()
container.add_argument(
"--docker",