-
Notifications
You must be signed in to change notification settings - Fork 35
Expand file tree
/
Copy pathgenerate_pyproject.py
More file actions
125 lines (113 loc) · 4.48 KB
/
generate_pyproject.py
File metadata and controls
125 lines (113 loc) · 4.48 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""Generate pyproject-pypi-cpu-py{VER}.toml from template.
PyTorch version defaults can be customized via command-line arguments.
For Python 3.14+, add new arguments following the pattern:
--torch-min-py314 X.X.X --torch-max-py314 X.X
--torch-min-py314-macos X.X.X --torch-max-py314-macos X.X
"""
import argparse
from pathlib import Path
from jinja2 import Environment, FileSystemLoader
def main():
    """Render pyproject-pypi-{cpu,gpu}-py{VER}.toml files from the Jinja2 template.

    Registers ``--torch-{min,max}-py{VER}[-macos]`` CLI flags for every supported
    Python version, renders the ``pyproject-pypi.toml.j2`` template once per
    (variant, python-version) pair, and writes the results into ``--output-dir``.

    To support a new Python version (e.g. 3.14), add its minor digits to
    ``python_versions`` below — the argument registration, template variables,
    and rendering loops all derive from that single tuple.
    """
    parser = argparse.ArgumentParser(
        description="Generate PyPI pyproject.toml files from template",
        epilog="NOTE: For future Python versions (3.14+), add new arguments following the pattern above.",
    )

    # Python versions encoded as minor-version digits ("312" -> CPython 3.12).
    python_versions = ("312", "313")

    # Register min/max PyTorch bounds per Python version and platform family.
    # Linux/Windows flags have no suffix (latest with CUDA 12.9); macOS flags
    # carry "-macos" (now matches Linux/Windows since PyTorch 2.8+ ships on
    # PyPI for macOS ARM).
    for py_ver in python_versions:
        minor = py_ver[1:]  # "312" -> "12"
        for suffix, platform in (("", "Linux/Windows"), ("-macos", "macOS")):
            parser.add_argument(
                f"--torch-min-py{py_ver}{suffix}",
                default="2.8.0",
                help=f"Minimum PyTorch version for Python 3.{minor} ({platform}): >=X.X.X",
            )
            parser.add_argument(
                f"--torch-max-py{py_ver}{suffix}",
                default="2.9",
                help=f"Maximum PyTorch version for Python 3.{minor} ({platform}): <X.X (exclusive)",
            )
    parser.add_argument("--output-dir", default=".", help="Output directory")
    args = parser.parse_args()

    # Setup Jinja2: templates live in the parent directory of this script.
    template_dir = Path(__file__).parent.parent
    output_dir = Path(args.output_dir)
    env = Environment(
        loader=FileSystemLoader(template_dir), trim_blocks=True, lstrip_blocks=True
    )
    template = env.get_template("pyproject-pypi.toml.j2")

    # Template variables: argparse converts "--torch-min-py312" to
    # args.torch_min_py312, which already matches the template variable
    # names, so forward every torch_* attribute as-is.
    common_vars = {k: v for k, v in vars(args).items() if k.startswith("torch_")}

    # Variant -> human-readable description used in the generated metadata.
    variants = (
        ("cpu", "CPU-only version for Linux, macOS Intel, and macOS ARM"),
        ("gpu", "GPU (CUDA) version for Linux and Windows"),
    )

    # Emit one config per (variant, python version); iteration order (all CPU
    # configs first, then GPU) matches the historical output order.
    for variant, description_suffix in variants:
        for py_ver in python_versions:
            minor = int(py_ver[1:])
            py_ver_min = f"3.{minor}"
            py_ver_max = f"3.{minor + 1}"  # exclusive upper bound
            config = template.render(
                variant=variant,
                description_suffix=description_suffix,
                python_version_min=py_ver_min,
                python_version_max=py_ver_max,
                **common_vars,
            )
            out_name = f"pyproject-pypi-{variant}-py{py_ver}.toml"
            (output_dir / out_name).write_text(config)
            print(
                f"Generated {out_name} (requires-python: >={py_ver_min},<{py_ver_max})"
            )
# Script entry point: run generation only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()