Diff summary: 5 files changed, +54 −0, under examples/platform_specific_deps

.gitignore:
bazel-*
# examples/platform_specific_deps/BUILD.bazel
#
# Defines a single py_test that imports the platform-specific `gptqmodel`
# dependency resolved through the "pip" hub repo (see MODULE.bazel), proving
# that the correct per-platform wheel from requirements.txt is usable.
load("@pip//:requirements.bzl", "requirement")
load("@rules_python//python:defs.bzl", "py_test")

py_test(
    name = "test",
    srcs = ["pytest_main.py"],
    main = "pytest_main.py",
    deps = [
        requirement("gptqmodel"),
    ],
)
# examples/platform_specific_deps/MODULE.bazel
#
# Bazel module for the platform_specific_deps example: registers two Python
# toolchains and a pip hub whose requirements.txt selects per-platform
# gptqmodel wheels via PEP 508 environment markers.
module(
    name = "platform_specific_deps",
    version = "0.0.0",
    compatibility_level = 1,
)

bazel_dep(name = "bazel_skylib", version = "1.7.1")
bazel_dep(name = "rules_python", version = "0.0.0")

# TODO: Replace with builtin uv support if it supports platform specific requirements output
bazel_dep(name = "rules_uv", version = "0.65.0")

# The rules_python version above is a placeholder; this example always builds
# against the rules_python checkout two directories up.
local_path_override(
    module_name = "rules_python",
    path = "../..",
)

python = use_extension("@rules_python//python/extensions:python.bzl", "python")
python.toolchain(
    python_version = "3.11",
)
python.toolchain(
    python_version = "3.9",
)
use_repo(python, "python_3_9")

pip = use_extension("@rules_python//python/extensions:pip.bzl", "pip")
pip.parse(
    hub_name = "pip",
    # Parsed under 3.9; requirements.txt carries markers for other versions.
    python_version = "3.9",
    requirements_lock = "requirements.txt",
)
use_repo(pip, "pip")
# Smoke test: prove that the platform-specific `gptqmodel` wheel chosen by
# the environment markers in requirements.txt can actually be imported.
import gptqmodel  # noqa: F401 -- imported only to verify the wheel resolves

print("worked")
--index-url https://pypi.org/simple

# Platform-specific gptqmodel 2.0.0 wheels (CUDA 12.6 / torch 2.6, Linux
# x86_64 only), selected at install time by PEP 508 environment markers:
# one direct-URL wheel per CPython minor version.
gptqmodel @ https://github.com/ModelCloud/GPTQModel/releases/download/v2.0.0/gptqmodel-2.0.0+cu126torch2.6-cp310-cp310-linux_x86_64.whl ; python_full_version == '3.10.*' and sys_platform == 'linux'
gptqmodel @ https://github.com/ModelCloud/GPTQModel/releases/download/v2.0.0/gptqmodel-2.0.0+cu126torch2.6-cp311-cp311-linux_x86_64.whl ; python_full_version == '3.11.*' and sys_platform == 'linux'
gptqmodel @ https://github.com/ModelCloud/GPTQModel/releases/download/v2.0.0/gptqmodel-2.0.0+cu126torch2.6-cp312-cp312-linux_x86_64.whl ; python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'linux'
gptqmodel @ https://github.com/ModelCloud/GPTQModel/releases/download/v2.0.0/gptqmodel-2.0.0+cu126torch2.6-cp39-cp39-linux_x86_64.whl ; python_full_version < '3.10' and sys_platform == 'linux'
0 commit comments