-
Notifications
You must be signed in to change notification settings - Fork 36
Add RAPIDS Doctor Check for cuGraph-PyG and pylibwholegraph #418
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from 11 commits
40cf546
1aef531
f44a527
2622444
3db88f6
cb93dc2
d9e7947
96b7b26
6be24a0
0c623f7
6c3ecd0
23b948b
fa5a5a6
25f0541
02c66e4
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,84 @@ | ||
| # SPDX-FileCopyrightText: Copyright (c) 2026, NVIDIA CORPORATION. | ||
| # SPDX-License-Identifier: Apache-2.0 | ||
|
|
||
| """ | ||
| Smoke check for `rapids doctor` (RAPIDS CLI). | ||
|
|
||
| See: https://github.com/rapidsai/rapids-cli#check-plugins | ||
| """ | ||
|
|
||
|
|
||
def cugraph_pyg_smoke_check(**kwargs):
    """
    A quick check to ensure cugraph-pyg can be imported and its core
    submodules are loadable.

    When a CUDA-enabled PyTorch is present, additionally spins up a
    single-process NCCL process group and verifies a minimal GraphStore
    edge-index round-trip; otherwise only warns.

    Raises
    ------
    ImportError
        If cugraph-pyg or its dependencies cannot be imported.
    AssertionError
        If package metadata is missing or the GraphStore round-trip
        returns an unexpected shape.
    """
    try:
        import cugraph_pyg

        # Ensure core submodules load (touches pylibwholegraph, torch-geometric, etc.)
        import cugraph_pyg.data
        import cugraph_pyg.tensor
    except ImportError as e:
        raise ImportError(
            "cugraph-pyg or its dependencies could not be imported. "
            "Tip: install with `pip install cugraph-pyg` or use a RAPIDS conda environment."
        ) from e

    if not hasattr(cugraph_pyg, "__version__") or not cugraph_pyg.__version__:
        raise AssertionError(
            "cugraph-pyg smoke check failed: __version__ not found or empty"
        )

    from cugraph_pyg.utils.imports import import_optional, MissingModule

    torch = import_optional("torch")

    if isinstance(torch, MissingModule) or not torch.cuda.is_available():
        import warnings

        warnings.warn(
            "PyTorch with CUDA support is required to use cuGraph-PyG. "
            "Please install PyTorch from PyPI or Conda-Forge."
        )
    else:
        import os
        from cugraph_pyg.data import GraphStore

        # Snapshot the distributed-launch variables so they can be restored
        # exactly. None means "was not set before"; on restore such variables
        # are deleted instead of being clobbered with an empty string.
        env_names = (
            "MASTER_ADDR",
            "MASTER_PORT",
            "LOCAL_RANK",
            "WORLD_SIZE",
            "LOCAL_WORLD_SIZE",
            "RANK",
        )
        saved_env = {name: os.environ.get(name) for name in env_names}

        try:
            os.environ["MASTER_ADDR"] = "localhost"
            os.environ["MASTER_PORT"] = "29505"
            os.environ["LOCAL_RANK"] = "0"
            os.environ["WORLD_SIZE"] = "1"
            os.environ["LOCAL_WORLD_SIZE"] = "1"
            os.environ["RANK"] = "0"
            torch.distributed.init_process_group("nccl")

            graph_store = GraphStore()
            graph_store.put_edge_index(
                torch.tensor([[0, 1], [1, 2]]),
                ("person", "knows", "person"),
                "coo",
                False,
                (3, 3),
            )
            edge_index = graph_store.get_edge_index(
                ("person", "knows", "person"), "coo"
            )
            assert edge_index.shape == torch.Size([2, 2])
        finally:
            # Restore the environment first so it is clean even if teardown
            # of the process group fails.
            for name, value in saved_env.items():
                if value is None:
                    os.environ.pop(name, None)
                else:
                    os.environ[name] = value
            # destroy_process_group() raises if init_process_group never
            # succeeded (e.g. NCCL failure), so guard the teardown.
            if torch.distributed.is_initialized():
                torch.distributed.destroy_process_group()
||
| Original file line number | Diff line number | Diff line change | ||||||||||||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| @@ -0,0 +1,41 @@ | ||||||||||||||||||||||||||||
| # SPDX-FileCopyrightText: Copyright (c) 2026, NVIDIA CORPORATION. | ||||||||||||||||||||||||||||
| # SPDX-License-Identifier: Apache-2.0 | ||||||||||||||||||||||||||||
|
|
||||||||||||||||||||||||||||
| """ | ||||||||||||||||||||||||||||
| Smoke check for `rapids doctor` (RAPIDS CLI). | ||||||||||||||||||||||||||||
|
|
||||||||||||||||||||||||||||
| See: https://github.com/rapidsai/rapids-cli#check-plugins | ||||||||||||||||||||||||||||
| """ | ||||||||||||||||||||||||||||
|
|
||||||||||||||||||||||||||||
|
|
||||||||||||||||||||||||||||
def pylibwholegraph_smoke_check(**kwargs):
    """
    A quick check to ensure pylibwholegraph can be imported and the
    native library loads correctly.

    Also checks for a CUDA-enabled PyTorch; if torch is missing or CUDA
    is unavailable, a warning is issued rather than failing the check.

    Raises
    ------
    ImportError
        If pylibwholegraph or its dependencies cannot be imported.
    AssertionError
        If the package reports no (or an empty) version string.
    """
    try:
        import pylibwholegraph
    except ImportError as e:
        raise ImportError(
            "pylibwholegraph or its dependencies could not be imported. "
            "Tip: install with `pip install pylibwholegraph` or use a RAPIDS conda environment."
        ) from e

    if not hasattr(pylibwholegraph, "__version__") or not pylibwholegraph.__version__:
        raise AssertionError(
            "pylibwholegraph smoke check failed: __version__ not found or empty"
        )

    try:
        import torch

        # Use an explicit raise instead of `assert` so that the single
        # `except ImportError` below handles both the missing-package and
        # no-CUDA cases (an AssertionError would escape the handler, and
        # asserts are stripped entirely under `python -O`).
        if not torch.cuda.is_available():
            raise ImportError("torch.cuda is not available")
    except ImportError:
        import warnings

        # NOTE(review): the original handler body is not visible here;
        # this mirrors the warn-and-continue pattern of the sibling
        # cugraph-pyg smoke check — confirm against the PR branch.
        warnings.warn(
            "PyTorch with CUDA support is required to use pylibwholegraph. "
            "Please install PyTorch from PyPI or Conda-Forge."
        )
Or more directly: replace the assert with an explicit if/raise ImportError so the single except ImportError handles both the missing-package and no-CUDA cases.
Uh oh!
There was an error while loading. Please reload this page.