|
| 1 | +--- |
| 2 | +categories: ["API Documentation", "REST API"] |
| 3 | +tags: ["REST API", "GPU", "Providers", "Guide"] |
| 4 | +weight: 2 |
| 5 | +title: "GPU Availability Guide" |
| 6 | +linkTitle: "GPU Availability" |
| 7 | +description: "How to find and filter GPU resources across Akash Network providers" |
| 8 | +--- |
| 9 | + |
| 10 | +GPU availability data is embedded in provider responses — there is no standalone GPU endpoint. This guide shows how to query providers and filter for specific GPU resources using the [Providers API](/docs/api-documentation/rest-api/providers-api). |
| 11 | + |
| 12 | +Base URL: `https://console-api.akash.network` |
| 13 | + |
| 14 | +--- |
| 15 | + |
| 16 | +## Finding GPU Providers |
| 17 | + |
| 18 | +### Step 1: Fetch All Providers |
| 19 | + |
| 20 | +```bash |
| 21 | +curl https://console-api.akash.network/v1/providers |
| 22 | +``` |
| 23 | + |
| 24 | +Each provider in the response includes a `gpuModels` array: |
| 25 | + |
| 26 | +```json |
| 27 | +{ |
| 28 | + "owner": "akash1u5cdg7k3gl43mukca4aeultuz8x2j68mgwn28e", |
| 29 | + "isOnline": true, |
| 30 | + "gpuModels": [ |
| 31 | + { |
| 32 | + "vendor": "nvidia", |
| 33 | + "model": "rtx4060ti", |
| 34 | + "ram": "16Gi", |
| 35 | + "interface": "PCIe" |
| 36 | + } |
| 37 | + ], |
| 38 | + "stats": { |
| 39 | + "gpu": { "active": 0, "available": 1, "pending": 0, "total": 1 } |
| 40 | + } |
| 41 | +} |
| 42 | +``` |
| 43 | + |
| 44 | +### Step 2: Filter for Online Providers with GPUs |
| 45 | + |
| 46 | +```javascript |
| 47 | +const response = await fetch("https://console-api.akash.network/v1/providers"); |
| 48 | +const providers = await response.json(); |
| 49 | + |
| 50 | +const gpuProviders = providers.filter( |
| 51 | + (p) => p.isOnline && p.gpuModels.length > 0 |
| 52 | +); |
| 53 | +``` |
| 54 | + |
| 55 | +### Step 3: Filter by GPU Vendor |
| 56 | + |
| 57 | +```javascript |
| 58 | +const nvidiaProviders = gpuProviders.filter((p) => |
| 59 | + p.gpuModels.some((gpu) => gpu.vendor === "nvidia") |
| 60 | +); |
| 61 | +``` |
| 62 | + |
| 63 | +### Step 4: Filter by Specific GPU Model |
| 64 | + |
| 65 | +```javascript |
| 66 | +const t4Providers = gpuProviders.filter((p) => |
| 67 | + p.gpuModels.some((gpu) => gpu.model === "t4") |
| 68 | +); |
| 69 | +``` |
| 70 | + |
| 71 | +### Step 5: Filter by GPU Memory |
| 72 | + |
| 73 | +```javascript |
| 74 | +const highMemProviders = gpuProviders.filter((p) => |
| 75 | + p.gpuModels.some((gpu) => gpu.ram === "80Gi") |
| 76 | +); |
| 77 | +``` |
| 78 | + |
| 79 | +--- |
| 80 | + |
| 81 | +## Checking GPU Availability |
| 82 | + |
| 83 | +The `stats.gpu` field on each provider shows real-time GPU capacity: |
| 84 | + |
| 85 | +| Field | Description | |
| 86 | +|-------|-------------| |
| 87 | +| `stats.gpu.total` | Total GPU units on the provider | |
| 88 | +| `stats.gpu.active` | Currently leased GPUs | |
| 89 | +| `stats.gpu.available` | GPUs available for new deployments | |
| 90 | +| `stats.gpu.pending` | GPUs in pending state | |
| 91 | + |
| 92 | +Filter for providers with available GPUs: |
| 93 | + |
| 94 | +```javascript |
| 95 | +const availableGpuProviders = gpuProviders.filter( |
| 96 | + (p) => p.stats.gpu.available > 0 |
| 97 | +); |
| 98 | +``` |
| 99 | + |
| 100 | +--- |
| 101 | + |
| 102 | +## Complete Example |
| 103 | + |
| 104 | +This script defines a reusable `findAvailableGpuProviders(vendor, model)` helper, then uses it to find all online providers with available NVIDIA GPUs and print their details: |
| 105 | + |
| 106 | +```javascript |
| 107 | +async function findAvailableGpuProviders(vendor, model) { |
| 108 | + const response = await fetch("https://console-api.akash.network/v1/providers"); |
| 109 | + const providers = await response.json(); |
| 110 | + |
| 111 | + return providers |
| 112 | + .filter((p) => { |
| 113 | + if (!p.isOnline || p.gpuModels.length === 0) return false; |
| 114 | + if (p.stats.gpu.available === 0) return false; |
| 115 | + |
| 116 | + return p.gpuModels.some((gpu) => { |
| 117 | + const vendorMatch = !vendor || gpu.vendor === vendor; |
| 118 | + const modelMatch = !model || gpu.model === model; |
| 119 | + return vendorMatch && modelMatch; |
| 120 | + }); |
| 121 | + }) |
| 122 | + .map((p) => ({ |
| 123 | + owner: p.owner, |
| 124 | + hostUri: p.hostUri, |
| 125 | + gpuModels: p.gpuModels, |
| 126 | + availableGpus: p.stats.gpu.available, |
| 127 | + totalGpus: p.stats.gpu.total, |
| 128 | + })); |
| 129 | +} |
| 130 | + |
| 131 | +// Find all providers with available NVIDIA GPUs |
| 132 | +const providers = await findAvailableGpuProviders("nvidia"); |
| 133 | +console.log(`Found ${providers.length} providers with available NVIDIA GPUs:`); |
| 134 | +providers.forEach((p) => { |
| 135 | + console.log(` ${p.owner} — ${p.availableGpus}/${p.totalGpus} GPUs available`); |
| 136 | + p.gpuModels.forEach((gpu) => { |
| 137 | + console.log(` ${gpu.vendor} ${gpu.model} (${gpu.ram}, ${gpu.interface})`); |
| 138 | + }); |
| 139 | +}); |
| 140 | + |
| 141 | +// Find providers with available T4 GPUs specifically |
| 142 | +const t4Providers = await findAvailableGpuProviders("nvidia", "t4"); |
| 143 | +console.log(`\nFound ${t4Providers.length} providers with available T4 GPUs`); |
| 144 | +``` |
| 145 | + |
| 146 | +--- |
| 147 | + |
| 148 | +## GPU Model Fields Reference |
| 149 | + |
| 150 | +Each entry in the `gpuModels` array contains: |
| 151 | + |
| 152 | +| Field | Type | Description | Examples | |
| 153 | +|-------|------|-------------|----------| |
| 154 | +| `vendor` | string | GPU manufacturer | `"nvidia"` | |
| 155 | +| `model` | string | GPU model identifier | `"rtx4060ti"`, `"t4"`, `"a100"`, `"h100"` | |
| 156 | +| `ram` | string | GPU memory | `"16Gi"`, `"40Gi"`, `"80Gi"` | |
| 157 | +| `interface` | string | Connection interface | `"PCIe"`, `"SXM"` | |
| 158 | + |
| 159 | +--- |
| 160 | + |
| 161 | +**See also:** [Providers API Reference](/docs/api-documentation/rest-api/providers-api) for the complete provider response schema. |