| 1 | +import logging |
| 2 | +import xml.etree.ElementTree as ET |
| 3 | +from typing import Optional |
| 4 | + |
| 5 | +from addon_service.common.exceptions import ( |
| 6 | + ItemNotFound, |
| 7 | + UnexpectedAddonError, |
| 8 | +) |
| 9 | +from addon_toolkit.interfaces import storage |
| 10 | + |
| 11 | + |
| 12 | +logger = logging.getLogger(__name__) |
| 13 | + |
| 14 | + |
| 15 | +class AzureBlobStorageImp(storage.StorageAddonHttpRequestorImp): |
| 16 | + """Storage on Azure Blob Storage |
| 17 | + |
| 18 | + see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-service-rest-api |
| 19 | + and https://learn.microsoft.com/en-us/azure/app-service/configure-authentication-provider-aad?tabs=workforce-configuration |
| 20 | + """ |
| 21 | + |
| 22 | + API_VERSION = "2025-07-05" |
| 23 | + MAX_RESULTS = 1000 |
| 24 | + |
| 25 | + async def get_external_account_id(self, auth_result_extras: dict[str, str]) -> str: |
| 26 | + if not self.config.external_api_url: |
| 27 | + return "" |
| 28 | + |
| 29 | + try: |
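| | +            # Blob endpoints look like https://<account>.blob.core.windows.net; |
| | +            # the account name is the first dot-separated label of the host. |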
| 30 | + url_parts = self.config.external_api_url.replace("https://", "").split(".") |
| 31 | + if ( |
| 32 | + len(url_parts) >= 4 |
| 33 | + and url_parts[1:4] == ["blob", "core", "windows"] |
| 34 | + and url_parts[0] |
| 35 | + ): |
| 36 | + return url_parts[0] |
| 37 | + except Exception: |
| 38 | +            logger.exception("Failed to parse external API URL for Azure Blob Storage") |
| 39 | + return "" |
| 40 | + |
| 41 | + async def build_wb_config(self) -> dict: |
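| | +        # connected_root_id has the form "<container>" or "<container>:/<base_folder>", |
| | +        # e.g. "photos:/2024" -> container "photos", base_folder "2024". |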
| 42 | + root_parts = self.config.connected_root_id.split(":/") |
| 43 | + return { |
| 44 | + "account_name": self.config.external_account_id, |
| 45 | + "container": root_parts[0], |
| 46 | + "base_folder": root_parts[1] if len(root_parts) > 1 else "", |
| 47 | + } |
| 48 | + |
| 49 | + @property |
| 50 | + def api_headers(self) -> dict[str, str]: |
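| | +        # The x-ms-version header pins the Blob service REST API version used for each request. |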
| 51 | + return {"x-ms-version": self.API_VERSION} |
| 52 | + |
| 53 | + def _parse_containers(self, xml_root: ET.Element) -> list[storage.ItemResult]: |
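| | +        # List Containers responses nest container names under |
| | +        # <EnumerationResults><Containers><Container><Name>. |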
| 54 | + containers = [] |
| 55 | + for container in xml_root.findall(".//Container"): |
| 56 | + name_elem = container.find("Name") |
| 57 | + if name_elem is not None and name_elem.text: |
| 58 | + containers.append( |
| 59 | + storage.ItemResult( |
| 60 | + item_id=f"{name_elem.text}:/", |
| 61 | + item_name=f"{name_elem.text}/", |
| 62 | + item_type=storage.ItemType.FOLDER, |
| 63 | + ) |
| 64 | + ) |
| 65 | + return containers |
| 66 | + |
| 67 | + def _parse_blob_items( |
| 68 | + self, |
| 69 | + xml_root: ET.Element, |
| 70 | + container_name: str, |
| 71 | + prefix: str, |
| 72 | + item_type: Optional[storage.ItemType], |
| 73 | + ) -> list[storage.ItemResult]: |
| 74 | + items = [] |
| 75 | + |
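| | +        # With a delimiter, List Blobs groups deeper paths under <BlobPrefix> |
| | +        # elements; surface each prefix as a folder. |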
| 76 | + for blob_prefix in xml_root.findall(".//BlobPrefix"): |
| 77 | + name_elem = blob_prefix.find("Name") |
| 78 | + if name_elem is not None and name_elem.text: |
| 79 | + folder_path = name_elem.text |
| 80 | + folder_name = folder_path.rstrip("/").split("/")[-1] |
| 81 | + item_result = storage.ItemResult( |
| 82 | + item_id=f"{container_name}:/{folder_path}", |
| 83 | + item_name=f"{folder_name}/", |
| 84 | + item_type=storage.ItemType.FOLDER, |
| 85 | + ) |
| 86 | + if item_type is None or item_result.item_type == item_type: |
| 87 | + items.append(item_result) |
| 88 | + |
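| | +        # <Blob> elements are individual blobs; keep only direct children of the |
| | +        # requested prefix (no remaining "/") and report them as files. |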
| 89 | + for blob in xml_root.findall(".//Blob"): |
| 90 | + name_elem = blob.find("Name") |
| 91 | + if name_elem is not None and name_elem.text: |
| 92 | + blob_name = name_elem.text |
| 93 | + |
| 94 | + if prefix: |
| 95 | + if not blob_name.startswith(prefix): |
| 96 | + continue |
| 97 | + relative_name = blob_name.removeprefix(prefix) |
| 98 | + else: |
| 99 | + relative_name = blob_name |
| 100 | + |
| 101 | + if "/" not in relative_name: |
| 102 | + item_result = storage.ItemResult( |
| 103 | + item_id=f"{container_name}:/{blob_name}", |
| 104 | + item_name=relative_name, |
| 105 | + item_type=storage.ItemType.FILE, |
| 106 | + ) |
| 107 | + if item_type is None or item_result.item_type == item_type: |
| 108 | + items.append(item_result) |
| 109 | + return items |
| 110 | + |
| 111 | + async def list_root_items(self, page_cursor: str = "") -> storage.ItemSampleResult: |
| 112 | + try: |
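| | +            # comp=list against the account endpoint is the List Containers operation. |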
| 113 | + async with self.network.GET( |
| 114 | +                "", |
| 115 | +                headers=self.api_headers, |
| 116 | +                query={"comp": "list", "maxresults": self.MAX_RESULTS}, |
| 117 | + ) as response: |
| 118 | + xml_root = ET.fromstring(await response.text_content()) |
| 119 | + return storage.ItemSampleResult(items=self._parse_containers(xml_root)) |
| 120 | + except Exception as e: |
| 121 | +            logger.error(f"Failed to list containers: {e}") |
| 122 | +            raise UnexpectedAddonError("Failed to list containers") from e |
| 123 | + |
| 124 | + async def list_child_items( |
| 125 | + self, |
| 126 | + item_id: str, |
| 127 | + page_cursor: str = "", |
| 128 | + item_type: Optional[storage.ItemType] = None, |
| 129 | + ) -> storage.ItemSampleResult: |
| 130 | + container_name, prefix = self._parse_item_id(item_id) |
| 131 | + |
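| | +        # restype=container&comp=list is the List Blobs operation; delimiter="/" |
| | +        # collapses deeper blobs into BlobPrefix entries (virtual folders). |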
| 132 | + query_params = { |
| 133 | + "restype": "container", |
| 134 | + "comp": "list", |
| 135 | + "maxresults": self.MAX_RESULTS, |
| 136 | + "delimiter": "/", |
| 137 | + } |
| 138 | + if prefix: |
| 139 | + query_params["prefix"] = prefix |
| 140 | + |
| 141 | + try: |
| 142 | + async with self.network.GET( |
| 143 | + container_name, |
| 144 | + headers=self.api_headers, |
| 145 | + query=query_params, |
| 146 | + ) as response: |
| 147 | + xml_root = ET.fromstring(await response.text_content()) |
| 148 | + items = self._parse_blob_items( |
| 149 | + xml_root, container_name, prefix, item_type |
| 150 | + ) |
| 151 | + return storage.ItemSampleResult(items=items) |
| 152 | + except Exception as e: |
| 153 | +            logger.error(f"Failed to list blobs in {item_id}: {e}") |
| 154 | +            raise UnexpectedAddonError("Failed to list blobs") from e |
| 155 | + |
| 156 | + async def get_item_info(self, item_id: str) -> storage.ItemResult: |
| 157 | + container_name, path = self._parse_item_id(item_id) |
| 158 | + |
| 159 | + try: |
| 160 | + if not path: |
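| | +                # GET <container>?restype=container (Get Container Properties) just confirms the container exists. |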
| 161 | + async with self.network.GET( |
| 162 | + container_name, |
| 163 | + headers=self.api_headers, |
| 164 | + query={"restype": "container"}, |
| 165 | + ) as _: |
| 166 | + return storage.ItemResult( |
| 167 | + item_id=item_id, |
| 168 | + item_name=container_name, |
| 169 | + item_type=storage.ItemType.FOLDER, |
| 170 | + ) |
| 171 | + else: |
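| | +                # GET <container>/<blob>?comp=metadata (Get Blob Metadata) confirms the blob exists. |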
| 172 | + async with self.network.GET( |
| 173 | + f"{container_name}/{path}", |
| 174 | + headers=self.api_headers, |
| 175 | + query={"comp": "metadata"}, |
| 176 | + ) as _: |
| 177 | + return storage.ItemResult( |
| 178 | + item_id=item_id, |
| 179 | + item_name=path.split("/")[-1], |
| 180 | + item_type=storage.ItemType.FILE, |
| 181 | + ) |
| 182 | + except Exception as e: |
| 183 | +            logger.error(f"Failed to get item info for {item_id}: {e}") |
| 184 | +            raise ItemNotFound(f"Item {item_id} not found") from e |
| 185 | + |
| 186 | + def _parse_item_id(self, item_id: str) -> tuple[str, str]: |
| 187 | + """ |
| 188 | + Parse Azure Blob Storage item ID and return container name and path. |
| 189 | + |
| 190 | + Formats: 'container' or 'container:/path' |
| 191 | + Returns: (container_name, path) |
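| | +    Examples: 'photos' -> ('photos', ''); 'photos:/2024/jan' -> ('photos', '2024/jan') |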
| 192 | + """ |
| 193 | + if not item_id: |
| 194 | + raise ValueError( |
| 195 | + "Empty item_id provided. Expected 'container' or 'container:/path'" |
| 196 | + ) |
| 197 | + |
| 198 | + if ":" in item_id: |
| 199 | + container_name, path_part = item_id.split(":", maxsplit=1) |
| 200 | + if not container_name or not path_part.startswith("/"): |
| 201 | + raise ValueError( |
| 202 | + f"Invalid item_id format: {item_id}. Expected 'container' or 'container:/path'" |
| 203 | + ) |
| 204 | + return (container_name, path_part.lstrip("/")) |
| 205 | + else: |
| 206 | + return (item_id, "") |