
Commit d34329b

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 6312ba9 commit d34329b
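The diff below contains only mechanical reformatting: pre-commit.ci collapsed calls that had been wrapped over several lines back onto single lines once they fit the configured line length. As a rough way to reproduce this kind of fix locally (assuming the repository's hooks include a Black-style formatter with a roughly 120-character limit; the actual hook set lives in the repo's .pre-commit-config.yaml and is not part of this diff):

    # Sketch only, not taken from this commit: how a Black-style formatter collapses
    # a wrapped call once it fits within the configured line length.
    # Assumes the `black` package is installed; the repo's real hooks may differ.
    import black

    src = (
        "tile = ShapesModel.parse(\n"
        "    GeoDataFrame(geometry=shapes.loc[instance_id]), transformations=transformations\n"
        ")\n"
    )

    # With a 120-character limit the call fits on one line, matching the change below.
    print(black.format_str(src, mode=black.Mode(line_length=120)))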

2 files changed: 13 additions & 37 deletions


notebooks/examples/densenet.ipynb

Lines changed: 9 additions & 28 deletions
@@ -332,7 +332,6 @@
 "import os\n",
 "from tempfile import TemporaryDirectory\n",
 "\n",
-"import pandas as pd\n",
 "import requests\n",
 "\n",
 "url = \"https://s3.embl.de/spatialdata/spatialdata-sandbox/generated_data/xenium_visium_integration/xenium_rep1_celltype_major.csv\"\n",
@@ -363,9 +362,7 @@
 ],
 "source": [
 "xenium_sdata[\"table\"].obs = pd.merge(xenium_sdata[\"table\"].obs, df, on=\"cell_id\")\n",
-"xenium_sdata[\"table\"].obs[\"celltype_major\"] = (\n",
-"    xenium_sdata[\"table\"].obs[\"celltype_major\"].astype(\"category\")\n",
-")"
+"xenium_sdata[\"table\"].obs[\"celltype_major\"] = xenium_sdata[\"table\"].obs[\"celltype_major\"].astype(\"category\")"
 ]
 },
 {
@@ -936,9 +933,7 @@
 "outputs": [],
 "source": [
 "class TilesDataModule(LightningDataModule):\n",
-"    def __init__(\n",
-"        self, batch_size: int, num_workers: int, dataset: torch.utils.data.Dataset\n",
-"    ):\n",
+"    def __init__(self, batch_size: int, num_workers: int, dataset: torch.utils.data.Dataset):\n",
 "        super().__init__()\n",
 "\n",
 "        self.batch_size = batch_size\n",
@@ -1015,25 +1010,19 @@
 "        self.loss_function = CrossEntropyLoss()\n",
 "\n",
 "        # make the model\n",
-"        self.model = DenseNet121(\n",
-"            spatial_dims=2, in_channels=in_channels, out_channels=num_classes\n",
-"        )\n",
+"        self.model = DenseNet121(spatial_dims=2, in_channels=in_channels, out_channels=num_classes)\n",
 "\n",
 "    def forward(self, x) -> torch.Tensor:\n",
 "        return self.model(x)\n",
 "\n",
-"    def _compute_loss_from_batch(\n",
-"        self, batch: Dict[str, torch.Tensor], batch_idx: int\n",
-"    ) -> float:\n",
+"    def _compute_loss_from_batch(self, batch: Dict[str, torch.Tensor], batch_idx: int) -> float:\n",
 "        inputs = batch[0]\n",
 "        labels = batch[1]\n",
 "\n",
 "        outputs = self.model(inputs)\n",
 "        return self.loss_function(outputs, labels)\n",
 "\n",
-"    def training_step(\n",
-"        self, batch: Dict[str, torch.Tensor], batch_idx: int\n",
-"    ) -> Dict[str, float]:\n",
+"    def training_step(self, batch: Dict[str, torch.Tensor], batch_idx: int) -> Dict[str, float]:\n",
 "        # compute the loss\n",
 "        loss = self._compute_loss_from_batch(batch=batch, batch_idx=batch_idx)\n",
 "\n",
@@ -1124,9 +1113,7 @@
 "print(f\"Using {BATCH_SIZE} batch size.\")\n",
 "print(f\"Using {NUM_WORKERS} workers.\")\n",
 "\n",
-"tiles_data_module = TilesDataModule(\n",
-"    batch_size=BATCH_SIZE, num_workers=NUM_WORKERS, dataset=dataset\n",
-")\n",
+"tiles_data_module = TilesDataModule(batch_size=BATCH_SIZE, num_workers=NUM_WORKERS, dataset=dataset)\n",
 "\n",
 "tiles_data_module.setup()\n",
 "train_dl = tiles_data_module.train_dataloader()\n",
@@ -1870,9 +1857,7 @@
 "source": [
 "small_dataset = ImageTilesDataset(\n",
 "    sdata=small_sdata,\n",
-"    regions_to_images={\n",
-"        \"cell_boundaries\": \"CytAssist_FFPE_Human_Breast_Cancer_full_image\"\n",
-"    },\n",
+"    regions_to_images={\"cell_boundaries\": \"CytAssist_FFPE_Human_Breast_Cancer_full_image\"},\n",
 "    regions_to_coordinate_systems={\"cell_boundaries\": \"aligned\"},\n",
 "    tile_dim_in_units=100,\n",
 "    rasterize=True,\n",
@@ -1925,17 +1910,13 @@
 "    region, instance_id = small_dataset.dataset_index.iloc[i][[\"region\", \"instance_id\"]]\n",
 "    shapes = small_sdata[region]\n",
 "    transformations = get_transformation(shapes, get_all=True)\n",
-"    tile = ShapesModel.parse(\n",
-"        GeoDataFrame(geometry=shapes.loc[instance_id]), transformations=transformations\n",
-"    )\n",
+"    tile = ShapesModel.parse(GeoDataFrame(geometry=shapes.loc[instance_id]), transformations=transformations)\n",
 "    # BUG: we need to explicitly remove the coordinate system global if we want to combine\n",
 "    # images and shapes plots into a single subplot\n",
 "    # https://github.com/scverse/spatialdata-plot/issues/176\n",
 "    sdata_tile[\"cell_boundaries\"] = tile\n",
 "    if \"global\" in get_transformation(sdata_tile[\"cell_boundaries\"], get_all=True):\n",
-"        sd.transformations.remove_transformation(\n",
-"            sdata_tile[\"cell_boundaries\"], \"global\"\n",
-"        )\n",
+"        sd.transformations.remove_transformation(sdata_tile[\"cell_boundaries\"], \"global\")\n",
 "    sdata_tile.pl.render_images().pl.render_shapes(\n",
 "        # outline_color='predicted_celltype_major', # not yet supported: https://github.com/scverse/spatialdata-plot/issues/137\n",
 "        outline_width=3.0,\n",

notebooks/examples/napari_rois.ipynb

Lines changed: 4 additions & 9 deletions
@@ -1118,9 +1118,7 @@
 "\n",
 "for shape in [\"lasso\", \"my_shapes\", \"my_shapes_2\"]:\n",
 "    for polygon in visium_sdata[shape].geometry:\n",
-"        table = polygon_query(\n",
-"            visium_sdata, polygon=polygon, target_coordinate_system=\"aligned\"\n",
-"        )[\"table\"]\n",
+"        table = polygon_query(visium_sdata, polygon=polygon, target_coordinate_system=\"aligned\")[\"table\"]\n",
 "        filtered_tables_unmerged[shape].append(table)\n",
 "    filtered_tables[shape] = ad.concat(filtered_tables_unmerged[shape])"
 ]
@@ -1277,9 +1275,7 @@
 "categories = [\"unassigned\"] + list(filtered_tables.keys())\n",
 "n = len(visium_sdata[\"table\"])\n",
 "\n",
-"visium_sdata[\"table\"].obs[\"annotation\"] = pd.Categorical(\n",
-"    [\"unassigned\" for _ in range(n)], categories=categories\n",
-")\n",
+"visium_sdata[\"table\"].obs[\"annotation\"] = pd.Categorical([\"unassigned\" for _ in range(n)], categories=categories)\n",
 "\n",
 "for shape, subtable in filtered_tables.items():\n",
 "    in_shape = subtable.obs.index\n",
@@ -1364,9 +1360,8 @@
 "plt.figure(figsize=(12, 7))\n",
 "ax = plt.gca()\n",
 "(\n",
-"    visium_sdata.pl.render_images(\n",
-"        \"CytAssist_FFPE_Human_Breast_Cancer_full_image\"\n",
-"    ).pl.render_shapes(\"CytAssist_FFPE_Human_Breast_Cancer\", color=\"annotation\")\n",
+"    visium_sdata.pl.render_images(\"CytAssist_FFPE_Human_Breast_Cancer_full_image\")\n",
+"    .pl.render_shapes(\"CytAssist_FFPE_Human_Breast_Cancer\", color=\"annotation\")\n",
 "    # .pl.render_shapes(\"lasso\", color=\"#00000040\")\n",
 "    # .pl.render_shapes(\"my_shapes\", color=\"#00000040\")\n",
 "    # .pl.render_shapes(\"my_shapes_2\", color=\"#00000040\")\n",
