from torch.utils.data import Dataset
from PIL import Image
import torch.nn.functional as F
import os
from tqdm.notebook import tqdm
import torch
import numpy as np
import random

# Credit - Bahar
# Reads a map file: the first two integers give the grid dimensions,
# followed by one integer layer id per cell in row-major order.
class MapReader:
    def __init__(self, filename):
        with open(filename, 'r') as f:
            data = f.read().split()
        self.size = (int(data[0]), int(data[1]))
        self.data = [[int(data[i * self.size[1] + j + 2]) for j in range(self.size[1])]
                     for i in range(self.size[0])]
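
# Minimal usage sketch (not part of the original cell); 'maps/sample.map' is a
# hypothetical path, assuming a text file laid out as described above.
if os.path.exists('maps/sample.map'):
    reader = MapReader('maps/sample.map')
    print(reader.size)          # (rows, cols)
    print(reader.data[0][:10])  # first few layer ids of the first row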

# Converter object that returns a character (group index) for an OpenStreetMap layer
# Credit - Bahar
class LayerToCharConverter:
    def __init__(self, char_groups=[[0], [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]]):
        self.char_groups = char_groups
        self.char_size = len(self.char_groups)

    def get_char(self, layer):
        # Return the index of the group that contains this layer id
        for i in range(self.char_size):
            if layer in self.char_groups[i]:
                return i
        raise Exception("layer not available in char groups: {}".format(layer))

multi_layer_converter = LayerToCharConverter([[0], [1, 2, 3, 4, 5], [6, 7], [8, 9, 10, 11, 12]])
single_layer_converter = LayerToCharConverter([[0], [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]])

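# Quick sanity check of the converters (not part of the original cell): layer 7 falls
# in the third group of the multi-layer grouping and the second group of the single-layer one.
print(multi_layer_converter.get_char(7))   # 2
print(single_layer_converter.get_char(7))  # 1
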
# New Cell
# Credit - Bahar
# Modifications - Ishaan

class MapsDataset(Dataset):
    def __init__(self, window_size, step_size, converter):
        self.char_size = converter.char_size
        self.converter = converter
        self.window_size = window_size
        self.step_size = step_size
        self.samples = []
        self.block_size = self.window_size[0] * self.window_size[1] - 1

    def __len__(self):
        return len(self.samples)

    def __getitem__(self, idx):
        # Replicate the single-channel window into three identical channels
        sample = (self.samples[idx], self.samples[idx], self.samples[idx])
        return torch.from_numpy(np.array(sample)).unsqueeze(0)
        #flat = torch.from_numpy(np.array(sample)).view(-1)
        #flat = flat[self.perm].float()
        #return flat

    def add(self, mapReader):
        # Slide a window over the map; each cell's group index is rescaled from
        # [0, num_groups - 1] to the range [+1, -1]
        for i in range(0, mapReader.size[0] - self.window_size[0] + 1, self.step_size):
            for j in range(0, mapReader.size[1] - self.window_size[1] + 1, self.step_size):
                self.samples.append([[
                    (self.converter.get_char(mapReader.data[i + x][j + y]) / (len(self.converter.char_groups) - 1)) * -2 + 1
                    for y in range(self.window_size[1])]
                    for x in range(self.window_size[0])])

    def shuffle(self):
        random.shuffle(self.samples)

    def get_train_test(self, train_ratio):
        train_dataset = MapsDataset(self.window_size, self.step_size, self.converter)
        test_dataset = MapsDataset(self.window_size, self.step_size, self.converter)
        train_size = int(len(self.samples) * train_ratio)
        train_dataset.samples = self.samples[:train_size]
        test_dataset.samples = self.samples[train_size:]
        return train_dataset, test_dataset

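# A minimal end-to-end sketch (not part of the original cell), assuming a hypothetical
# map file at 'maps/sample.map' and a 64x64 window with stride 32.
if os.path.exists('maps/sample.map'):
    dataset = MapsDataset((64, 64), 32, multi_layer_converter)
    dataset.add(MapReader('maps/sample.map'))
    dataset.shuffle()
    train_set, test_set = dataset.get_train_test(0.9)
    print(len(train_set), len(test_set))
    print(train_set[0].shape)  # torch.Size([1, 3, 64, 64])
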
# Some utility functions
def img_to_tensor(im):
    # PIL image -> (1, 3, H, W) float tensor with values in [-1, 1]
    return torch.tensor(np.array(im.convert('RGB')) / 255).permute(2, 0, 1).unsqueeze(0) * 2 - 1

def tensor_to_image(t):
    # (1, 3, H, W) tensor in [-1, 1] -> PIL image
    return Image.fromarray(np.array(((t.squeeze().permute(1, 2, 0) + 1) / 2).clip(0, 1) * 255).astype(np.uint8))

def gather(consts: torch.Tensor, t: torch.Tensor):
    """Gather consts for $t$ and reshape to feature map shape."""
    c = consts.gather(-1, t)
    return c.reshape(-1, 1, 1, 1)
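
# Illustrative use of gather() (not part of the original cell): pick per-sample
# schedule constants for a batch of timesteps. 'beta' here is a hypothetical
# linear noise schedule with 1000 steps.
beta = torch.linspace(1e-4, 0.02, 1000)
t = torch.tensor([0, 499, 999])  # one timestep per sample in the batch
print(gather(beta, t).shape)     # torch.Size([3, 1, 1, 1])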

def map_img_to_tensor(im):
    # Grayscale PIL image -> (1, H, W) float tensor with values in [-1, 1]
    return torch.tensor(np.array(im.convert('L')) / 255).unsqueeze(0) * 2 - 1

def map_tensor_to_image(t):
    # (1, H, W) tensor in [-1, 1] -> grayscale PIL image
    return Image.fromarray(np.array(((t.squeeze() + 1) / 2).clip(0, 1) * 255).astype(np.uint8))
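
# Round-trip sanity check (not part of the original cell) on a synthetic grayscale image.
_demo = Image.fromarray((np.random.rand(32, 32) * 255).astype(np.uint8))
_t = map_img_to_tensor(_demo)
print(_t.shape, _t.min().item() >= -1, _t.max().item() <= 1)  # torch.Size([1, 32, 32]) True True
_back = map_tensor_to_image(_t)
print(_back.size)  # (32, 32)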