Commit ac9059f

Update OSM script
1 parent 90e2746 commit ac9059f

File tree

1 file changed: +63 -23 lines changed

utils/get_osm_data.py

Lines changed: 63 additions & 23 deletions
@@ -15,14 +15,10 @@
 from argparse import ArgumentParser
 import logging
 import osmnx as ox
-import networkx as nx
-import matplotlib.patches as mpatches
-import matplotlib.pyplot as plt
-import pandas as pd
-from shapely.geometry import MultiLineString, LineString
-from shapely.ops import linemerge
 
-RGBA_RED = (1, 0, 0, 0.3)
+__version__ = "2025.1.16"
+
+RGBA_RED = (1, 0, 0, 1)
 RGBA_WHITE = (1, 1, 1, 1)
 
 FLAGS_MOTORWAY = ["motorway", "motorway_link"]
@@ -55,12 +51,24 @@
 parser.add_argument(
     "--exclude-motorway",
     action="store_true",
-    help="Exclude motorways from the data",
+    help="Exclude motorways from the data. Default is False",
 )
 parser.add_argument(
     "--exclude-residential",
     action="store_true",
-    help="Exclude residential roads from the data",
+    help="Exclude residential roads from the data. Default is False",
+)
+parser.add_argument(
+    "--allow-duplicates",
+    action="store_true",
+    help="Allow duplicated edges in the data. Default is False",
+)
+parser.add_argument(
+    "-t",
+    "--tolerance",
+    type=int,
+    default=20,
+    help="Radius in meters to merge intersections. For more info, see osmnx documentation.",
 )
 parser = parser.parse_args()
 logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
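For context on the two new options: both boolean flags use action="store_true", so they default to False as the help text says, and --tolerance is forwarded to the consolidation step later in the diff. The sketch below reproduces the flag pattern in isolation; the parser description and the example argument values are illustrative only, not part of the commit.

from argparse import ArgumentParser

# Standalone sketch of the flag pattern added in this commit.
parser = ArgumentParser(description="Download OSM road data (illustrative)")
parser.add_argument(
    "--allow-duplicates",
    action="store_true",  # store_true flags default to False
    help="Allow duplicated edges in the data. Default is False",
)
parser.add_argument(
    "-t",
    "--tolerance",
    type=int,
    default=20,
    help="Radius in meters to merge intersections",
)

args = parser.parse_args(["--allow-duplicates", "-t", "30"])
print(args.allow_duplicates, args.tolerance)  # True 30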
@@ -75,6 +83,8 @@
     logging.ERROR, f"\033[1;31m{logging.getLevelName(logging.ERROR)}\033[1;0m"
 )
 
+logging.info("Welcome to get_osm_data.py v%s", __version__)
+
 # define CUSTOM_FILTER based on FLAGS and args
 FLAGS = FLAGS_NORMAL
 if not parser.exclude_motorway:
@@ -83,20 +93,32 @@
     FLAGS += FLAGS_RESIDENTIAL
 CUSTOM_FILTER = f"[\"highway\"~\"{'|'.join(FLAGS)}\"]"
 logging.info("Custom filter: %s", CUSTOM_FILTER)
-G_ALL = ox.graph_from_place(
+GRAPH = ox.graph_from_place(parser.place, network_type="drive")
+ox.plot_graph(GRAPH, show=False, close=True, save=True, filepath="./original.png")
+logging.info(
+    "Original network has %d nodes and %d edges.",
+    len(GRAPH.nodes),
+    len(GRAPH.edges),
+)
+GRAPH = ox.graph_from_place(
     parser.place, network_type="drive", custom_filter=CUSTOM_FILTER
 )
 logging.info(
-    "Graph created with %d nodes and %d edges.", len(G_ALL.nodes), len(G_ALL.edges)
+    "Custom filtered graph has %d nodes and %d edges.",
+    len(GRAPH.nodes),
+    len(GRAPH.edges),
+)
+GRAPH = ox.consolidate_intersections(
+    ox.project_graph(GRAPH), tolerance=parser.tolerance
 )
-G_ALL = ox.consolidate_intersections(ox.project_graph(G_ALL), tolerance=20)
 logging.info(
-    "Graph consolidated with %d nodes and %d edges.",
-    len(G_ALL.nodes),
-    len(G_ALL.edges),
+    "Consolidated graph has %d nodes and %d edges.",
+    len(GRAPH.nodes),
+    len(GRAPH.edges),
 )
-ox.plot_graph(G_ALL)
-gdf_nodes, gdf_edges = ox.graph_to_gdfs(ox.project_graph(G_ALL, to_latlong=True))
+# plot the graph and save it to file
+ox.plot_graph(GRAPH, show=False, close=True, save=True, filepath="./final.png")
+gdf_nodes, gdf_edges = ox.graph_to_gdfs(ox.project_graph(GRAPH, to_latlong=True))
 # notice that osmid is the index of the gdf_nodes DataFrame, so take it as a column
 gdf_nodes.reset_index(inplace=True)
 gdf_edges.reset_index(inplace=True)
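Read end to end, the new download pipeline is: fetch the unfiltered drive network as a baseline, fetch it again with the highway filter, project and consolidate intersections, then convert back to lat/lon GeoDataFrames. A condensed sketch mirroring the osmnx calls that appear in the diff; the place name and filter string below are examples, not values from the script.

import osmnx as ox

place = "Bologna, Italy"  # example; the script reads the place from the CLI

# Unfiltered drive network, kept only to log and plot a baseline.
graph = ox.graph_from_place(place, network_type="drive")
ox.plot_graph(graph, show=False, close=True, save=True, filepath="./original.png")

# Re-download with a highway filter, then merge intersections within 20 m.
custom_filter = '["highway"~"primary|secondary|tertiary"]'  # example filter
graph = ox.graph_from_place(place, network_type="drive", custom_filter=custom_filter)
graph = ox.consolidate_intersections(ox.project_graph(graph), tolerance=20)

# Back to lat/lon and into GeoDataFrames for the CSV export step.
gdf_nodes, gdf_edges = ox.graph_to_gdfs(ox.project_graph(graph, to_latlong=True))
print(len(gdf_nodes), len(gdf_edges))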
@@ -109,13 +131,31 @@
 gdf_edges = gdf_edges[
     ["u", "v", "length", "oneway", "lanes", "highway", "maxspeed", "name"]
 ]
-# warn for duplicate edges
-if gdf_edges.duplicated(subset=["u", "v"]).sum() > 0:
+if parser.allow_duplicates:
+    N_DUPLICATES = 0
+else:
+    # Check for duplicate edges
+    duplicated_mask = gdf_edges.duplicated(subset=["u", "v"])
+    N_DUPLICATES = duplicated_mask.sum()
+
+if N_DUPLICATES > 0:
     logging.warning(
-        "There are %d duplicated edges. They will be removed.",
-        gdf_edges.duplicated(subset=["u", "v"]).sum(),
+        "There are %d duplicated edges which will be removed. "
+        "Please look at them in the prompted plot.",
+        N_DUPLICATES,
     )
+    # Plot the graph with duplicated edges in red
+    edge_colors = [
+        RGBA_RED if duplicated_mask.iloc[i] else RGBA_WHITE
+        for i in range(len(gdf_edges))
+    ]
+    ox.plot_graph(GRAPH, edge_color=edge_colors)
+
+    # Remove duplicated edges
     gdf_edges = gdf_edges.drop_duplicates(subset=["u", "v"])
 # Save the data
-gdf_nodes.to_csv("nodes.csv", sep=";", index=False)
-gdf_edges.to_csv("edges.csv", sep=";", index=False)
+place = parser.place.split(",")[0].strip().lower()
+gdf_nodes.to_csv(f"{place}_nodes.csv", sep=";", index=False)
+logging.info('Nodes correctly saved in "%s_nodes.csv"', place)
+gdf_edges.to_csv(f"{place}_edges.csv", sep=";", index=False)
+logging.info('Edges correctly saved in "%s_edges.csv"', place)
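The duplicate handling above boils down to pandas' duplicated/drop_duplicates on the (u, v) columns: every repeat of a node pair after its first occurrence is flagged, optionally highlighted in red in a plot, and then dropped. A self-contained sketch with toy data invented for illustration:

import pandas as pd

# Toy edge list with one repeated (u, v) pair, invented for illustration.
gdf_edges = pd.DataFrame(
    {"u": [1, 1, 2], "v": [2, 2, 3], "length": [100.0, 105.0, 50.0]}
)

# duplicated() flags every occurrence of a (u, v) pair after the first one.
duplicated_mask = gdf_edges.duplicated(subset=["u", "v"])
N_DUPLICATES = duplicated_mask.sum()
print(N_DUPLICATES)  # 1

# drop_duplicates() keeps the first occurrence, as the script does.
gdf_edges = gdf_edges.drop_duplicates(subset=["u", "v"])
print(len(gdf_edges))  # 2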
