diff --git a/.github/workflows/cmake_examples.yml b/.github/workflows/cmake_examples.yml
index 9f3397bf7..3e3352ae6 100644
--- a/.github/workflows/cmake_examples.yml
+++ b/.github/workflows/cmake_examples.yml
@@ -7,7 +7,7 @@ on:
     branches: [ "main" ]

 env:
-  BUILD_TYPE: Debug
+  BUILD_TYPE: Release

 jobs:
   build:
diff --git a/examples/data/coordinates.csv b/examples/data/coordinates.csv
new file mode 100644
index 000000000..aedb92446
--- /dev/null
+++ b/examples/data/coordinates.csv
@@ -0,0 +1,121 @@
+nodeId;lat;lon
+119;110;90
+118;100;90
+117;90;90
+116;80;90
+115;70;90
+114;60;90
+113;50;90
+112;40;90
+111;30;90
+110;20;90
+109;10;90
+108;0;90
+107;110;80
+106;100;80
+105;90;80
+104;80;80
+103;70;80
+102;60;80
+101;50;80
+100;40;80
+99;30;80
+98;20;80
+97;10;80
+96;0;80
+95;110;70
+94;100;70
+93;90;70
+92;80;70
+91;70;70
+90;60;70
+89;50;70
+88;40;70
+87;30;70
+86;20;70
+85;10;70
+84;0;70
+83;110;60
+82;100;60
+81;90;60
+80;80;60
+79;70;60
+78;60;60
+77;50;60
+76;40;60
+75;30;60
+74;20;60
+73;10;60
+72;0;60
+71;110;50
+70;100;50
+69;90;50
+68;80;50
+67;70;50
+66;60;50
+65;50;50
+64;40;50
+63;30;50
+62;20;50
+61;10;50
+60;0;50
+59;110;40
+28;40;20
+27;30;20
+26;20;20
+25;10;20
+24;0;20
+23;110;10
+22;100;10
+11;110;0
+21;90;10
+10;100;0
+20;80;10
+9;90;0
+19;70;10
+8;80;0
+18;60;10
+7;70;0
+0;0;0
+13;10;10
+1;10;0
+14;20;10
+12;0;10
+2;20;0
+15;30;10
+3;30;0
+16;40;10
+4;40;0
+17;50;10
+5;50;0
+6;60;0
+29;50;20
+30;60;20
+31;70;20
+32;80;20
+33;90;20
+34;100;20
+35;110;20
+36;0;30
+37;10;30
+38;20;30
+39;30;30
+40;40;30
+41;50;30
+42;60;30
+43;70;30
+44;80;30
+45;90;30
+46;100;30
+47;110;30
+48;0;40
+49;10;40
+50;20;40
+51;30;40
+52;40;40
+53;50;40
+54;60;40
+55;70;40
+56;80;40
+57;90;40
+58;100;40
diff --git a/examples/slow_charge_rb.cpp b/examples/slow_charge_rb.cpp
index 9480b11e6..fde3c1e11 100644
--- a/examples/slow_charge_rb.cpp
+++ b/examples/slow_charge_rb.cpp
@@ -114,6 +114,7 @@ int main(int argc, char** argv) {
   for (const auto& [nodeId, node] : graph.nodeSet()) {
     auto& rb = dynamic_cast(*node);
     rb.setCapacity(degreeVector(nodeId));
+    rb.setTransportCapacity(degreeVector(nodeId));
   }

   std::cout << "Done." << std::endl;
@@ -189,7 +190,7 @@ int main(int argc, char** argv) {
   // std::vector deltas;

   // lauch progress bar
-  std::thread t([]() {
+  std::jthread t([]() {
     while (progress < MAX_TIME && !bExitFlag) {
       printLoadingBar(progress, MAX_TIME);
       std::this_thread::sleep_for(std::chrono::milliseconds(1500));
@@ -302,7 +303,6 @@ int main(int argc, char** argv) {
   // std::cout << "Probability of turning " << std::quoted(turnNames[i]) << ": " << value * 100 << "%\n";
   // ++i;
   // }

-  t.join();
   std::cout << '\n';
   std::cout << "Done." << std::endl;
diff --git a/examples/slow_charge_tl.cpp b/examples/slow_charge_tl.cpp
index 65ca4a27d..3a246bf7c 100644
--- a/examples/slow_charge_tl.cpp
+++ b/examples/slow_charge_tl.cpp
@@ -174,6 +174,7 @@ int main(int argc, char** argv) {
   for (const auto& [nodeId, node] : graph.nodeSet()) {
     auto& tl = dynamic_cast(*node);
     tl.setCapacity(degreeVector(nodeId));
+    tl.setTransportCapacity(degreeVector(nodeId));
     double value = -1.;
     while (value < 0.) {
       value = random();
@@ -292,7 +293,7 @@ int main(int argc, char** argv) {
   // std::vector deltas;

   // lauch progress bar
-  std::thread t([]() {
+  std::jthread t([]() {
     while (progress < MAX_TIME && !bExitFlag) {
       printLoadingBar(progress, MAX_TIME);
       std::this_thread::sleep_for(std::chrono::milliseconds(1500));
@@ -449,7 +450,6 @@ int main(int argc, char** argv) {
   // std::cout << "Probability of turning " << std::quoted(turnNames[i]) << ": " << value * 100 << "%\n";
   // ++i;
   // }

-  t.join();
   std::cout << '\n';
   std::cout << "Done." << std::endl;
diff --git a/examples/stalingrado.cpp b/examples/stalingrado.cpp
index 7eb3d57d1..36e181158 100644
--- a/examples/stalingrado.cpp
+++ b/examples/stalingrado.cpp
@@ -94,7 +94,7 @@ int main() {
   auto& spire = dynamic_cast(*dynamics.graph().streetSet().at(19));

   // lauch progress bar
-  std::thread t([MAX_TIME]() {
+  std::jthread t([MAX_TIME]() {
     while (progress < MAX_TIME) {
       printLoadingBar(progress, MAX_TIME);
       std::this_thread::sleep_for(std::chrono::milliseconds(1500));
@@ -118,7 +118,6 @@ int main() {
     dynamics.evolve(false);
     ++progress;
   }
-  t.join();

   return 0;
 }
diff --git a/requirements.txt b/requirements.txt
index 34ed8b5d1..ee197d239 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,5 @@
+contextily
+geopandas
 matplotlib
 networkx
 numpy
diff --git a/utils/functions.py b/utils/functions.py
index a426a99d6..99982715a 100644
--- a/utils/functions.py
+++ b/utils/functions.py
@@ -3,21 +3,23 @@
 """

 import networkx as nx
+import pandas as pd

-def create_graph_from_adj(adj: list, coord: list):
-    """Creates a graph given adjacency matrix and a list of coordinates"""
+def create_graph_from_adj(adj: list, coord: pd.DataFrame):
+    """Creates a graph given adjacency matrix and a dataframe of coordinates"""
     n = len(adj)
     graph = nx.DiGraph()
     graph.add_nodes_from(range(n))
     for i in range(n):
-        for j in range(i + 1, n):
+        for j in range(n):
             if adj[i, j] > 0:
                 graph.add_edge(i, j, color="g", weight=adj[i, j])
                 graph.add_edge(j, i, color="g", weight=adj[j, i])

     edges = graph.edges()
     pos = {}
-    for i in range(n):
-        pos[i] = coord[i, :]
+    # coord has id as index with lat, lon columns
+    for node in graph.nodes():
+        pos[node] = (coord.loc[node]["lon"], coord.loc[node]["lat"])

     return (graph, edges, pos)
diff --git a/utils/gifter.py b/utils/gifter.py
index c1371590c..e699a7ead 100644
--- a/utils/gifter.py
+++ b/utils/gifter.py
@@ -3,9 +3,10 @@
 With this, one can see the evolution of the network over time.
 """

-import multiprocessing
+import multiprocessing as mp
+import pathlib
 import platform
-import os
+from tkinter.filedialog import askopenfilename
 from argparse import ArgumentParser
 import networkx as nx
 import matplotlib.pyplot as plt
@@ -14,20 +15,15 @@
 from tqdm import tqdm
 from PIL import Image, ImageFont
 import pandas as pd
+import geopandas as gpd
+import contextily as ctx

 from functions import create_graph_from_adj

 # Constants
-TIME_BEGIN = 20 * 3600  # None to take the last frames
-N_FRAMES = 1
-TIME_GRANULARITY = 300
 COLORMAP = colormaps["RdYlGn_r"]
-# INPUT_FILE_NAME = 'output_sctl_noOPT/densities.csv'
-INPUT_FOLDER = "./05"
 OUTPUT_FILE_NAME = "evolution.gif"
-N_CORES = 3
-
 # if on wsl
 FONT_PATH = ""
 if platform.system() == "Linux":
@@ -36,33 +32,57 @@
     FONT_PATH = "/System/Library/Fonts/Supplemental/Arial.ttf"

-def create_image(__df, __time, _graph, _pos, _n):
+def create_image(__df, __time, _graph, _pos, _edges, _n, _gdf):
     """
     Generates and saves an image of a graph with edges colored based on density.

     Parameters:
         __df (DataFrame): A pandas DataFrame containing the data.
         __time (int): The specific time (in seconds) for which the graph is to be generated.
+        _graph (Graph): A networkx Graph object.
+        _pos (dict): A dictionary containing the positions of the nodes.
+        _edges (list): A list containing the edges.
+        _n (int): The number of nodes in the graph.
+        _gdf (GeoDataFrame): A geopandas GeoDataFrame containing the coordinates of the nodes.

     Returns:
         tuple: A tuple containing the time in seconds and the path to the saved image.
     """
     for col in __df.columns:
         index = int(col)
-        density = __df.loc[__time][col] / (225 / 2000)
+        density = __df.loc[__time][col]  # / (225 / 2000)
         src = index // _n
         dst = index % _n
         # set color of edge based on density using a colormap from green to red
         _graph[src][dst]["color"] = COLORMAP(density)
     # draw graph with colors
-    colors = [_graph[u][v]["color"] for u, v in edges]
+    colors = [_graph[u][v]["color"] for u, v in _edges]
     # draw graph
-    _, ax = plt.subplots(figsize=(10, 10))
-    nx.draw(_graph, _pos, edge_color=colors, with_labels=True, ax=ax)
+    _, ax = plt.subplots()
+    if _gdf is not None:
+        limits = _gdf.total_bounds + np.array([-0.001, -0.001, 0.001, 0.001])
+        ax.set_xlim(limits[0], limits[2])
+        ax.set_ylim(limits[1], limits[3])
+    nx.draw_networkx_edges(
+        _graph,
+        _pos,
+        edgelist=_edges,
+        edge_color=colors,
+        ax=ax,
+        connectionstyle="arc3,rad=0.05",
+        arrowsize=5,
+        arrowstyle="->",
+    )
+    nx.draw_networkx_nodes(_graph, _pos, ax=ax, node_size=69)
+    nx.draw_networkx_labels(_graph, _pos, ax=ax, font_size=5)
+    if _gdf is not None:
+        # _gdf.plot(ax=ax)
+        ctx.add_basemap(
+            ax, crs=_gdf.crs.to_string(), source=ctx.providers.OpenStreetMap.Mapnik
+        )
     plt.box(False)
     h_time = f"{(__time / 3600):.2f}"
-    print(h_time)
-    plt.title(f"Time: ${(__time / 3600):.2f} \\ h$")
+    plt.title(f"Time: {(__time // 3600):02d}:{(__time % 3600) // 60:02d} (hh:mm)")
     plt.savefig(f"./temp_img/{h_time}.png", dpi=300, bbox_inches="tight")

     return (__time, f"./temp_img/{h_time}.png")
@@ -72,71 +92,108 @@ def create_image(__df, __time, _graph, _pos, _n):
         description="Script to generate a road network evolution GIF."
     )
     parser.add_argument(
-        "--adj_matrix",
+        "--adj-matrix",
         type=str,
-        required=True,
+        default=None,
+        required=False,
         help="Path to the adjacency matrix file.",
     )
     parser.add_argument(
         "--coordinates",
         type=str,
-        required=True,
+        default=None,
+        required=False,
         help="Path to the coordinates file.",
     )
+    parser.add_argument(
+        "--densities",
+        type=str,
+        default=None,
+        required=False,
+        help="Path to the input density csv.",
+    )
+    parser.add_argument(
+        "--use-basemap",
+        type=bool,
+        default=False,
+        required=False,
+        help="Use basemap for plotting.",
+    )
+    parser.add_argument(
+        "--time-granularity",
+        type=int,
+        default=300,
+        required=False,
+        help="Time granularity in seconds.",
+    )
+    parser.add_argument(
+        "--time-begin",
+        type=int,
+        default=None,
+        required=False,
+        help="Time to begin plotting. If None, it will take the last N_FRAMES.",
+    )
+    parser.add_argument(
+        "--n-frames",
+        type=int,
+        default=10,
+        required=False,
+        help="Number of frames to generate.",
+    )
     args = parser.parse_args()

     # Load the graph
     # read the adjacency matrix discarding the first line
-    adj = np.loadtxt(args.adj_matrix, skiprows=1)
+    RESPONSE = args.adj_matrix
+    if RESPONSE is None:
+        RESPONSE = askopenfilename(
+            title="Select the adjacency matrix file",
+            filetypes=[("DAT files", "*.dat")],
+        )
+    adj = np.loadtxt(RESPONSE, skiprows=1)
     n = len(adj)
-    # read the coordinates
-    coord = np.loadtxt(args.coordinates)
+    RESPONSE = args.coordinates
+    if RESPONSE is None:
+        RESPONSE = askopenfilename(
+            title="Select the coordinates file", filetypes=[("CSV files", "*.csv")]
+        )
+    coord = pd.read_csv(RESPONSE, sep=";")
+    coord = coord.set_index("nodeId")
+    RESPONSE = args.densities
+    if RESPONSE is None:
+        RESPONSE = askopenfilename(
+            title="Select the input densities file",
+            filetypes=[("CSV files", "*.csv")],
+        )
+    GDF = None
+    if args.use_basemap:
+        # draw city map
+        GDF = gpd.GeoDataFrame(
+            coord, geometry=gpd.points_from_xy(coord.lon, coord.lat), crs="EPSG:4326"
+        )
     G, edges, pos = create_graph_from_adj(adj, coord)
     font = ImageFont.truetype(FONT_PATH, 35)

-    # open densities.csv file
-    # read the densities
-    # for each time, create a new image with the graph and the densities
-    # save the images in the temp_img folder
-    # create a gif from the images in the temp_img folder
-    df_array = []
-    for folder in os.listdir(INPUT_FOLDER):
-        if os.path.isdir(os.path.join(INPUT_FOLDER, folder)):
-            # clear temp
-            temp = pd.read_csv(
-                INPUT_FOLDER + "/" + folder + "/" + "densities.csv", sep=";"
-            )
-            temp = temp.set_index("time")
-            # remove last column
-            temp = temp.iloc[:, :-1]
-            # temp = temp * 1000
-
-            # take only rows with index % 300 == 0
-            temp = temp[temp.index % TIME_GRANULARITY == 0]
-            df_array.append(temp)
-
-    if len(df_array) > 0:
-        df = pd.concat(df_array)
-        df = df.groupby(df.index).mean()
-
-        # print(df.head())
-    # df = pd.read_csv(INPUT_FILE_NAME, sep=";")
-    # df = df.set_index('time')
-    # # remove last column
-    # df = df.iloc[:,:-1]
+    df = pd.read_csv(RESPONSE, sep=";")
+    df = df.set_index("time")
+    # remove last column
+    df = df.iloc[:, :-1]
     # take only rows with index % 300 == 0
-    # df = df[df.index % TIME_GRANULARITY == 0]
-    if TIME_BEGIN is not None:
-        df = df[df.index > TIME_BEGIN]
+    df = df[df.index % args.time_granularity == 0]
+    if args.time_begin is not None:
+        df = df[df.index > args.time_begin]
         # take N_FRAMES from the beginning
-        df = df.head(N_FRAMES)
+        df = df.head(args.n_frames)
     else:
         # take the last N_FRAMES
-        df = df.tail(N_FRAMES)
+        df = df.tail(args.n_frames)
+
+    # check if the temp_img folder exists, if not create it
+    pathlib.Path("./temp_img").mkdir(parents=True, exist_ok=True)

-    with multiprocessing.Pool(N_CORES) as pool:
+    with mp.Pool() as pool:
         frames = []
         jobs = []
@@ -149,7 +206,9 @@ def create_image(__df, __time, _graph, _pos, _n):
                         time,
                         G,
                         pos,
+                        edges,
                         n,
+                        GDF,
                     ),
                 )
             )
@@ -157,11 +216,10 @@ def create_image(__df, __time, _graph, _pos, _n):
         # use tqdm and take results:
         results = [job.get() for job in tqdm(jobs)]
         results = sorted(results, key=lambda x: x[0])
-        # frames = [result[1] for result in results]
         frames = [Image.open(result[1]) for result in tqdm(results)]

     # if NFRAMES is 1, save a png image
-    if N_FRAMES == 1:
+    if args.n_frames == 1:
         frames[0].save(OUTPUT_FILE_NAME.replace(".gif", ".png"), format="PNG")
     else:
         # Save into a GIF file that loops forever
diff --git a/utils/plotter.py b/utils/plotter.py
index 4981c07f7..e35c99f18 100644
--- a/utils/plotter.py
+++ b/utils/plotter.py
@@ -232,6 +232,18 @@ def adjust_dataframe(_df):
     # Load data
     ############################################

+    adj = np.loadtxt("../examples/data/matrix.dat", skiprows=1)
+    n = len(adj)
+    # read the coordinates
+    coord = pd.read_csv("../examples/data/coordinates.csv", sep=";")
+    coord = coord.set_index("nodeId")
+    # create a directed graph
+    G, edges, pos = create_graph_from_adj(adj, coord)
+
+    # Draws the graph for debugging
+    nx.draw(G, pos, with_labels=True, node_size=100, node_color="skyblue", font_size=8)
+    plt.show()
+
     df_array = []
     df_den_array = []
@@ -271,13 +283,6 @@ def adjust_dataframe(_df):
     # Load densities
     ############################################

-    adj = np.loadtxt("../examples/data/matrix.dat", skiprows=1)
-    n = len(adj)
-    # read the coordinates
-    coord = np.loadtxt("../examples/data/coordinates.dsm", skiprows=1)
-    # create a directed graph
-    G, edges, pos = create_graph_from_adj(adj, coord)
-
     # compute mean density for each row
     mean_density = DF_DEN.mean(axis=1)
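
The new examples/data/coordinates.csv is ';'-separated with nodeId, lat and lon columns, and utils/functions.py's create_graph_from_adj now expects those coordinates as a pandas DataFrame indexed by nodeId. Below is a minimal usage sketch mirroring the calls added in utils/plotter.py; the file paths are the ones used there and may need adjusting to your working directory.

```python
# Minimal sketch: build and draw the network from matrix.dat + coordinates.csv,
# following the calls added in utils/plotter.py.
import numpy as np
import pandas as pd
import networkx as nx
import matplotlib.pyplot as plt

from functions import create_graph_from_adj  # utils/functions.py

# adjacency matrix: the first line is skipped, as in the scripts above
adj = np.loadtxt("../examples/data/matrix.dat", skiprows=1)

# coordinates.csv is ';'-separated with columns nodeId;lat;lon,
# and create_graph_from_adj expects it indexed by nodeId
coord = pd.read_csv("../examples/data/coordinates.csv", sep=";").set_index("nodeId")

G, edges, pos = create_graph_from_adj(adj, coord)  # pos maps node -> (lon, lat)
nx.draw(G, pos, with_labels=True, node_size=100, node_color="skyblue", font_size=8)
plt.show()
```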
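
utils/gifter.py can now overlay the network on an OpenStreetMap basemap: the node coordinates are wrapped in a geopandas GeoDataFrame with CRS EPSG:4326 and contextily fetches tiles for the current axis extent. The sketch below shows that pattern in isolation, with a hypothetical coordinates file; it only makes sense when lat/lon are real WGS84 values (the synthetic grid in examples/data/coordinates.csv is not georeferenced).

```python
# Sketch of the optional basemap path used in utils/gifter.py.
# "my_coordinates.csv" is a placeholder for a file with real lon/lat values.
import numpy as np
import pandas as pd
import geopandas as gpd
import contextily as ctx
import matplotlib.pyplot as plt

coord = pd.read_csv("my_coordinates.csv", sep=";").set_index("nodeId")
gdf = gpd.GeoDataFrame(
    coord, geometry=gpd.points_from_xy(coord.lon, coord.lat), crs="EPSG:4326"
)

fig, ax = plt.subplots()
# pad the node bounding box slightly so nothing sits on the plot border
limits = gdf.total_bounds + np.array([-0.001, -0.001, 0.001, 0.001])
ax.set_xlim(limits[0], limits[2])
ax.set_ylim(limits[1], limits[3])
gdf.plot(ax=ax, markersize=5)  # the real script draws the network here instead

# fetch OpenStreetMap tiles behind whatever is already drawn on `ax`
ctx.add_basemap(ax, crs=gdf.crs.to_string(), source=ctx.providers.OpenStreetMap.Mapnik)
plt.show()
```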
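
For reference, create_image() in utils/gifter.py interprets each densities.csv column label as a flattened edge index (see the src = index // _n and dst = index % _n lines), i.e. src * n + dst. A small illustration of that convention, with the node count chosen hypothetically:

```python
# Column labels in densities.csv encode edges as src * n + dst,
# which create_image() decodes with integer division and modulo.
n = 120  # hypothetical node count; the scripts take it from len(adj)


def decode_edge(col: str, num_nodes: int) -> tuple[int, int]:
    index = int(col)
    return index // num_nodes, index % num_nodes  # (src, dst)


# e.g. the column for the edge 5 -> 7:
assert decode_edge(str(5 * n + 7), n) == (5, 7)
```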