diff --git a/tests/test_other_functions.py b/tests/test_other_functions.py index fa14556..fc0c3a9 100644 --- a/tests/test_other_functions.py +++ b/tests/test_other_functions.py @@ -161,7 +161,7 @@ def test_scenario_write_and_read(): W.adddemand_point2point(0.5, 0.5, 2.5, 2.5, 0, 1800, volume=100) W.adddemand_area2area(0.5, 0.5, 2, 2.5, 2.5, 2, 0, 1800, volume=100) - W.save_scenario("out/test_grid.pkl") + W.save_scenario("out/test_grid.uxsim_scenario") W.exec_simulation() W.analyzer.print_simple_stats() @@ -181,7 +181,7 @@ def test_scenario_write_and_read(): random_seed=42 ) - W2.load_scenario("out/test_grid.pkl") + W2.load_scenario("out/test_grid.uxsim_scenario") W2.exec_simulation() @@ -192,6 +192,79 @@ def test_scenario_write_and_read(): assert df1["total_travel_time"][0] == df2["total_travel_time"][0] +def test_scenario_write_and_read_areas(): + ################################## + # Iter 1 + ################################## + + W = World( + name="", + deltan=10, + tmax=4000, + print_mode=1, save_mode=1, show_mode=0, + random_seed=42, + ) + + n_nodes = 5 + imax = n_nodes + jmax = n_nodes + nodes = {} + for i in range(imax): + for j in range(jmax): + nodes[i,j] = W.addNode(f"n{(i,j)}", i, j, flow_capacity=1.6) + + links = {} + for i in range(imax): + for j in range(jmax): + if i != imax-1: + links[i,j,i+1,j] = W.addLink(f"l{(i,j,i+1,j)}", nodes[i,j], nodes[i+1,j], length=1000) + if i != 0: + links[i,j,i-1,j] = W.addLink(f"l{(i,j,i-1,j)}", nodes[i,j], nodes[i-1,j], length=1000) + if j != jmax-1: + links[i,j,i,j+1] = W.addLink(f"l{(i,j,i,j+1)}", nodes[i,j], nodes[i,j+1], length=1000) + if j != 0: + links[i,j,i,j-1] = W.addLink(f"l{(i,j,i,j-1)}", nodes[i,j], nodes[i,j-1], length=1000) + + + areas = { + "areaN": [nodes[0,0], nodes[0, n_nodes-1]], + "areaS": [nodes[n_nodes-1,0], nodes[n_nodes-1, n_nodes-1]], + "areaNW": [nodes[0,0]], + "areaSE": [nodes[n_nodes-1, n_nodes-1]] + } + + W.adddemand_areas2areas(areas["areaN"], areas["areaS"], 0, 3000, volume=7000) + + W.save_scenario("out/test_area.uxsim_scenario") + + W.exec_simulation() + W.analyzer.print_simple_stats() + + df1 = W.analyzer.basic_to_pandas() + print(df1) + + ################################## + # Iter 2 + ################################## + + W2 = World( + name="", + deltan=10, + tmax=4000, + print_mode=1, save_mode=1, show_mode=0, + random_seed=42, + ) + + W2.load_scenario("out/test_area.uxsim_scenario") + + W2.exec_simulation() + W2.analyzer.print_simple_stats() + + df2 = W2.analyzer.basic_to_pandas() + print(df2) + + assert df1["total_travel_time"][0] == df2["total_travel_time"][0] + def test_k_shortest_path(): W = World( name="", # Scenario name @@ -381,4 +454,116 @@ def test_printtry(): lis = [1,2,3] printtry(lambda: (lis[0])) printtry(lambda: (lis[10])) - assert True \ No newline at end of file + assert True + +def test_area(): + W = World( + name="", + deltan=10, + tmax=4000, + print_mode=1, save_mode=1, show_mode=0, + random_seed=None, + ) + + n_nodes = 5 + imax = n_nodes + jmax = n_nodes + nodes = {} + for i in range(imax): + for j in range(jmax): + nodes[i,j] = W.addNode(f"n{(i,j)}", i, j, flow_capacity=1.6) + + links = {} + for i in range(imax): + for j in range(jmax): + if i != imax-1: + links[i,j,i+1,j] = W.addLink(f"l{(i,j,i+1,j)}", nodes[i,j], nodes[i+1,j], length=1000) + if i != 0: + links[i,j,i-1,j] = W.addLink(f"l{(i,j,i-1,j)}", nodes[i,j], nodes[i-1,j], length=1000) + if j != jmax-1: + links[i,j,i,j+1] = W.addLink(f"l{(i,j,i,j+1)}", nodes[i,j], nodes[i,j+1], length=1000) + if j != 0: + links[i,j,i,j-1] = 
W.addLink(f"l{(i,j,i,j-1)}", nodes[i,j], nodes[i,j-1], length=1000) + + + areas = { + "areaN": [nodes[0,0], nodes[0, n_nodes-1]], + "areaS": [nodes[n_nodes-1,0], nodes[n_nodes-1, n_nodes-1]], + "areaNW": [nodes[0,0]], + "areaSE": [nodes[n_nodes-1, n_nodes-1]] + } + + W.adddemand_areas2areas(areas["areaN"], areas["areaS"], 0, 3000, volume=7000) + + W.exec_simulation() + W.analyzer.print_simple_stats() + + df = W.analyzer.areas2areas_to_pandas(areas.values(), list(areas.keys())) + print(df) + + print(W.analyzer.areas2areas_to_pandas(areas.values())) + + assert W.analyzer.trip_all == 7000 + assert df["total_trips"][(df["origin_area"] == "areaN") & (df["destination_area"] == "areaS")].values[0] == 7000 + assert df["average_free_travel_time"][(df["origin_area"] == "areaN") & (df["destination_area"] == "areaS")].values[0] == 300.0 + assert df["average_shortest_distance"][(df["origin_area"] == "areaN") & (df["destination_area"] == "areaS")].values[0] == 6000.0 + assert df["total_trips"][(df["origin_area"] == "areaNW") & (df["destination_area"] == "areaSE")].values[0] == 1750 + assert df["average_free_travel_time"][(df["origin_area"] == "areaNW") & (df["destination_area"] == "areaSE")].values[0] == 400.0 + assert df["average_shortest_distance"][(df["origin_area"] == "areaNW") & (df["destination_area"] == "areaSE")].values[0] == 8000.0 + +@pytest.mark.flaky(reruns=10) +def test_vehicle_group(): + W = World( + name="", + deltan=10, + tmax=4000, + print_mode=1, save_mode=1, show_mode=0, + random_seed=None, + ) + + n_nodes = 4 + imax = n_nodes + jmax = n_nodes + nodes = {} + for i in range(imax): + for j in range(jmax): + nodes[i,j] = W.addNode(f"n{(i,j)}", i, j, flow_capacity=1.6) + + links = {} + for i in range(imax): + for j in range(jmax): + if i != imax-1: + links[i,j,i+1,j] = W.addLink(f"l{(i,j,i+1,j)}", nodes[i,j], nodes[i+1,j], length=1000) + if i != 0: + links[i,j,i-1,j] = W.addLink(f"l{(i,j,i-1,j)}", nodes[i,j], nodes[i-1,j], length=1000) + if j != jmax-1: + links[i,j,i,j+1] = W.addLink(f"l{(i,j,i,j+1)}", nodes[i,j], nodes[i,j+1], length=1000) + if j != 0: + links[i,j,i,j-1] = W.addLink(f"l{(i,j,i,j-1)}", nodes[i,j], nodes[i,j-1], length=1000) + + + areas = { + "areaN": [nodes[0,0], nodes[0, n_nodes-1]], + "areaS": [nodes[n_nodes-1,0], nodes[n_nodes-1, n_nodes-1]], + "areaNW": [nodes[0,0]], + "areaSE": [nodes[n_nodes-1, n_nodes-1]] + } + + W.adddemand_areas2areas(areas["areaN"], areas["areaS"], 0, 3000, volume=7000) + + W.exec_simulation() + W.analyzer.print_simple_stats() + + dt = 500 + group_dict = {} + for t in range(0, W.TMAX, dt): + group_dict[f"depart_t={t}"] = [veh for veh in W.VEHICLES.values() if t <= veh.departure_time_in_second < t+dt] + + df = W.analyzer.vehicle_groups_to_pandas(list(group_dict.values()), list(group_dict.keys())) + print(df) + + assert df["average_travel_time"][df["group"]=="depart_t=0"].values[0] < df["average_travel_time"][df["group"]=="depart_t=1500"].values[0] + assert df["average_delay_ratio"][df["group"]=="depart_t=0"].values[0] < df["average_delay_ratio"][df["group"]=="depart_t=1500"].values[0] + assert df["average_traveled_distance"][df["group"]=="depart_t=0"].values[0] < df["average_traveled_distance"][df["group"]=="depart_t=1500"].values[0] + assert df["average_detour_ratio"][df["group"]=="depart_t=0"].values[0] < df["average_detour_ratio"][df["group"]=="depart_t=1500"].values[0] + assert df["average_speed"][df["group"]=="depart_t=0"].values[0] > df["average_speed"][df["group"]=="depart_t=1500"].values[0] \ No newline at end of file diff --git 
a/uxsim/analyzer.py b/uxsim/analyzer.py index 839569b..9b7f108 100644 --- a/uxsim/analyzer.py +++ b/uxsim/analyzer.py @@ -84,6 +84,8 @@ def basic_analysis(s): s.trip_completed = np.sum(df["completed_trips"]) s.trip_all = np.sum(df["total_trips"]) + + s.total_distance_traveled = np.sum(df["average_distance_traveled_per_veh"]*df["total_trips"]) if s.trip_completed: s.total_travel_time = np.sum(df["completed_trips"]*df["average_travel_time"]) @@ -99,7 +101,7 @@ def basic_analysis(s): def od_analysis(s): """ - Analyze OD-specific stats: number of trips, number of completed trips, free-flow travel time, average travel time, its std + Analyze OD-specific stats: number of trips, number of completed trips, free-flow travel time, average travel time, its std, total distance traveled """ if s.flag_od_analysis: return 0 @@ -112,33 +114,55 @@ def od_analysis(s): s.od_tt = ddict(lambda: []) s.od_tt_ave = ddict(lambda: 0) s.od_tt_std = ddict(lambda: 0) + s.od_dist = ddict(lambda: []) + s.od_dist_total = ddict(lambda: 0) + s.od_dist_ave = ddict(lambda: 0) + s.od_dist_std = ddict(lambda: 0) + s.od_dist_min = ddict(lambda: 0) dn = s.W.DELTAN - #自由旅行時間 + #自由旅行時間と最短距離 adj_mat_time = np.zeros([len(s.W.NODES), len(s.W.NODES)]) + adj_mat_dist = np.zeros([len(s.W.NODES), len(s.W.NODES)]) for link in s.W.LINKS: i = link.start_node.id j = link.end_node.id if s.W.ADJ_MAT[i,j]: adj_mat_time[i,j] = link.length/link.u + adj_mat_dist[i,j] = link.length if link.capacity_in == 0: #流入禁止の場合は通行不可 adj_mat_time[i,j] = np.inf + adj_mat_dist[i,j] = np.inf else: adj_mat_time[i,j] = np.inf - dist = floyd_warshall(adj_mat_time) + adj_mat_dist[i,j] = np.inf + dist_time = floyd_warshall(adj_mat_time) + dist_space = floyd_warshall(adj_mat_dist) for veh in s.W.VEHICLES.values(): o = veh.orig d = veh.dest if d != None: s.od_trips[o,d] += dn + + veh_links = [rec[1] for rec in veh.log_t_link if hasattr(rec[1], "length")] + veh_dist_traveled = sum([l.length for l in veh_links]) + if veh.state == "run": + veh_dist_traveled += veh.x + veh.distance_traveled = veh_dist_traveled + s.od_dist[o,d].append(veh.distance_traveled) + if veh.travel_time != -1: s.od_trips_comp[o,d] += dn s.od_tt[o,d].append(veh.travel_time) for o,d in s.od_tt.keys(): s.od_tt_ave[o,d] = np.average(s.od_tt[o,d]) s.od_tt_std[o,d] = np.std(s.od_tt[o,d]) - s.od_tt_free[o,d] = dist[o.id, d.id] + s.od_tt_free[o,d] = dist_time[o.id, d.id] + s.od_dist_total[o,d] = np.sum(s.od_dist[o,d]) + s.od_dist_min[o,d] = dist_space[o.id, d.id] + s.od_dist_ave[o,d] = np.average(s.od_dist[o,d]) + s.od_dist_std[o,d] = np.std(s.od_dist[o,d]) def link_analysis_coarse(s): """ @@ -303,6 +327,7 @@ def print_simple_stats(s, force_print=False): s.W.print(f" average travel time of trips:\t {s.average_travel_time:.1f} s") s.W.print(f" average delay of trips:\t {s.average_delay:.1f} s") s.W.print(f" delay ratio:\t\t\t {s.average_delay/s.average_travel_time:.3f}") + s.W.print(f" total distance traveled:\t {s.total_distance_traveled:.1f} m") if force_print == 1 and s.W.print_mode == 0: print("results:") @@ -313,6 +338,7 @@ def print_simple_stats(s, force_print=False): print(f" average travel time of trips:\t {s.average_travel_time:.1f} s") print(f" average delay of trips:\t {s.average_delay:.1f} s") print(f" delay ratio:\t\t\t {s.average_delay/s.average_travel_time:.3f}") + print(f" total distance traveled:\t {s.total_distance_traveled:.1f} m") def comp_route_travel_time(s, t, route): @@ -1237,14 +1263,16 @@ def vehicle_trip_to_pandas(s): - 'final_state': the final state of the vehicle. 
- 'travel_time': the travel time of the vehicle. - 'average_speed': the average speed of the vehicle. + - 'distance_traveled': the distance traveled by the vehicle. """ - out = [["name", "orig", "dest", "departure_time", "final_state", "travel_time", "average_speed"]] + out = [["name", "orig", "dest", "departure_time", "final_state", "travel_time", "average_speed", "distance_traveled"]] for veh in s.W.VEHICLES.values(): veh_dest_name = veh.dest.name if veh.dest != None else None veh_state = veh.log_state[-1] veh_ave_speed = np.average([v for v in veh.log_v if v != -1]) + veh_dist_traveled = veh.distance_traveled - out.append([veh.name, veh.orig.name, veh_dest_name, veh.departure_time*s.W.DELTAT, veh_state, veh.travel_time, veh_ave_speed]) + out.append([veh.name, veh.orig.name, veh_dest_name, veh.departure_time*s.W.DELTAT, veh_state, veh.travel_time, veh_ave_speed, veh_dist_traveled]) s.df_vehicle_trip = pd.DataFrame(out[1:], columns=out[0]) return s.df_vehicle_trip @@ -1268,7 +1296,6 @@ def gps_like_log_to_pandas(s): s.df_gps_like_log = pd.DataFrame(out[1:], columns=out[0]) return s.df_gps_like_log - def basic_to_pandas(s): """ Converts the basic stats to a pandas DataFrame. @@ -1293,12 +1320,223 @@ def od_to_pandas(s): s.od_analysis() - out = [["orig", "dest", "total_trips", "completed_trips", "free_travel_time", "average_travel_time", "stddiv_travel_time"]] + out = [["orig", "dest", "total_trips", "completed_trips", "free_travel_time", "average_travel_time", "stddiv_travel_time", "shortest_distance", "average_distance_traveled_per_veh", "stddiv_distance_traveled_per_veh"]] for o,d in s.od_trips.keys(): - out.append([o.name, d.name, s.od_trips[o,d], s.od_trips_comp[o,d], s.od_tt_free[o,d], s.od_tt_ave[o,d], s.od_tt_std[o,d]]) + out.append([o.name, d.name, s.od_trips[o,d], s.od_trips_comp[o,d], s.od_tt_free[o,d], s.od_tt_ave[o,d], s.od_tt_std[o,d], s.od_dist_min[o,d], s.od_dist_ave[o,d], s.od_dist_std[o,d]]) s.df_od = pd.DataFrame(out[1:], columns=out[0]) return s.df_od + + def areas2areas_to_pandas(s, areas, area_names=None): + """ + Converts the OD-specific analysis results to a pandas DataFrame. It analyzes travel stats between areas (set of nodes). + + Parameters + ---------- + areas : list + The list of areas. Each area is defined as a list of nodes. The items of area can be Node objects or names of Nodes. + area_names : list, optional + The list of names of areas. 
+ + Returns + ------- + pd.DataFrame + """ + df = s.od_to_pandas() + + o_name_rec = [] + d_name_rec = [] + total_trips_rec = [] + completed_trips_rec = [] + average_travel_time_rec = [] + stddiv_travel_time_rec = [] + average_distance_traveled_rec = [] + stddiv_distance_traveled_rec = [] + + average_free_travel_time_rec = [] + average_shortest_distance_rec = [] + + areas = [[s.W.get_node(n).name for n in area] for area in areas] + if area_names == None: + area_names = [f"area {i} including {areas[i][0]}" for i in range(len(areas))] + + for i, origs in enumerate(areas): + for j, dests in enumerate(areas): + o_name = area_names[i] + d_name = area_names[j] + + # print(o_name, d_name) + + # group by area: average travel time from origs to dests + rows = df["orig"].isin(origs) & df["dest"].isin(dests) + total_tripss = np.array(df["total_trips"][rows]) + average_travel_times = np.array(df["average_travel_time"][rows]) + completed_tripss = np.array(df["completed_trips"][rows]) + var_travel_times = np.array(df["stddiv_travel_time"][rows])**2 + distance_traveleds = np.array(df["average_distance_traveled_per_veh"][rows]) + var_distance_traveleds = np.array(df["stddiv_distance_traveled_per_veh"][rows])**2 + + free_travel_time_times = np.array(df["free_travel_time"][rows]) + shortest_distances = np.array(df["shortest_distance"][rows]) + + # print(f"{total_tripss = }") + # print(f"{average_travel_times = }") + # print(f"{completed_tripss = }") + # print(f"{var_travel_times = }") + # print(f"{distance_traveleds = }") + # print(f"{var_distance_traveleds = }") + + total_trips = total_tripss.sum() + completed_trips = completed_tripss.sum() + + if total_trips: + average_travel_time = (completed_tripss*average_travel_times).sum()/completed_trips + var_travel_time = (completed_tripss*var_travel_times).sum()/completed_trips #wrong! there is a correct formula. TODO: implement + stddiv_travel_time = np.sqrt(var_travel_time) + + average_shortest_distance = (total_tripss*shortest_distances).sum()/total_trips + else: + continue + # average_travel_time = np.nan + # var_travel_time = np.nan + # stddiv_travel_time = np.nan + # average_shortest_distance = np.nan + + if completed_trips: + average_distance_traveled = (total_tripss*distance_traveleds).sum()/total_trips + var_distance_traveled = (total_tripss*distance_traveleds).sum()/total_trips #wrong! 
+ stddiv_distance_traveled = np.sqrt(var_distance_traveled) + + average_free_travel_time = (completed_tripss*free_travel_time_times).sum()/completed_trips + else: + average_distance_traveled = np.nan + var_distance_traveled = np.nan + stddiv_distance_traveled = np.nan + average_free_travel_time = np.nan + + # print(f"{total_trips = }") + # print(f"{completed_trips = }") + # print(f"{average_travel_time = }") + # print(f"{stddiv_travel_time = }") + # print(f"{average_distance_traveled = }") + # print(f"{stddiv_distance_traveled = }") + + o_name_rec.append(o_name) + d_name_rec.append(d_name) + total_trips_rec.append(total_trips) + completed_trips_rec.append(completed_trips) + average_travel_time_rec.append(average_travel_time) + stddiv_travel_time_rec.append(stddiv_travel_time) + average_distance_traveled_rec.append(average_distance_traveled) + stddiv_distance_traveled_rec.append(stddiv_distance_traveled) + average_free_travel_time_rec.append(average_free_travel_time) + average_shortest_distance_rec.append(average_shortest_distance) + + out = [["origin_area", "destination_area", "total_trips", "completed_trips", "average_travel_time", "average_free_travel_time", "average_distance_traveled", "average_shortest_distance"]] + out += [[o_name_rec[i], d_name_rec[i], total_trips_rec[i], completed_trips_rec[i], average_travel_time_rec[i], average_free_travel_time_rec[i], average_distance_traveled_rec[i], average_shortest_distance_rec[i]] for i in range(len(o_name_rec))] + + s.df_areas2areas = pd.DataFrame(out[1:], columns=out[0]) + return s.df_areas2areas + + def vehicle_groups_to_pandas(s, groups, group_names=None): + """ + Converts the vehicle group analysis results to a pandas DataFrame. + + Parameters + ---------- + groups : list + The list of vehicle groups. Each group is defined as a list of vehicle object. + group_names : list, optional + The list of names of vehicle groups. 
+ + Returns + ------- + pd.DataFrame + """ + df_od = s.W.analyzer.od_to_pandas() + + if group_names == None: + group_names = [f"group {i} including {groups[i][0].name}" for i in range(len(groups))] + + total_trip_rec = [] + completed_trip_rec = [] + average_travel_time_rec = [] + average_delay_rec = [] + std_delay_rec = [] + average_traveled_distance_rec = [] + average_detour_rec = [] + std_detour_rec = [] + average_speed_rec = [] + std_speed_rec = [] + for i, group in enumerate(groups): + total_trips = 0 + completed_trips = 0 + travel_times = [] + delays = [] + traveled_distances = [] + detours = [] + speeds = [] + + + for veh in group: + + total_trips += 1 + if veh.state == "end": + completed_trips += 1 + travel_times.append(veh.travel_time) + traveled_distances.append(veh.distance_traveled) + + free_travel_time = df_od["free_travel_time"][(df_od["orig"]==veh.orig.name) & (df_od["dest"]==veh.dest.name)].values[0] + shortest_distance = df_od["shortest_distance"][(df_od["orig"]==veh.orig.name) & (df_od["dest"]==veh.dest.name)].values[0] + + delays.append(veh.travel_time/free_travel_time) + detours.append(veh.distance_traveled/shortest_distance) + + speeds.append(veh.distance_traveled/veh.travel_time) + + #print(f"{group_names[i]=}, {np.average(travel_times)=}, {np.average(traveled_distances)=}, {np.average(delays)=}, {np.average(detours)=}, {np.std(delays)=}, {np.std(detours)=}, {np.average(speeds)}, {np.std(speeds)}") + + total_trip_rec.append(total_trips) + completed_trip_rec.append(completed_trips) + if completed_trips > 0: + average_travel_time_rec.append(np.average(travel_times)) + average_delay_rec.append(np.average(delays)) + std_delay_rec.append(np.std(delays)) + average_traveled_distance_rec.append(np.average(traveled_distances)) + average_detour_rec.append(np.average(detours)) + std_detour_rec.append(np.std(detours)) + average_speed_rec.append(np.average(speeds)) + std_speed_rec.append(np.std(speeds)) + else: + average_travel_time_rec.append(np.nan) + average_delay_rec.append(np.nan) + std_delay_rec.append(np.nan) + average_traveled_distance_rec.append(np.nan) + average_detour_rec.append(np.nan) + std_detour_rec.append(np.nan) + average_speed_rec.append(np.nan) + std_speed_rec.append(np.nan) + + df = pd.DataFrame({ + "group": group_names, + "total_trips": total_trip_rec, + "completed_trips": completed_trip_rec, + "average_travel_time": average_travel_time_rec, + "average_delay_ratio": average_delay_rec, + "std_delay_ratio": std_delay_rec, + "average_traveled_distance": average_traveled_distance_rec, + "average_detour_ratio": average_detour_rec, + "std_detour_ratio": std_detour_rec, + "average_speed": average_speed_rec, + "std_speed": std_speed_rec, + }) + + s.df_vehicle_groups = df + + return s.df_vehicle_groups + + + def mfd_to_pandas(s, links=None): """ diff --git a/uxsim/uxsim.py b/uxsim/uxsim.py index 43870fa..1b685b8 100644 --- a/uxsim/uxsim.py +++ b/uxsim/uxsim.py @@ -844,6 +844,7 @@ def __init__(s, W, orig, dest, departure_time, name=None, route_pref=None, route s.departure_time = departure_time else: s.departure_time = int(departure_time/s.W.DELTAT) + s.departure_time_in_second = departure_time*s.W.DELTAT #TODO: temporary workaround s.arrival_time = -1 s.link_arrival_time = -1 s.travel_time = -1 @@ -911,6 +912,8 @@ def __init__(s, W, orig, dest, departure_time, name=None, route_pref=None, route s.color = (s.W.rng.random(), s.W.rng.random(), s.W.rng.random()) s.log_t_link = [[int(s.departure_time*s.W.DELTAT), "home"]] #新たなリンクに入った時にその時刻とリンクのみを保存.経路分析用 + + s.distance_traveled = 0
s.attribute = attribute @@ -1800,6 +1803,49 @@ def adddemand_area2area(W, x_orig, y_orig, radious_orig, x_dest, y_dest, radiou for d in dests: W.adddemand(o, d, t_start, t_end, flow, volume, attribute, direct_call=False) + def adddemand_areas2areas(W, origs, dests, t_start, t_end, flow=-1, volume=-1, attribute=None): + """ + Generate vehicles by specifying time-dependent origin-destination demand between areas (i.e., sets of nodes). + + Parameters + ---------- + origs : list + The list of origin nodes. The items can be Node objects or names of Nodes. + dests : list + The list of destination nodes. The items can be Node objects or names of Nodes. + t_start : float + The start time for the demand in seconds. + t_end : float + The end time for the demand in seconds. + flow : float, optional + The flow rate from the origin to the destination in vehicles per second. + volume: float, optional + The demand volume from the origin to the destination. If volume is specified, the flow is ignored. + attribute : any, optional + Additional (meta) attributes defined by users. + """ + + origs_new = [] + dests_new = [] + for oo in origs: + o = W.get_node(oo) + if len(o.outlinks) != 0: + origs_new.append(o) + for dd in dests: + d = W.get_node(dd) + if len(d.inlinks) != 0: + dests_new.append(d) + origs = origs_new + dests = dests_new + + if flow != -1: + flow = flow/(len(origs)*len(dests)) + if volume != -1: + volume = volume/(len(origs)*len(dests)) + for o in origs: + for d in dests: + W.adddemand(o, d, t_start, t_end, flow, volume, attribute, direct_call=True) + def finalize_scenario(W, tmax=None): """ Finalizes the settings and preparations for the simulation scenario execution.
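Usage note: the area-level demand and analysis API introduced in this diff is exercised end to end in test_area above. A minimal sketch of the same flow, assuming the World `W` and the `nodes` grid built in that test (illustrative only, not part of the patch):

# Define areas as lists of nodes (Node objects or node names both work).
areas = {
    "areaN": [nodes[0, 0], nodes[0, n_nodes - 1]],
    "areaS": [nodes[n_nodes - 1, 0], nodes[n_nodes - 1, n_nodes - 1]],
}

# adddemand_areas2areas splits the 7000-vehicle volume uniformly over all
# usable origin-destination node pairs between the two areas.
W.adddemand_areas2areas(areas["areaN"], areas["areaS"], 0, 3000, volume=7000)
W.exec_simulation()

# Aggregate OD statistics per area pair; column names follow areas2areas_to_pandas.
df = W.analyzer.areas2areas_to_pandas(list(areas.values()), list(areas.keys()))
print(df[["origin_area", "destination_area", "total_trips", "average_travel_time"]])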
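The vehicle-group analysis in test_vehicle_group follows the same pattern: vehicles are binned by the new departure_time_in_second attribute and the bins are passed to vehicle_groups_to_pandas. A sketch under the same assumptions:

# Bin vehicles into 500 s departure-time cohorts and compare their statistics.
dt = 500
group_dict = {}
for t in range(0, W.TMAX, dt):
    group_dict[f"depart_t={t}"] = [
        veh for veh in W.VEHICLES.values()
        if t <= veh.departure_time_in_second < t + dt
    ]

df = W.analyzer.vehicle_groups_to_pandas(list(group_dict.values()), list(group_dict.keys()))
# Later cohorts depart into a more congested network, so the test expects their
# average_delay_ratio and average_detour_ratio to exceed the first cohort's.
print(df[["group", "average_travel_time", "average_delay_ratio", "average_detour_ratio"]])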
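The new shortest_distance column (and the detour ratio that divides distance_traveled by it) comes from an all-pairs shortest-path pass over link lengths: od_analysis now builds adj_mat_dist alongside the free-flow time matrix and runs floyd_warshall on both. A standalone sketch of that step, assuming the module-level floyd_warshall is SciPy's scipy.sparse.csgraph.floyd_warshall, which accepts a dense matrix with np.inf marking missing links:

import numpy as np
from scipy.sparse.csgraph import floyd_warshall

INF = np.inf
# Toy 3-node ring: adj_mat_dist[i, j] is the length of link i -> j, inf if absent.
adj_mat_dist = np.array([
    [0.0,    1000.0, INF   ],
    [INF,    0.0,    1000.0],
    [1000.0, INF,    0.0   ],
])

# All-pairs shortest distances; dist_space[o, d] plays the role of od_dist_min[o, d].
dist_space = floyd_warshall(adj_mat_dist)
print(dist_space[0, 2])  # 2000.0 (node 0 -> 1 -> 2)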