Skip to content

Commit

Permalink
add area stats
Browse files Browse the repository at this point in the history
  • Loading branch information
toruseo committed Aug 19, 2024
1 parent 1ef1cd2 commit 875d018
Show file tree
Hide file tree
Showing 2 changed files with 144 additions and 17 deletions.
57 changes: 55 additions & 2 deletions tests/test_other_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -456,7 +456,7 @@ def test_printtry():
printtry(lambda: (lis[10]))
assert True

def test_area():
def test_area2area_demand_and_stats():
W = World(
name="",
deltan=10,
Expand Down Expand Up @@ -511,8 +511,61 @@ def test_area():
assert df["average_free_travel_time"][(df["origin_area"] == "areaNW") & (df["destination_area"] == "areaSE")].values[0] == 400.0
assert df["average_shortest_distance"][(df["origin_area"] == "areaNW") & (df["destination_area"] == "areaSE")].values[0] == 8000.0


@pytest.mark.flaky(reruns=10)
def test_area_stats():
    """Simulate demand on a 4x4 grid and check area-level aggregate stats."""
    W = World(
        name="",
        deltan=10,
        tmax=3000,
        print_mode=1, save_mode=1, show_mode=0,
        random_seed=None,
    )

    n_nodes = 4
    imax = n_nodes
    jmax = n_nodes

    # Grid of nodes: key is the (row, col) pair, coordinates are (r, c).
    nodes = {
        (r, c): W.addNode(f"n{(r,c)}", r, c, flow_capacity=1.6)
        for r in range(imax)
        for c in range(jmax)
    }

    # Bidirectional links to each in-grid neighbor; visiting order of the
    # offsets matches the original explicit if-chain (+r, -r, +c, -c).
    links = {}
    for r in range(imax):
        for c in range(jmax):
            for dr, dc in ((1, 0), (-1, 0), (0, 1), (0, -1)):
                rr, cc = r + dr, c + dc
                if 0 <= rr < imax and 0 <= cc < jmax:
                    links[r, c, rr, cc] = W.addLink(f"l{(r, c, rr, cc)}", nodes[r, c], nodes[rr, cc], length=1000)

    area_dict = {
        "areaN": [nodes[0, k] for k in range(n_nodes)],
        "areaS": [nodes[n_nodes - 1, k] for k in range(n_nodes)],
        "areaNW": [nodes[0, 0]],
        "areaSE": [nodes[n_nodes - 1, n_nodes - 1]],
    }

    W.adddemand_areas2areas(area_dict["areaN"], area_dict["areaS"], 0, 3000, volume=7000)

    W.exec_simulation()
    W.analyzer.print_simple_stats()

    df = W.analyzer.area_to_pandas(list(area_dict.values()), list(area_dict.keys()), border_include=True)
    print(df)

    def stat(column, area_name):
        # First matching row of `column` for the given area label.
        return df[column][df["area"] == area_name].values[0]

    assert equal_tolerance(stat("traffic_volume", "areaN"), 6900)
    assert equal_tolerance(stat("traffic_volume", "areaS"), 6300)
    assert equal_tolerance(stat("total_travel_time", "areaN"), 700000, rel_tol=0.2)
    assert equal_tolerance(stat("average_delay", "areaN"), 0.73, abs_tol=0.2)


@pytest.mark.flaky(reruns=10)
def test_vehicle_group():
def test_vehicle_group_stats():
W = World(
name="",
deltan=10,
Expand Down
104 changes: 89 additions & 15 deletions uxsim/analyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -1198,7 +1198,7 @@ def plot_vehicles_log(s, vehnamelist):

def vehicles_to_pandas(s):
"""
Converts the vehicle travel logs to a pandas DataFrame.
Compute the detailed vehicle travel logs and return as a pandas DataFrame.
Returns
-------
Expand Down Expand Up @@ -1249,12 +1249,12 @@ def log_vehicles_to_pandas(s):

def vehicle_trip_to_pandas(s):
"""
Converts the vehicle trip top to a pandas DataFrame.
Compute the vehicle trip summary and return as a pandas DataFrame.
Returns
-------
pd.DataFrame
A DataFrame containing the top of the vehicle trip logs, with the columns:
A DataFrame containing the trip summary of the vehicle trip logs, with the columns:
- 'name': the name of the vehicle (platoon).
- 'orig': the origin node of the vehicle's trip.
Expand All @@ -1279,7 +1279,7 @@ def vehicle_trip_to_pandas(s):

def gps_like_log_to_pandas(s):
"""
Generate GPS-like log (x and y in the coordinate system used for Node) of vehicles.
Generate GPS-like log (x and y in the coordinate system used for Node) of vehicles and return as a pandas DataFrame.
Returns
-------
Expand All @@ -1298,7 +1298,7 @@ def gps_like_log_to_pandas(s):

def basic_to_pandas(s):
"""
Converts the basic stats to a pandas DataFrame.
Computes the basic stats and returns them as a pandas DataFrame.
Returns
-------
Expand All @@ -1311,7 +1311,7 @@ def basic_to_pandas(s):

def od_to_pandas(s):
"""
Converts the OD-specific analysis results to a pandas DataFrame.
Compute the OD-specific stats and return as a pandas DataFrame.
Returns
-------
Expand All @@ -1329,7 +1329,7 @@ def od_to_pandas(s):

def areas2areas_to_pandas(s, areas, area_names=None):
"""
Converts the OD-specific analysis results to a pandas DataFrame. It analyzes travel stats between areas (set of nodes).
Compute the area-wise OD-specific stats and return a pandas DataFrame. It analyzes travel stats between areas (set of nodes).
Parameters
----------
Expand Down Expand Up @@ -1438,9 +1438,86 @@ def areas2areas_to_pandas(s, areas, area_names=None):
s.df_areas2areas = pd.DataFrame(out[1:], columns=out[0])
return s.df_areas2areas

def area_to_pandas(s, areas, area_names=None, border_include=True):
    """
    Compute traffic stats in areas and return as pandas.DataFrame.

    Parameters
    ----------
    areas : list
        The list of areas. Each area is defined as a list of nodes. The items of area can be Node objects or names of Nodes.
    area_names : list, optional
        The list of names of areas.
    border_include : bool, optional
        If set to True, the links on the border of the area are included in the analysis. Default is True.

    Returns
    -------
    pd.DataFrame
        One row per area with columns: 'area', 'n_links', 'traffic_volume',
        'vehicles_remain', 'total_travel_time', 'average_delay'.
    """
    # These logs are identical for every area, so compute them once here
    # (recomputing vehicles_to_pandas() per area is needlessly expensive).
    df_links = s.W.analyzer.link_to_pandas()
    df_veh_link = s.W.analyzer.vehicles_to_pandas().drop_duplicates(subset=["name", "link"])

    n_links_rec = []
    traffic_volume_rec = []
    vehicles_remain_rec = []
    total_travel_time_rec = []
    average_delay_rec = []

    for area in areas:
        # Normalize to a set of node names (items may be Node objects or names).
        area_nodes = {s.W.get_node(n).name for n in area}

        if border_include:
            # Links touching the area at either end count as inside.
            rows = df_links["start_node"].isin(area_nodes) | df_links["end_node"].isin(area_nodes)
        else:
            rows = df_links["start_node"].isin(area_nodes) & df_links["end_node"].isin(area_nodes)
        links = set(df_links["link"][rows].values)

        n_links = int(rows.sum())
        # Distinct vehicles (platoons) that traversed any in-area link,
        # scaled by platoon size to get vehicle counts.
        traffic_volume = df_veh_link[df_veh_link["link"].isin(links)].drop_duplicates(subset="name").shape[0]*s.W.DELTAN
        vehicles_remain = df_links["vehicles_remain"][rows].sum()
        if traffic_volume > 0:
            # Weight per-link times by the number of vehicles that completed the link.
            completed = df_links["traffic_volume"][rows] - df_links["vehicles_remain"][rows]
            total_travel_time = (df_links["average_travel_time"][rows] * completed).values.sum()
            total_free_time = (df_links["free_travel_time"][rows] * completed).values.sum()
            # Delay ratio = actual / free-flow - 1, clamped at 0.
            average_delay = max(total_travel_time/total_free_time - 1, 0)
        else:
            total_travel_time = 0
            average_delay = np.nan

        n_links_rec.append(n_links)
        traffic_volume_rec.append(traffic_volume)
        vehicles_remain_rec.append(vehicles_remain)
        total_travel_time_rec.append(total_travel_time)
        average_delay_rec.append(average_delay)

    s.df_area = pd.DataFrame({
        "area": area_names,
        "n_links": n_links_rec,
        "traffic_volume": traffic_volume_rec,
        "vehicles_remain": vehicles_remain_rec,
        "total_travel_time": total_travel_time_rec,
        "average_delay": average_delay_rec
    })

    return s.df_area

def vehicle_groups_to_pandas(s, groups, group_names=None):
"""
Converts the vehicle group analysis results to a pandas DataFrame.
Computes the stats of vehicle group and return as a pandas DataFrame.
Parameters
----------
Expand Down Expand Up @@ -1535,12 +1612,9 @@ def vehicle_groups_to_pandas(s, groups, group_names=None):

return s.df_vehicle_groups




def mfd_to_pandas(s, links=None):
"""
Converts the MFD to a pandas DataFrame.
Compute the MFD-like stats and return as a pandas DataFrame.
Returns
-------
Expand Down Expand Up @@ -1576,7 +1650,7 @@ def link_to_pandas(s):

def link_traffic_state_to_pandas(s):
"""
Converts the traffic states in links to a pandas DataFrame.
Compute the traffic states in links and return as a pandas DataFrame.
Returns
-------
Expand All @@ -1594,7 +1668,7 @@ def link_traffic_state_to_pandas(s):

def link_cumulative_to_pandas(s):
"""
Converts the cumulative counts etc. in links to a pandas DataFrame.
Compute the cumulative counts etc. in links and return as a pandas DataFrame.
Returns
-------
Expand All @@ -1610,7 +1684,7 @@ def link_cumulative_to_pandas(s):
@catch_exceptions_and_warn()
def output_data(s, fname=None):
"""
Save all results to CSV files
Save all results to CSV files. This is obsolete; not all functions are implemented.
"""
if fname == None:
fname = f"out{s.W.name}/data"
Expand Down

0 comments on commit 875d018

Please sign in to comment.