2020from shapely import force_2d
2121
2222from ..geopandas_tools.general import _push_geom_col
23- from ..geopandas_tools .sfilter import sfilter_inverse
2423from ._get_route import _get_k_routes
2524from ._get_route import _get_route
2625from ._get_route import _get_route_frequencies
3130from .cutting_lines import split_lines_by_nearest_point
3231from .network import Network
3332from .networkanalysisrules import NetworkAnalysisRules
34- from .nodes import _map_node_ids_from_wkt
35- from .nodes import make_node_ids
3633
3734
3835class NetworkAnalysis :
@@ -413,8 +410,8 @@ def od_cost_matrix(
413410 if lines :
414411 results = _push_geom_col (results )
415412
416- if self .rules .split_lines :
417- self ._unsplit_network ()
413+ # if self.rules.split_lines:
414+ # self._unsplit_network()
418415
419416 if self ._log :
420417 minutes_elapsed = round ((perf_counter () - time_ ) / 60 , 1 )
@@ -650,8 +647,8 @@ def multiindex_mapper(x: tuple[int, int]) -> tuple[int, int]:
650647
651648 results .geometry = force_2d (results .geometry )
652649
653- if self .rules .split_lines :
654- self ._unsplit_network ()
650+ # if self.rules.split_lines:
651+ # self._unsplit_network()
655652
656653 if self ._log :
657654 minutes_elapsed = round ((perf_counter () - time_ ) / 60 , 1 )
@@ -760,8 +757,8 @@ def get_route(
760757 results ["origin" ] = results ["origin" ].map (self .origins .idx_dict )
761758 results ["destination" ] = results ["destination" ].map (self .destinations .idx_dict )
762759
763- if self .rules .split_lines :
764- self ._unsplit_network ()
760+ # if self.rules.split_lines:
761+ # self._unsplit_network()
765762
766763 if self ._log :
767764 minutes_elapsed = round ((perf_counter () - time_ ) / 60 , 1 )
@@ -928,8 +925,8 @@ def get_k_routes(
928925 if isinstance (results , GeoDataFrame ):
929926 results = _push_geom_col (results )
930927
931- if self .rules .split_lines :
932- self ._unsplit_network ()
928+ # if self.rules.split_lines:
929+ # self._unsplit_network()
933930
934931 if self ._log :
935932 minutes_elapsed = round ((perf_counter () - time_ ) / 60 , 1 )
@@ -1054,8 +1051,8 @@ def service_area(
10541051
10551052 results = _push_geom_col (results )
10561053
1057- if self .rules .split_lines :
1058- self ._unsplit_network ()
1054+ # if self.rules.split_lines:
1055+ # self._unsplit_network()
10591056
10601057 if self ._log :
10611058 minutes_elapsed = round ((perf_counter () - time_ ) / 60 , 1 )
@@ -1183,8 +1180,8 @@ def precice_service_area(
11831180
11841181 results = _push_geom_col (results )
11851182
1186- if self .rules .split_lines :
1187- self ._unsplit_network ()
1183+ # if self.rules.split_lines:
1184+ # self._unsplit_network()
11881185
11891186 if self ._log :
11901187 minutes_elapsed = round ((perf_counter () - time_ ) / 60 , 1 )
@@ -1371,6 +1368,8 @@ def _prepare_network_analysis(
13711368 "rowwise=True"
13721369 )
13731370
1371+ self ._unsplit_network ()
1372+
13741373 self .network .gdf = self .rules ._validate_weight (self .network .gdf )
13751374
13761375 self .origins = Origins (origins )
@@ -1383,6 +1382,7 @@ def _prepare_network_analysis(
13831382 self .destinations ._make_temp_idx (
13841383 start = max (self .origins .gdf ["temp_idx" ].astype (int )) + 1
13851384 )
1385+
13861386 else :
13871387 self .destinations = None
13881388
@@ -1413,12 +1413,13 @@ def _get_edges_and_weights(
14131413 """
14141414 if self .rules .split_lines :
14151415 self ._split_lines ()
1416+ self .network ._make_node_ids ()
14161417 self .origins ._make_temp_idx (
1417- start = max (self .network .nodes . node_id .astype (int )) + 1
1418+ start = max(self.network.nodes["node_id"].astype(int)) + 1
14181419 )
14191420 if self .destinations is not None :
14201421 self .destinations ._make_temp_idx (
1421- start = max (self .origins .gdf . temp_idx .astype (int )) + 1
1422+ start = max(self.origins.gdf["temp_idx"].astype(int)) + 1
14221423 )
14231424
14241425 edges : list [tuple [str , str ]] = self .network .get_edges ()
@@ -1427,7 +1428,6 @@ def _get_edges_and_weights(
14271428
14281429 self .network .gdf ["src_tgt_wt" ] = self .network ._create_edge_ids (edges , weights )
14291430
1430- # add edges between origins+destinations to the network nodes
14311431 edges_start , weights_start = self .origins ._get_edges_and_weights (
14321432 nodes = self .network .nodes ,
14331433 rules = self .rules ,
@@ -1484,7 +1484,7 @@ def _split_lines(self) -> None:
14841484
14851485 points = points .drop_duplicates (points .geometry .name )
14861486
1487- self .network .gdf ["_meters2 " ] = self .network .gdf .length
1487+ self.network.gdf["meters_"] = self.network.gdf.length
14881488
14891489 # create an id from before the split, used to revert the split later
14901490 self .network .gdf ["temp_idx__" ] = range (len (self .network .gdf ))
@@ -1497,39 +1497,16 @@ def _split_lines(self) -> None:
14971497 )
14981498
14991499 # save the unsplit lines for later
1500- splitted = lines .loc [lines ["splitted" ] == 1 ]
1500+ splitted = lines .loc [lines ["splitted" ] == 1 , "temp_idx__" ]
15011501 self .network ._not_splitted = self .network .gdf .loc [
1502- lambda x : x ["temp_idx__" ].isin (splitted [ "temp_idx__" ] )
1502+ self . network . gdf ["temp_idx__" ].isin (splitted )
15031503 ]
15041504
1505- new_lines , new_nodes = make_node_ids (splitted )
1506- new_nodes = sfilter_inverse (new_nodes , self .network .nodes .buffer (1e-5 ))
1507- new_nodes ["node_id" ] = (
1508- new_nodes ["node_id" ].astype (int ) + len (self .network .nodes ) + 1
1509- ).astype (str )
1510- self .network ._new_node_ids = list (new_nodes ["node_id" ])
1511-
15121505 # adjust weight to new length
1513- new_lines [self .rules .weight ] = new_lines [self .rules .weight ] * (
1514- new_lines .length / new_lines ["_meters2" ]
1515- )
1516- self .network ._nodes = pd .concat (
1517- [self .network ._nodes , new_nodes ],
1518- ignore_index = True ,
1519- )
1520-
1521- lines = pd .concat (
1522- [
1523- self .network .gdf .loc [
1524- lambda x : ~ x ["temp_idx__" ].isin (splitted ["temp_idx__" ])
1525- ],
1526- new_lines ,
1527- ],
1528- ignore_index = True ,
1506+ lines [self .rules .weight ] = lines [self .rules .weight ] * (
1507+ lines .length / lines ["meters_" ]
15291508 )
15301509
1531- lines = _map_node_ids_from_wkt (lines , self .network ._nodes )
1532-
15331510 self .network .gdf = lines
15341511
15351512 def _unsplit_network (self ):
@@ -1540,10 +1517,7 @@ def _unsplit_network(self):
15401517 self .network .gdf = pd .concat (
15411518 [lines , self .network ._not_splitted ], ignore_index = True
15421519 ).drop ("temp_idx__" , axis = 1 )
1543- self .network ._nodes = self .network ._nodes [
1544- lambda x : ~ x ["node_id" ].isin (self .network ._new_node_ids )
1545- ]
1546- del self .network ._not_splitted , self .network ._new_node_ids
1520+ del self .network ._not_splitted
15471521
15481522 @staticmethod
15491523 def _make_graph (
@@ -1587,7 +1561,7 @@ def _graph_is_up_to_date(self) -> bool:
15871561 for points in ["origins" , "destinations" ]:
15881562 if self [points ] is None :
15891563 continue
1590- if not hasattr ( self , points ) or self [ points ] is None :
1564+ if points not in self . wkts :
15911565 return False
15921566 if self ._points_have_changed (self [points ].gdf , what = points ):
15931567 return False
@@ -1603,7 +1577,7 @@ def _points_have_changed(self, points: GeoDataFrame, what: str) -> bool:
16031577 if not np .array_equal (self .wkts [what ], points .geometry .to_wkt ().values ):
16041578 return True
16051579
1606- if not all (x in self .graph .vs ["name" ] for x in list (points . temp_idx .values )):
1580+ if not all(x in self.graph.vs["name"] for x in list(points["temp_idx"].values)):
16071581 return True
16081582
16091583 return False
@@ -1617,6 +1591,8 @@ def _update_wkts(self) -> None:
16171591 """
16181592 self .wkts = {}
16191593
1594+ self .wkts ["network" ] = self .network .gdf .geometry .to_wkt ().values
1595+
16201596 if not hasattr (self , "origins" ):
16211597 return
16221598
0 commit comments