diff --git a/ORMetroModel/.gitattributes b/ORMetroModel/.gitattributes
new file mode 100644
index 0000000..dd16bb2
--- /dev/null
+++ b/ORMetroModel/.gitattributes
@@ -0,0 +1,3 @@
+# Auto detect text files and perform LF normalization
+* text=auto
+*.ver filter=lfs diff=lfs merge=lfs -text
diff --git a/ORMetroModel/config/config.properties b/ORMetroModel/config/config.properties
new file mode 100644
index 0000000..5de2640
--- /dev/null
+++ b/ORMetroModel/config/config.properties
@@ -0,0 +1,56 @@
+#OR-RAMP TM2 Properties
+
+#############################################################################################################################################################################
+#
+# CLUSTER PROPERTIES: MODIFY WHEN CHANGING CLUSTER CONFIGURATION OR MOVING TO NEW CLUSTER.
+#
+#############################################################################################################################################################################
+RunModel.MatrixServerAddress=%HOST_IP_ADDRESS%
+RunModel.MatrixServerPort=1191
+RunModel.HouseholdServerAddress=%HOST_IP_ADDRESS%
+RunModel.HouseholdServerPort = 1117
+
+
+#############################################################################################################################################################################
+#
+# LOGGING AND DEBUGGING PROPERTIES: USE FOR TRACING HOUSEHOLDS OR AGENTS THROUGH SIMULATION.
+#
+# Note that the way that logging works right now, the trace zones also have to be valid transit stops or the code will crash. Check the skims to make sure they exist.
+# Turn off trace debugging in routine model runs to speed things up (comment out Debug.Trace.HouseholdIdList)
+#
+#############################################################################################################################################################################
+#Resident model
+Trace = true
+Trace.otaz = 0
+Trace.dtaz = 0
+Seek = false
+Process.Debug.HHs.Only = false
+Debug.Trace.HouseholdIdList=3946
+#run.this.household.only=1011947
+
+# save tour mode choice utilities and probabilities (for debugging purpose)
+TourModeChoice.Save.UtilsAndProbs = true
+
+
+#############################################################################################################################################################################
+#
+# PATH PROPERTIES: MODIFY AS NEEDED WHEN COPY RELEASE TO A LOCAL RUN FOLDER
+#
+#############################################################################################################################################################################
+Project.Directory = %project.folder%
+# Input path prefix
+generic.path = %project.folder%
+scenario.path = %project.folder%/
+skims.path = %project.folder%/outputs/skims
+uec.path = %project.folder%/uec/
+
+
+#############################################################################################################################################################################
+#
+# INPUT PROPERTIES
+#
+#############################################################################################################################################################################
+
+#Input Version file
+Input.Version.File = LCOG_v4.ver
+#Input.Version.File = inputs/LCOG_v4.ver
diff --git a/ORMetroModel/config/visum_lcog/FeedbackAssignmentSequence_2.xml b/ORMetroModel/config/visum_lcog/FeedbackAssignmentSequence_2.xml
new file mode 100644
index 0000000..d2408cc
--- /dev/null
+++ b/ORMetroModel/config/visum_lcog/FeedbackAssignmentSequence_2.xml
@@ -0,0 +1,809 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ORMetroModel/config/visum_lcog/FinalAssignmentSequence_3.xml b/ORMetroModel/config/visum_lcog/FinalAssignmentSequence_3.xml
new file mode 100644
index 0000000..398afaf
--- /dev/null
+++ b/ORMetroModel/config/visum_lcog/FinalAssignmentSequence_3.xml
@@ -0,0 +1,1054 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ORMetroModel/config/visum_lcog/SkimSequence_1.xml b/ORMetroModel/config/visum_lcog/SkimSequence_1.xml
new file mode 100644
index 0000000..1e14574
--- /dev/null
+++ b/ORMetroModel/config/visum_lcog/SkimSequence_1.xml
@@ -0,0 +1,1020 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ORMetroModel/config/visum_lcog/WarmStartSequence_0.xml b/ORMetroModel/config/visum_lcog/WarmStartSequence_0.xml
new file mode 100644
index 0000000..32f8ac5
--- /dev/null
+++ b/ORMetroModel/config/visum_lcog/WarmStartSequence_0.xml
@@ -0,0 +1,599 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ORMetroModel/config/visum_metro/OriginalSequence_052025.xml b/ORMetroModel/config/visum_metro/OriginalSequence_052025.xml
new file mode 100644
index 0000000..5b9fa45
--- /dev/null
+++ b/ORMetroModel/config/visum_metro/OriginalSequence_052025.xml
@@ -0,0 +1,566 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ORMetroModel/config/visum_metro/SkimSequence_1.xml b/ORMetroModel/config/visum_metro/SkimSequence_1.xml
new file mode 100644
index 0000000..f54d267
--- /dev/null
+++ b/ORMetroModel/config/visum_metro/SkimSequence_1.xml
@@ -0,0 +1,2657 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ORMetroModel/filters/Link_AllowsAuto.fil b/ORMetroModel/filters/Link_AllowsAuto.fil
new file mode 100644
index 0000000..f0061cd
--- /dev/null
+++ b/ORMetroModel/filters/Link_AllowsAuto.fil
@@ -0,0 +1,12 @@
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ORMetroModel/filters/Link_TypeNo1+.fil b/ORMetroModel/filters/Link_TypeNo1+.fil
new file mode 100644
index 0000000..34c8656
--- /dev/null
+++ b/ORMetroModel/filters/Link_TypeNo1+.fil
@@ -0,0 +1,12 @@
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ORMetroModel/filters/OP_Transit.fil b/ORMetroModel/filters/OP_Transit.fil
new file mode 100644
index 0000000..1ace9d6
--- /dev/null
+++ b/ORMetroModel/filters/OP_Transit.fil
@@ -0,0 +1,42 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ORMetroModel/filters/PK_Transit.fil b/ORMetroModel/filters/PK_Transit.fil
new file mode 100644
index 0000000..f53febb
--- /dev/null
+++ b/ORMetroModel/filters/PK_Transit.fil
@@ -0,0 +1,42 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ORMetroModel/filters/TSys_bus.fil b/ORMetroModel/filters/TSys_bus.fil
new file mode 100644
index 0000000..b0319ef
--- /dev/null
+++ b/ORMetroModel/filters/TSys_bus.fil
@@ -0,0 +1,42 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ORMetroModel/filters/TSys_nonbus.fil b/ORMetroModel/filters/TSys_nonbus.fil
new file mode 100644
index 0000000..e30f8ea
--- /dev/null
+++ b/ORMetroModel/filters/TSys_nonbus.fil
@@ -0,0 +1,42 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ORMetroModel/outputs/assignment/feedback/.gitignore b/ORMetroModel/outputs/assignment/feedback/.gitignore
new file mode 100644
index 0000000..86d0cb2
--- /dev/null
+++ b/ORMetroModel/outputs/assignment/feedback/.gitignore
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
\ No newline at end of file
diff --git a/ORMetroModel/outputs/assignment/final_assign/.gitignore b/ORMetroModel/outputs/assignment/final_assign/.gitignore
new file mode 100644
index 0000000..86d0cb2
--- /dev/null
+++ b/ORMetroModel/outputs/assignment/final_assign/.gitignore
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
\ No newline at end of file
diff --git a/ORMetroModel/outputs/reports/.gitignore b/ORMetroModel/outputs/reports/.gitignore
new file mode 100644
index 0000000..86d0cb2
--- /dev/null
+++ b/ORMetroModel/outputs/reports/.gitignore
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
\ No newline at end of file
diff --git a/ORMetroModel/outputs/skims/.gitignore b/ORMetroModel/outputs/skims/.gitignore
new file mode 100644
index 0000000..86d0cb2
--- /dev/null
+++ b/ORMetroModel/outputs/skims/.gitignore
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
\ No newline at end of file
diff --git a/ORMetroModel/outputs/trips/logs/.gitignore b/ORMetroModel/outputs/trips/logs/.gitignore
new file mode 100644
index 0000000..86d0cb2
--- /dev/null
+++ b/ORMetroModel/outputs/trips/logs/.gitignore
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
\ No newline at end of file
diff --git a/ORMetroModel/outputs/trips/model/peak/.gitignore b/ORMetroModel/outputs/trips/model/peak/.gitignore
new file mode 100644
index 0000000..86d0cb2
--- /dev/null
+++ b/ORMetroModel/outputs/trips/model/peak/.gitignore
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
\ No newline at end of file
diff --git a/ORMetroModel/outputs/trips/model/reports/.gitignore b/ORMetroModel/outputs/trips/model/reports/.gitignore
new file mode 100644
index 0000000..86d0cb2
--- /dev/null
+++ b/ORMetroModel/outputs/trips/model/reports/.gitignore
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
\ No newline at end of file
diff --git a/ORMetroModel/outputs/trips/model_hbo/.gitignore b/ORMetroModel/outputs/trips/model_hbo/.gitignore
new file mode 100644
index 0000000..86d0cb2
--- /dev/null
+++ b/ORMetroModel/outputs/trips/model_hbo/.gitignore
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
\ No newline at end of file
diff --git a/ORMetroModel/outputs/trips/model_hbr/.gitignore b/ORMetroModel/outputs/trips/model_hbr/.gitignore
new file mode 100644
index 0000000..86d0cb2
--- /dev/null
+++ b/ORMetroModel/outputs/trips/model_hbr/.gitignore
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
\ No newline at end of file
diff --git a/ORMetroModel/outputs/trips/model_hbs/.gitignore b/ORMetroModel/outputs/trips/model_hbs/.gitignore
new file mode 100644
index 0000000..86d0cb2
--- /dev/null
+++ b/ORMetroModel/outputs/trips/model_hbs/.gitignore
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
\ No newline at end of file
diff --git a/ORMetroModel/outputs/trips/model_hbw/.gitignore b/ORMetroModel/outputs/trips/model_hbw/.gitignore
new file mode 100644
index 0000000..86d0cb2
--- /dev/null
+++ b/ORMetroModel/outputs/trips/model_hbw/.gitignore
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
\ No newline at end of file
diff --git a/ORMetroModel/outputs/trips/model_nh/.gitignore b/ORMetroModel/outputs/trips/model_nh/.gitignore
new file mode 100644
index 0000000..86d0cb2
--- /dev/null
+++ b/ORMetroModel/outputs/trips/model_nh/.gitignore
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
\ No newline at end of file
diff --git a/ORMetroModel/outputs/trips/model_sc/.gitignore b/ORMetroModel/outputs/trips/model_sc/.gitignore
new file mode 100644
index 0000000..86d0cb2
--- /dev/null
+++ b/ORMetroModel/outputs/trips/model_sc/.gitignore
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
\ No newline at end of file
diff --git a/ORMetroModel/scripts/AssignmentMetrics_Export.py b/ORMetroModel/scripts/AssignmentMetrics_Export.py
new file mode 100644
index 0000000..9b43438
--- /dev/null
+++ b/ORMetroModel/scripts/AssignmentMetrics_Export.py
@@ -0,0 +1,837 @@
+# SRTC: Full script to handle Assignment Summary, Model Reporting, and Results export. Uses same timestamped folder for all outputs
+# Adapted to LCOG, only outputs Assignment Summary
+
+"""
+created 5/13/2025
+
+@author: luke.gordon
+
+"""
+
+# Libraries
+import VisumPy.helpers
+import VisumPy.excel
+import pandas as pd
+import numpy as np
+import csv
+from datetime import datetime
+import math
+import os.path
+
+
+# Pull timestamp for folder name from Visum network attribute
+date = Visum.Net.AttValue("output_date")
+
+# Read user inputs from Visum
+proj_dir = Visum.GetPath(2)
+
+# Formatting functions
+# Formatting for columns
+# Create formatting function for large numbers (no decimals and thousand commas)
+def format_commas(column):
+ return column.apply(lambda x: '{:,.0f}'.format(x) if pd.notna(x) else None)
+# Create formatting function for percentages (2 decimals and percent symbol, also multiplies by 100)
+def format_percent(column):
+ return column.apply(lambda x: '{:.2%}'.format(x) if pd.notna(x) else None)
+# Create formatting function for small numbers (2 decimals)
+def format_twoplaces(column):
+ return column.apply(lambda x: '{:.2f}'.format(x) if pd.notna(x) else None)
+# Create formatting function for small numbers (1 decimal)
+def format_oneplace(column):
+ return column.apply(lambda x: '{:.1f}'.format(x) if pd.notna(x) else None)
+# Create formatting function for small numbers (0 decimals)
+def format_zeroplaces(column):
+ return column.apply(lambda x: '{:.0f}'.format(x) if pd.notna(x) else None)
+
+
+# Formatting for single cells
+# Create formatting function for large numbers (no decimals and thousand commas)
+def format_commas_cell(cell_value):
+ return '{:,.0f}'.format(cell_value) if pd.notna(cell_value) else None
+# Create formatting function for percentages (2 decimals and percent symbol, also multiplies by 100)
+def format_percent_cell(cell_value):
+ return '{:.2%}'.format(cell_value) if pd.notna(cell_value) else None
+# Create formatting function for small numbers (2 decimals)
+def format_twoplaces_cell(cell_value):
+ return '{:.2f}'.format(cell_value) if pd.notna(cell_value) else None
+# Create formatting function for small numbers (0 decimals)
+def format_zeroplaces_cell(cell_value):
+ return '{:.0f}'.format(cell_value) if pd.notna(cell_value) else None
+
+
+# Round to nearest even number function. Used in Volume corridor reporting
+def round_to_nearest_even(number):
+ rounded = round(number)
+ return rounded + (rounded % 2 == 1)
+
+# Create Percent Error function
+def pct_error(count , flow):
+ error = ((sum(flow)/len(flow)) - (sum(count)/len(count))) / (sum(count)/len(count))
+
+ return error
+
+# Create Percent RMSE function
+def pct_rmse(count , sqerror):
+ rmse = math.sqrt(sum(sqerror)/len(sqerror))/(sum(count)/len(count))
+
+ return rmse
+
+# Create VMT function (needs to pull data from all links, not just ones with counts)
+def vmt(flow , length):
+ vmt = sum(flow*length)
+
+ return vmt
+
+
+# Create VHT function (needs to pull data from all links and periods, not just ones with counts)
+ # Need to have logic to handle daily vs. a single period
+def vht_dly(am_flow, am_time, pm_flow, pm_time, op_flow, op_time):
+ vht = sum(am_flow*(am_time/3600)) + sum(pm_flow*(pm_time/3600)) + sum(op_flow*(op_time/3600))
+ return vht
+def vht_per(flow, time):
+ vht = sum(flow*time)
+ return vht
+
+
+
+# Assignment summary function
+def assignment_summary(auto_count, sut_count, mut_count, all_count, auto_flow, sut_flow, mut_flow, all_flow): #, cong_auto_time, cong_trk_time, period):
+ # DAILY
+ # Percent Error and Percent RMSE
+ # Import ID fields and fields with Counts and Flows
+ # Link ID fields
+ NO = VisumPy.helpers.GetMulti(Visum.Net.Links,"No", activeOnly = True)
+ FCLASS = VisumPy.helpers.GetMulti(Visum.Net.Links,"TYPENO", activeOnly = True)
+ LENGTH = VisumPy.helpers.GetMulti(Visum.Net.Links,"Length", activeOnly = True)
+ SCRNLINE = VisumPy.helpers.GetMulti(Visum.Net.Links,r"CONCATENATE:SCREENLINES\CODE", activeOnly = True)
+
+ # Counts
+ Auto_Count = VisumPy.helpers.GetMulti(Visum.Net.Links,auto_count, activeOnly = True)
+ SUT_Count = VisumPy.helpers.GetMulti(Visum.Net.Links,sut_count, activeOnly = True)
+ MUT_Count = VisumPy.helpers.GetMulti(Visum.Net.Links,mut_count, activeOnly = True)
+ Tot_Count = VisumPy.helpers.GetMulti(Visum.Net.Links,all_count, activeOnly = True)
+ AADT = VisumPy.helpers.GetMulti(Visum.Net.Links,'AADT', activeOnly = True)
+ # Link Daily Flows
+ Auto_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,auto_flow, activeOnly = True)
+ SUT_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,sut_flow, activeOnly = True)
+ MUT_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,mut_flow, activeOnly = True)
+ Tot_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,all_flow, activeOnly = True)
+
+ # Screenline Names for Report
+ SL_NAME = VisumPy.helpers.GetMulti(Visum.Net.Screenlines,"Name", activeOnly = True)
+ SL_NAME = list(dict.fromkeys(SL_NAME))
+
+ # Make Visum list with link data
+ summary_list = [NO, FCLASS, SCRNLINE, LENGTH, Auto_Flow, SUT_Flow, MUT_Flow, Tot_Flow, Auto_Count, SUT_Count, MUT_Count, Tot_Count, AADT]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(summary_list), columns = ['NO', 'FCLASS', 'SCRNLINE', 'LENGTH',
+ 'Auto_Flow', 'SUT_Flow', 'MUT_Flow', 'Tot_Flow',
+ 'Auto_Count', 'SUT_Count', 'MUT_Count', 'Tot_Count','AADT'])
+
+ # Break out SCRNLINE field to separate by commas into individual columns
+ df[['SCRNLINE']] = df[['SCRNLINE']].astype(str)
+ df = pd.concat([df,df['SCRNLINE'].str.split(',', expand = True)], axis = 1)
+ # Change Screenline field names
+ if 1 not in df:
+ df[1] = None
+ df = df.rename(columns = {0:'SCRNLINE1',1:'SCRNLINE2'})
+ # Replace null values with 0 in the screenline fields
+ df['SCRNLINE1'] = df['SCRNLINE1'].replace('',np.nan).fillna(0)
+ df['SCRNLINE2'] = df['SCRNLINE2'].replace('',np.nan).fillna(0)
+
+ # Define custom_sum function to maintain null values when aggregating Counts and Flows by LinkNO
+ def custom_sum(series):
+ # If all values are null, return null; otherwise, return the sum of the values
+ return series.sum() if series.notna().any() else None
+ ## GROUP EACH DATAFRAME BY 'NO' COLUMN TO COMBINE COUNTS ON EACH LINK INTO BOTH DIRECTIONS
+ df = df.groupby('NO').agg(
+ FCLASS =('FCLASS', 'max'),
+ LENGTH =('LENGTH', 'max'),
+ SCRNLINE1 =('SCRNLINE1', 'first'),
+ SCRNLINE2 =('SCRNLINE2', 'first'),
+ Auto_Flow =('Auto_Flow', custom_sum),
+ SUT_Flow =('SUT_Flow', custom_sum),
+ MUT_Flow =('MUT_Flow', custom_sum),
+ Tot_Flow =('Tot_Flow', custom_sum),
+ Auto_Count =('Auto_Count', custom_sum),
+ SUT_Count =('SUT_Count', custom_sum),
+ MUT_Count =('MUT_Count', custom_sum),
+ Tot_Count =('Tot_Count', custom_sum),
+ AADT =('AADT', custom_sum)).reset_index()
+
+ # Drop AADT from df and join in AADT from df_aadt (to have 2-way AADT by link)
+ #df = df.drop('AADT', axis=1)
+ #df = pd.merge(df, df_aadt, on='NO', how='left')
+
+ # Convert FCLASS to integer
+ df[['FCLASS']] = df[['FCLASS']].astype(int)
+
+ # Convert SCRNLINE1 and SCRNLINE2 to Integer
+ df[['SCRNLINE1','SCRNLINE2']] = df[['SCRNLINE1','SCRNLINE2']].astype(float)
+ df[['SCRNLINE1','SCRNLINE2']] = df[['SCRNLINE1','SCRNLINE2']].astype(int)
+
+
+ # Build results dictionary to use as results dataframe to save summary stats for group stats
+ results = {"Segment":['Auto',
+ 'Auto: AADT <5k','Auto: AADT 5-10k','Auto: AADT 10-15k','Auto: AADT 15-20k','Auto: AADT 20-30k','Auto: AADT 30-40k','Auto: AADT 40-50k',
+ 'Auto: LinkType 1', 'Auto: LinkType 2', 'Auto: LinkType 3', 'Auto: LinkType 4', 'Auto: LinkType 5', 'Auto: LinkType 6', 'Auto: LinkType 7',
+ 'Auto: LinkType 8', 'Auto: LinkType 9', 'Auto: LinkType 10', 'Auto: LinkType 11', 'Auto: LinkType 12', 'Auto: LinkType 30', 'Auto: LinkType 32',
+ 'Auto: SL '+SL_NAME[0] ,'Auto: SL '+SL_NAME[1] ,'Auto: SL '+SL_NAME[2] ,'Auto: SL '+SL_NAME[3] ,'Auto: SL '+SL_NAME[4] ,'Auto: SL '+SL_NAME[5] ,'Auto: SL '+SL_NAME[6] ,
+ 'Auto: SL '+SL_NAME[7] ,'Auto: SL '+SL_NAME[8] ,'Auto: SL '+SL_NAME[9] ,'Auto: SL '+SL_NAME[10],'Auto: SL '+SL_NAME[11],'Auto: SL '+SL_NAME[12],
+ 'SUT',
+ 'SUT: AADT <5k','SUT: AADT 5-10k','SUT: AADT 10-15k','SUT: AADT 15-20k','SUT: AADT 20-30k','SUT: AADT 30-40k','SUT: AADT 40-50k',
+ 'SUT: LinkType 1', 'SUT: LinkType 2', 'SUT: LinkType 3', 'SUT: LinkType 4', 'SUT: LinkType 5', 'SUT: LinkType 6', 'SUT: LinkType 7',
+ 'SUT: LinkType 8', 'SUT: LinkType 9', 'SUT: LinkType 10', 'SUT: LinkType 11', 'SUT: LinkType 12', 'SUT: LinkType 30', 'SUT: LinkType 32',
+ 'SUT: SL '+SL_NAME[0] ,'SUT: SL '+SL_NAME[1] ,'SUT: SL '+SL_NAME[2] ,'SUT: SL '+SL_NAME[3] ,'SUT: SL '+SL_NAME[4] ,'SUT: SL '+SL_NAME[5] ,'SUT: SL '+SL_NAME[6] ,
+ 'SUT: SL '+SL_NAME[7] ,'SUT: SL '+SL_NAME[8] ,'SUT: SL '+SL_NAME[9] ,'SUT: SL '+SL_NAME[10],'SUT: SL '+SL_NAME[11],'SUT: SL '+SL_NAME[12],
+ 'MUT',
+ 'MUT: AADT <5k','MUT: AADT 5-10k','MUT: AADT 10-15k','MUT: AADT 15-20k','MUT: AADT 20-30k','MUT: AADT 30-40k','MUT: AADT 40-50k',
+ 'MUT: LinkType 1', 'MUT: LinkType 2', 'MUT: LinkType 3', 'MUT: LinkType 4', 'MUT: LinkType 5', 'MUT: LinkType 6', 'MUT: LinkType 7',
+ 'MUT: LinkType 8', 'MUT: LinkType 9', 'MUT: LinkType 10', 'MUT: LinkType 11', 'MUT: LinkType 12', 'MUT: LinkType 30', 'MUT: LinkType 32',
+ 'MUT: SL '+SL_NAME[0] ,'MUT: SL '+SL_NAME[1] ,'MUT: SL '+SL_NAME[2] ,'MUT: SL '+SL_NAME[3] ,'MUT: SL '+SL_NAME[4] ,'MUT: SL '+SL_NAME[5] ,'MUT: SL '+SL_NAME[6] ,
+ 'MUT: SL '+SL_NAME[7],'MUT: SL '+SL_NAME[8] ,'MUT: SL '+SL_NAME[9] ,'MUT: SL '+SL_NAME[10],'MUT: SL '+SL_NAME[11],'MUT: SL '+SL_NAME[12],
+ 'All Modes',
+ 'All Modes: AADT <5k','All Modes: AADT 5-10k','All Modes: AADT 10-15k','All Modes: AADT 15-20k','All Modes: AADT 20-30k','All Modes: AADT 30-40k','All Modes: AADT 40-50k',
+ 'All Modes: LinkType 1', 'All Modes: LinkType 2', 'All Modes: LinkType 3', 'All Modes: LinkType 4', 'All Modes: LinkType 5', 'All Modes: LinkType 6', 'All Modes: LinkType 7',
+ 'All Modes: LinkType 8', 'All Modes: LinkType 9', 'All Modes: LinkType 10', 'All Modes: LinkType 11', 'All Modes: LinkType 12', 'All Modes: LinkType 30', 'All Modes: LinkType 32',
+ 'All Modes: SL '+SL_NAME[0] ,'All Modes: SL '+SL_NAME[1] ,'All Modes: SL '+SL_NAME[2] ,'All Modes: SL '+SL_NAME[3] ,'All Modes: SL '+SL_NAME[4] ,'All Modes: SL '+SL_NAME[5] ,
+ 'All Modes: SL '+SL_NAME[6] ,'All Modes: SL '+SL_NAME[7] ,'All Modes: SL '+SL_NAME[8] ,'All Modes: SL '+SL_NAME[9] ,'All Modes: SL '+SL_NAME[10],'All Modes: SL '+SL_NAME[11],
+ 'All Modes: SL '+SL_NAME[12]
+ ]}
+
+
+
+ # Plug results dictionary into results_df dataframe
+ results_df = pd.DataFrame(data = results)
+
+ # Add stats columns
+ results_df['Percent Error'] = None
+ results_df['Percent RMSE'] = None
+ results_df['Total VMT'] = None
+ results_df['Total VHT'] = None
+ results_df['Number of Observations'] = None
+ results_df['Sum of Counts'] = None
+ results_df['Mean of Counts'] = None
+ results_df['Median of Counts'] = None
+ results_df['Count VMT, Links with Counts'] = None
+ results_df['Modeled VMT, Links with Counts'] = None
+
+
+ # For links with counts only, used for Pct. Error and Pct. RMSE
+ # Filter out links where count is null or 0 and by each condition
+ # All Links with Auto Counts
+ count_df = df[df['Auto_Count'].notna()]
+
+ # By AADT Volume
+ under_5k_df = count_df[(count_df['AADT'] < 5000)]
+ btwn_5_10k_df = count_df[(count_df['AADT'] >= 5000) & (count_df['AADT'] < 10000)]
+ btwn_10_15k_df = count_df[(count_df['AADT'] >= 10000) & (count_df['AADT'] < 15000)]
+ btwn_15_20k_df = count_df[(count_df['AADT'] >= 15000) & (count_df['AADT'] < 20000)]
+ btwn_20_30k_df = count_df[(count_df['AADT'] >= 20000) & (count_df['AADT'] < 30000)]
+ btwn_30_40k_df = count_df[(count_df['AADT'] >= 30000) & (count_df['AADT'] < 40000)]
+ btwn_40_50k_df = count_df[(count_df['AADT'] >= 40000) & (count_df['AADT'] < 50000)]
+ #over_50k_df = count_df[(count_df['AADT'] >= 50000)]
+ # By Functional Class
+ fc1_df = count_df[(count_df['FCLASS'] == 1)]
+ fc2_df = count_df[(count_df['FCLASS'] == 2)]
+ fc3_df = count_df[(count_df['FCLASS'] == 3)]
+ fc4_df = count_df[(count_df['FCLASS'] == 4)]
+ fc5_df = count_df[(count_df['FCLASS'] == 5)]
+ fc6_df = count_df[(count_df['FCLASS'] == 6)]
+ fc7_df = count_df[(count_df['FCLASS'] == 7)]
+ fc8_df = count_df[(count_df['FCLASS'] == 8)]
+ fc9_df = count_df[(count_df['FCLASS'] == 9)]
+ fc10_df = count_df[(count_df['FCLASS'] == 10)]
+ fc11_df = count_df[(count_df['FCLASS'] == 11)]
+ fc12_df = count_df[(count_df['FCLASS'] == 12)]
+ fc30_df = count_df[(count_df['FCLASS'] == 30)]
+ fc32_df = count_df[(count_df['FCLASS'] == 32)]
+
+ # By Screenline
+ sl_1_df = count_df[(count_df['SCRNLINE1'] == 1) | (count_df['SCRNLINE2'] == 1)]
+ sl_2_df = count_df[(count_df['SCRNLINE1'] == 2) | (count_df['SCRNLINE2'] == 2)]
+ sl_3_df = count_df[(count_df['SCRNLINE1'] == 3) | (count_df['SCRNLINE2'] == 3)]
+ sl_4_df = count_df[(count_df['SCRNLINE1'] == 4) | (count_df['SCRNLINE2'] == 4)]
+ sl_5_df = count_df[(count_df['SCRNLINE1'] == 5) | (count_df['SCRNLINE2'] == 5)]
+ sl_6_df = count_df[(count_df['SCRNLINE1'] == 6) | (count_df['SCRNLINE2'] == 6)]
+ sl_7_df = count_df[(count_df['SCRNLINE1'] == 7) | (count_df['SCRNLINE2'] == 7)]
+ sl_8_df = count_df[(count_df['SCRNLINE1'] == 8) | (count_df['SCRNLINE2'] == 8)]
+ sl_9_df = count_df[(count_df['SCRNLINE1'] == 9) | (count_df['SCRNLINE2'] == 9)]
+ sl_10_df = count_df[(count_df['SCRNLINE1'] == 10) | (count_df['SCRNLINE2'] == 10)]
+ sl_11_df = count_df[(count_df['SCRNLINE1'] == 11) | (count_df['SCRNLINE2'] == 11)]
+ sl_12_df = count_df[(count_df['SCRNLINE1'] == 12) | (count_df['SCRNLINE2'] == 12)]
+ sl_13_df = count_df[(count_df['SCRNLINE1'] == 13) | (count_df['SCRNLINE2'] == 13)]
+ #sl_14_df = count_df[(count_df['SCRNLINE1'] == 14) | (count_df['SCRNLINE2'] == 14)]
+ #sl_15_df = count_df[(count_df['SCRNLINE1'] == 15) | (count_df['SCRNLINE2'] == 15)]
+ #sl_16_df = count_df[(count_df['SCRNLINE1'] == 16) | (count_df['SCRNLINE2'] == 16)]
+ #sl_17_df = count_df[(count_df['SCRNLINE1'] == 17) | (count_df['SCRNLINE2'] == 17)]
+ #sl_18_df = count_df[(count_df['SCRNLINE1'] == 18) | (count_df['SCRNLINE2'] == 18)]
+ #sl_19_df = count_df[(count_df['SCRNLINE1'] == 19) | (count_df['SCRNLINE2'] == 19)]
+ #sl_20_df = count_df[(count_df['SCRNLINE1'] == 20) | (count_df['SCRNLINE2'] == 20)]
+ #sl_21_df = count_df[(count_df['SCRNLINE1'] == 21) | (count_df['SCRNLINE2'] == 21)]
+ #sl_22_df = count_df[(count_df['SCRNLINE1'] == 22) | (count_df['SCRNLINE2'] == 22)]
+ #sl_23_df = count_df[(count_df['SCRNLINE1'] == 23) | (count_df['SCRNLINE2'] == 23)]
+ #sl_24_df = count_df[(count_df['SCRNLINE1'] == 24) | (count_df['SCRNLINE2'] == 24)]
+ #sl_25_df = count_df[(count_df['SCRNLINE1'] == 25) | (count_df['SCRNLINE2'] == 25)]
+ #sl_26_df = count_df[(count_df['SCRNLINE1'] == 26) | (count_df['SCRNLINE2'] == 26)]
+
+ # Build list of dataframes to loop thru
+ auto_df_list = [count_df,#internal_df,external_df,
+ under_5k_df,btwn_5_10k_df,btwn_10_15k_df,btwn_15_20k_df,btwn_20_30k_df,btwn_30_40k_df,btwn_40_50k_df,
+ fc1_df,fc2_df,fc3_df,fc4_df,fc5_df,fc6_df,fc7_df,fc8_df,fc9_df,fc10_df,fc11_df,fc12_df,fc30_df,fc32_df,
+ sl_1_df,sl_2_df,sl_3_df,sl_4_df,sl_5_df,sl_6_df,sl_7_df,sl_8_df,sl_9_df,sl_10_df,sl_11_df,sl_12_df,sl_13_df]
+
+
+
+ # All Links with SUT Counts
+ count_df = df[df['SUT_Count'].notna()]
+ # By AADT Volume
+ under_5k_df = count_df[(count_df['AADT'] < 5000)]
+ btwn_5_10k_df = count_df[(count_df['AADT'] >= 5000) & (count_df['AADT'] < 10000)]
+ btwn_10_15k_df = count_df[(count_df['AADT'] >= 10000) & (count_df['AADT'] < 15000)]
+ btwn_15_20k_df = count_df[(count_df['AADT'] >= 15000) & (count_df['AADT'] < 20000)]
+ btwn_20_30k_df = count_df[(count_df['AADT'] >= 20000) & (count_df['AADT'] < 30000)]
+ btwn_30_40k_df = count_df[(count_df['AADT'] >= 30000) & (count_df['AADT'] < 40000)]
+ btwn_40_50k_df = count_df[(count_df['AADT'] >= 40000) & (count_df['AADT'] < 50000)]
+ #over_50k_df = count_df[(count_df['AADT'] >= 50000)]
+ # By Functional Class
+ fc1_df = count_df[(count_df['FCLASS'] == 1)]
+ fc2_df = count_df[(count_df['FCLASS'] == 2)]
+ fc3_df = count_df[(count_df['FCLASS'] == 3)]
+ fc4_df = count_df[(count_df['FCLASS'] == 4)]
+ fc5_df = count_df[(count_df['FCLASS'] == 5)]
+ fc6_df = count_df[(count_df['FCLASS'] == 6)]
+ fc7_df = count_df[(count_df['FCLASS'] == 7)]
+ fc8_df = count_df[(count_df['FCLASS'] == 8)]
+ fc9_df = count_df[(count_df['FCLASS'] == 9)]
+ fc10_df = count_df[(count_df['FCLASS'] == 10)]
+ fc11_df = count_df[(count_df['FCLASS'] == 11)]
+ fc12_df = count_df[(count_df['FCLASS'] == 12)]
+ fc30_df = count_df[(count_df['FCLASS'] == 30)]
+ fc32_df = count_df[(count_df['FCLASS'] == 32)]
+ # By Screenline
+ sl_1_df = count_df[(count_df['SCRNLINE1'] == 1) | (count_df['SCRNLINE2'] == 1)]
+ sl_2_df = count_df[(count_df['SCRNLINE1'] == 2) | (count_df['SCRNLINE2'] == 2)]
+ sl_3_df = count_df[(count_df['SCRNLINE1'] == 3) | (count_df['SCRNLINE2'] == 3)]
+ sl_4_df = count_df[(count_df['SCRNLINE1'] == 4) | (count_df['SCRNLINE2'] == 4)]
+ sl_5_df = count_df[(count_df['SCRNLINE1'] == 5) | (count_df['SCRNLINE2'] == 5)]
+ sl_6_df = count_df[(count_df['SCRNLINE1'] == 6) | (count_df['SCRNLINE2'] == 6)]
+ sl_7_df = count_df[(count_df['SCRNLINE1'] == 7) | (count_df['SCRNLINE2'] == 7)]
+ sl_8_df = count_df[(count_df['SCRNLINE1'] == 8) | (count_df['SCRNLINE2'] == 8)]
+ sl_9_df = count_df[(count_df['SCRNLINE1'] == 9) | (count_df['SCRNLINE2'] == 9)]
+ sl_10_df = count_df[(count_df['SCRNLINE1'] == 10) | (count_df['SCRNLINE2'] == 10)]
+ sl_11_df = count_df[(count_df['SCRNLINE1'] == 11) | (count_df['SCRNLINE2'] == 11)]
+ sl_12_df = count_df[(count_df['SCRNLINE1'] == 12) | (count_df['SCRNLINE2'] == 12)]
+ sl_13_df = count_df[(count_df['SCRNLINE1'] == 13) | (count_df['SCRNLINE2'] == 13)]
+ #sl_14_df = count_df[(count_df['SCRNLINE1'] == 14) | (count_df['SCRNLINE2'] == 14)]
+ #sl_15_df = count_df[(count_df['SCRNLINE1'] == 15) | (count_df['SCRNLINE2'] == 15)]
+ #sl_16_df = count_df[(count_df['SCRNLINE1'] == 16) | (count_df['SCRNLINE2'] == 16)]
+ #sl_17_df = count_df[(count_df['SCRNLINE1'] == 17) | (count_df['SCRNLINE2'] == 17)]
+ #sl_18_df = count_df[(count_df['SCRNLINE1'] == 18) | (count_df['SCRNLINE2'] == 18)]
+ #sl_19_df = count_df[(count_df['SCRNLINE1'] == 19) | (count_df['SCRNLINE2'] == 19)]
+ #sl_20_df = count_df[(count_df['SCRNLINE1'] == 20) | (count_df['SCRNLINE2'] == 20)]
+ #sl_21_df = count_df[(count_df['SCRNLINE1'] == 21) | (count_df['SCRNLINE2'] == 21)]
+ #sl_22_df = count_df[(count_df['SCRNLINE1'] == 22) | (count_df['SCRNLINE2'] == 22)]
+ #sl_23_df = count_df[(count_df['SCRNLINE1'] == 23) | (count_df['SCRNLINE2'] == 23)]
+ #sl_24_df = count_df[(count_df['SCRNLINE1'] == 24) | (count_df['SCRNLINE2'] == 24)]
+ #sl_25_df = count_df[(count_df['SCRNLINE1'] == 25) | (count_df['SCRNLINE2'] == 25)]
+ #sl_26_df = count_df[(count_df['SCRNLINE1'] == 26) | (count_df['SCRNLINE2'] == 26)]
+
+ # Build list of dataframes to loop thru
+ sut_df_list = [count_df,#internal_df,external_df,
+ under_5k_df,btwn_5_10k_df,btwn_10_15k_df,btwn_15_20k_df,btwn_20_30k_df,btwn_30_40k_df,btwn_40_50k_df,
+ fc1_df,fc2_df,fc3_df,fc4_df,fc5_df,fc6_df,fc7_df,fc8_df,fc9_df,fc10_df,fc11_df,fc12_df,fc30_df,fc32_df,
+ sl_1_df,sl_2_df,sl_3_df,sl_4_df,sl_5_df,sl_6_df,sl_7_df,sl_8_df,sl_9_df,sl_10_df,sl_11_df,sl_12_df,sl_13_df]
+
+
+ # All Links with MUT Counts
+ count_df = df[df['MUT_Count'].notna()]
+ # By AADT Volume
+ under_5k_df = count_df[(count_df['AADT'] < 5000)]
+ btwn_5_10k_df = count_df[(count_df['AADT'] >= 5000) & (count_df['AADT'] < 10000)]
+ btwn_10_15k_df = count_df[(count_df['AADT'] >= 10000) & (count_df['AADT'] < 15000)]
+ btwn_15_20k_df = count_df[(count_df['AADT'] >= 15000) & (count_df['AADT'] < 20000)]
+ btwn_20_30k_df = count_df[(count_df['AADT'] >= 20000) & (count_df['AADT'] < 30000)]
+ btwn_30_40k_df = count_df[(count_df['AADT'] >= 30000) & (count_df['AADT'] < 40000)]
+ btwn_40_50k_df = count_df[(count_df['AADT'] >= 40000) & (count_df['AADT'] < 50000)]
+ #over_50k_df = count_df[(count_df['AADT'] >= 50000)]
+ # By Functional Class
+ fc1_df = count_df[(count_df['FCLASS'] == 1)]
+ fc2_df = count_df[(count_df['FCLASS'] == 2)]
+ fc3_df = count_df[(count_df['FCLASS'] == 3)]
+ fc4_df = count_df[(count_df['FCLASS'] == 4)]
+ fc5_df = count_df[(count_df['FCLASS'] == 5)]
+ fc6_df = count_df[(count_df['FCLASS'] == 6)]
+ fc7_df = count_df[(count_df['FCLASS'] == 7)]
+ fc8_df = count_df[(count_df['FCLASS'] == 8)]
+ fc9_df = count_df[(count_df['FCLASS'] == 9)]
+ fc10_df = count_df[(count_df['FCLASS'] == 10)]
+ fc11_df = count_df[(count_df['FCLASS'] == 11)]
+ fc12_df = count_df[(count_df['FCLASS'] == 12)]
+ fc30_df = count_df[(count_df['FCLASS'] == 30)]
+ fc32_df = count_df[(count_df['FCLASS'] == 32)]
+ # By Screenline
+ sl_1_df = count_df[(count_df['SCRNLINE1'] == 1) | (count_df['SCRNLINE2'] == 1)]
+ sl_2_df = count_df[(count_df['SCRNLINE1'] == 2) | (count_df['SCRNLINE2'] == 2)]
+ sl_3_df = count_df[(count_df['SCRNLINE1'] == 3) | (count_df['SCRNLINE2'] == 3)]
+ sl_4_df = count_df[(count_df['SCRNLINE1'] == 4) | (count_df['SCRNLINE2'] == 4)]
+ sl_5_df = count_df[(count_df['SCRNLINE1'] == 5) | (count_df['SCRNLINE2'] == 5)]
+ sl_6_df = count_df[(count_df['SCRNLINE1'] == 6) | (count_df['SCRNLINE2'] == 6)]
+ sl_7_df = count_df[(count_df['SCRNLINE1'] == 7) | (count_df['SCRNLINE2'] == 7)]
+ sl_8_df = count_df[(count_df['SCRNLINE1'] == 8) | (count_df['SCRNLINE2'] == 8)]
+ sl_9_df = count_df[(count_df['SCRNLINE1'] == 9) | (count_df['SCRNLINE2'] == 9)]
+ sl_10_df = count_df[(count_df['SCRNLINE1'] == 10) | (count_df['SCRNLINE2'] == 10)]
+ sl_11_df = count_df[(count_df['SCRNLINE1'] == 11) | (count_df['SCRNLINE2'] == 11)]
+ sl_12_df = count_df[(count_df['SCRNLINE1'] == 12) | (count_df['SCRNLINE2'] == 12)]
+ sl_13_df = count_df[(count_df['SCRNLINE1'] == 13) | (count_df['SCRNLINE2'] == 13)]
+ #sl_14_df = count_df[(count_df['SCRNLINE1'] == 14) | (count_df['SCRNLINE2'] == 14)]
+ #sl_15_df = count_df[(count_df['SCRNLINE1'] == 15) | (count_df['SCRNLINE2'] == 15)]
+ #sl_16_df = count_df[(count_df['SCRNLINE1'] == 16) | (count_df['SCRNLINE2'] == 16)]
+ #sl_17_df = count_df[(count_df['SCRNLINE1'] == 17) | (count_df['SCRNLINE2'] == 17)]
+ #sl_18_df = count_df[(count_df['SCRNLINE1'] == 18) | (count_df['SCRNLINE2'] == 18)]
+ #sl_19_df = count_df[(count_df['SCRNLINE1'] == 19) | (count_df['SCRNLINE2'] == 19)]
+ #sl_20_df = count_df[(count_df['SCRNLINE1'] == 20) | (count_df['SCRNLINE2'] == 20)]
+ #sl_21_df = count_df[(count_df['SCRNLINE1'] == 21) | (count_df['SCRNLINE2'] == 21)]
+ #sl_22_df = count_df[(count_df['SCRNLINE1'] == 22) | (count_df['SCRNLINE2'] == 22)]
+ #sl_23_df = count_df[(count_df['SCRNLINE1'] == 23) | (count_df['SCRNLINE2'] == 23)]
+ #sl_24_df = count_df[(count_df['SCRNLINE1'] == 24) | (count_df['SCRNLINE2'] == 24)]
+ #sl_25_df = count_df[(count_df['SCRNLINE1'] == 25) | (count_df['SCRNLINE2'] == 25)]
+ #sl_26_df = count_df[(count_df['SCRNLINE1'] == 26) | (count_df['SCRNLINE2'] == 26)]
+
+
+ # Build list of dataframes to loop thru
+ mut_df_list = [count_df,#internal_df,external_df,
+ under_5k_df,btwn_5_10k_df,btwn_10_15k_df,btwn_15_20k_df,btwn_20_30k_df,btwn_30_40k_df,btwn_40_50k_df,
+ fc1_df,fc2_df,fc3_df,fc4_df,fc5_df,fc6_df,fc7_df,fc8_df,fc9_df,fc10_df,fc11_df,fc12_df,fc30_df,fc32_df,
+ sl_1_df,sl_2_df,sl_3_df,sl_4_df,sl_5_df,sl_6_df,sl_7_df,sl_8_df,sl_9_df,sl_10_df,sl_11_df,sl_12_df,sl_13_df]
+
+ # All Links with All Modes Counts
+ count_df = df[df['Tot_Count'].notna()]
+ # By AADT Volume
+ under_5k_df = count_df[(count_df['AADT'] < 5000)]
+ btwn_5_10k_df = count_df[(count_df['AADT'] >= 5000) & (count_df['AADT'] < 10000)]
+ btwn_10_15k_df = count_df[(count_df['AADT'] >= 10000) & (count_df['AADT'] < 15000)]
+ btwn_15_20k_df = count_df[(count_df['AADT'] >= 15000) & (count_df['AADT'] < 20000)]
+ btwn_20_30k_df = count_df[(count_df['AADT'] >= 20000) & (count_df['AADT'] < 30000)]
+ btwn_30_40k_df = count_df[(count_df['AADT'] >= 30000) & (count_df['AADT'] < 40000)]
+ btwn_40_50k_df = count_df[(count_df['AADT'] >= 40000) & (count_df['AADT'] < 50000)]
+ # By Functional Class
+ fc1_df = count_df[(count_df['FCLASS'] == 1)]
+ fc2_df = count_df[(count_df['FCLASS'] == 2)]
+ fc3_df = count_df[(count_df['FCLASS'] == 3)]
+ fc4_df = count_df[(count_df['FCLASS'] == 4)]
+ fc5_df = count_df[(count_df['FCLASS'] == 5)]
+ fc6_df = count_df[(count_df['FCLASS'] == 6)]
+ fc7_df = count_df[(count_df['FCLASS'] == 7)]
+ fc8_df = count_df[(count_df['FCLASS'] == 8)]
+ fc9_df = count_df[(count_df['FCLASS'] == 9)]
+ fc10_df = count_df[(count_df['FCLASS'] == 10)]
+ fc11_df = count_df[(count_df['FCLASS'] == 11)]
+ fc12_df = count_df[(count_df['FCLASS'] == 12)]
+ fc30_df = count_df[(count_df['FCLASS'] == 30)]
+ fc32_df = count_df[(count_df['FCLASS'] == 32)]
+ # By Screenline
+ sl_1_df = count_df[(count_df['SCRNLINE1'] == 1) | (count_df['SCRNLINE2'] == 1)]
+ sl_2_df = count_df[(count_df['SCRNLINE1'] == 2) | (count_df['SCRNLINE2'] == 2)]
+ sl_3_df = count_df[(count_df['SCRNLINE1'] == 3) | (count_df['SCRNLINE2'] == 3)]
+ sl_4_df = count_df[(count_df['SCRNLINE1'] == 4) | (count_df['SCRNLINE2'] == 4)]
+ sl_5_df = count_df[(count_df['SCRNLINE1'] == 5) | (count_df['SCRNLINE2'] == 5)]
+ sl_6_df = count_df[(count_df['SCRNLINE1'] == 6) | (count_df['SCRNLINE2'] == 6)]
+ sl_7_df = count_df[(count_df['SCRNLINE1'] == 7) | (count_df['SCRNLINE2'] == 7)]
+ sl_8_df = count_df[(count_df['SCRNLINE1'] == 8) | (count_df['SCRNLINE2'] == 8)]
+ sl_9_df = count_df[(count_df['SCRNLINE1'] == 9) | (count_df['SCRNLINE2'] == 9)]
+ sl_10_df = count_df[(count_df['SCRNLINE1'] == 10) | (count_df['SCRNLINE2'] == 10)]
+ sl_11_df = count_df[(count_df['SCRNLINE1'] == 11) | (count_df['SCRNLINE2'] == 11)]
+ sl_12_df = count_df[(count_df['SCRNLINE1'] == 12) | (count_df['SCRNLINE2'] == 12)]
+ sl_13_df = count_df[(count_df['SCRNLINE1'] == 13) | (count_df['SCRNLINE2'] == 13)]
+ #sl_14_df = count_df[(count_df['SCRNLINE1'] == 14) | (count_df['SCRNLINE2'] == 14)]
+ #sl_15_df = count_df[(count_df['SCRNLINE1'] == 15) | (count_df['SCRNLINE2'] == 15)]
+ #sl_16_df = count_df[(count_df['SCRNLINE1'] == 16) | (count_df['SCRNLINE2'] == 16)]
+ #sl_17_df = count_df[(count_df['SCRNLINE1'] == 17) | (count_df['SCRNLINE2'] == 17)]
+ #sl_18_df = count_df[(count_df['SCRNLINE1'] == 18) | (count_df['SCRNLINE2'] == 18)]
+ #sl_19_df = count_df[(count_df['SCRNLINE1'] == 19) | (count_df['SCRNLINE2'] == 19)]
+ #sl_20_df = count_df[(count_df['SCRNLINE1'] == 20) | (count_df['SCRNLINE2'] == 20)]
+ #sl_21_df = count_df[(count_df['SCRNLINE1'] == 21) | (count_df['SCRNLINE2'] == 21)]
+ #sl_22_df = count_df[(count_df['SCRNLINE1'] == 22) | (count_df['SCRNLINE2'] == 22)]
+ #sl_23_df = count_df[(count_df['SCRNLINE1'] == 23) | (count_df['SCRNLINE2'] == 23)]
+ #sl_24_df = count_df[(count_df['SCRNLINE1'] == 24) | (count_df['SCRNLINE2'] == 24)]
+ #sl_25_df = count_df[(count_df['SCRNLINE1'] == 25) | (count_df['SCRNLINE2'] == 25)]
+ #sl_26_df = count_df[(count_df['SCRNLINE1'] == 26) | (count_df['SCRNLINE2'] == 26)]
+
+
+ # Build list of dataframes to loop thru
+ allmodes_df_list = [count_df,#internal_df,external_df,
+ under_5k_df,btwn_5_10k_df,btwn_10_15k_df,btwn_15_20k_df,btwn_20_30k_df,btwn_30_40k_df,btwn_40_50k_df,
+ fc1_df,fc2_df,fc3_df,fc4_df,fc5_df,fc6_df,fc7_df,fc8_df,fc9_df,fc10_df,fc11_df,fc12_df,fc30_df,fc32_df,
+ sl_1_df,sl_2_df,sl_3_df,sl_4_df,sl_5_df,sl_6_df,sl_7_df,sl_8_df,sl_9_df,sl_10_df,sl_11_df,sl_12_df,sl_13_df]
+
+
+ # Add squared error column to each df
+ # Auto
+ for i in auto_df_list:
+ i['Auto_SqError'] = (i.Auto_Flow - i.Auto_Count)**2
+ # SUT
+ for i in sut_df_list:
+ i['SUT_SqError'] = (i.SUT_Flow - i.SUT_Count)**2
+ # MUT
+ for i in mut_df_list:
+ i['MUT_SqError'] = (i.MUT_Flow - i.MUT_Count)**2
+ # All Modes
+ for i in allmodes_df_list:
+ i['Tot_SqError'] = (i.Tot_Flow - i.Tot_Count)**2
+
+ # Create attributes for 'y' in the next section
+ auto_loc = 0
+ sut_loc = auto_loc + len(sut_df_list)
+ mut_loc = sut_loc + len(mut_df_list)
+ allmodes_loc = mut_loc + len(allmodes_df_list)
+
+
+ # Calculate Auto pct error and pct rmse from each dataframe and save in results dataframe
+ y = auto_loc
+ for i in auto_df_list:
+ if len(i) == 0:
+ results_df.at[y,"Number of Observations"] = 0
+ y = y + 1
+ #continue
+ else:
+ results_df.at[y,"Percent Error"] = pct_error(i.Auto_Count,i.Auto_Flow)
+ results_df.at[y,"Percent RMSE"] = pct_rmse(i.Auto_Count,i.Auto_SqError)
+ results_df.at[y,"Number of Observations"] = len(i)
+ results_df.at[y,"Sum of Counts"] = np.sum(i.Auto_Count)
+ results_df.at[y,"Mean of Counts"] = np.mean(i.Auto_Count)
+ results_df.at[y,"Median of Counts"] = np.median(i.Auto_Count)
+ results_df.at[y,"Count VMT, Links with Counts"] = vmt(i.Auto_Count,i.LENGTH)
+ results_df.at[y,'Modeled VMT, Links with Counts'] = vmt(i.Auto_Flow,i.LENGTH)
+ y = y + 1
+
+ # Calculate SUT pct error and pct rmse from each dataframe and save in results dataframe
+ y = sut_loc
+ for i in sut_df_list:
+ if len(i) == 0:
+ results_df.at[y,"Number of Observations"] = 0
+ y = y + 1
+ #continue
+ else:
+ results_df.at[y,"Percent Error"] = pct_error(i.SUT_Count,i.SUT_Flow)
+ results_df.at[y,"Percent RMSE"] = pct_rmse(i.SUT_Count, i.SUT_SqError)
+ results_df.at[y,"Number of Observations"] = len(i)
+ results_df.at[y,"Sum of Counts"] = np.sum(i.SUT_Count)
+ results_df.at[y,"Mean of Counts"] = np.mean(i.SUT_Count)
+ results_df.at[y,"Median of Counts"] = np.median(i.SUT_Count)
+ results_df.at[y,"Count VMT, Links with Counts"] = vmt(i.SUT_Count,i.LENGTH)
+ results_df.at[y,'Modeled VMT, Links with Counts'] = vmt(i.SUT_Flow,i.LENGTH)
+ y = y + 1
+
+ # Calculate MUT pct error and pct rmse from each dataframe and save in results dataframe
+ y = mut_loc
+ for i in mut_df_list:
+ if len(i) == 0:
+ results_df.at[y,"Number of Observations"] = 0
+ y = y + 1
+ #continue
+ else:
+ results_df.at[y,"Percent Error"] = pct_error(i.MUT_Count,i.MUT_Flow)
+ results_df.at[y,"Percent RMSE"] = pct_rmse(i.MUT_Count, i.MUT_SqError)
+ results_df.at[y,"Number of Observations"] = len(i)
+ results_df.at[y,"Sum of Counts"] = np.sum(i.MUT_Count)
+ results_df.at[y,"Mean of Counts"] = np.mean(i.MUT_Count)
+ results_df.at[y,"Median of Counts"] = np.median(i.MUT_Count)
+ results_df.at[y,"Count VMT, Links with Counts"] = vmt(i.MUT_Count,i.LENGTH)
+ results_df.at[y,'Modeled VMT, Links with Counts'] = vmt(i.MUT_Flow,i.LENGTH)
+ y = y + 1
+
+ # Calculate All Modes pct error and pct rmse from each dataframe and save in results dataframe
+ y = allmodes_loc
+ for i in allmodes_df_list:
+ if len(i) == 0:
+ results_df.at[y,"Number of Observations"] = 0
+ y = y + 1
+ #continue
+ else:
+ results_df.at[y,"Percent Error"] = pct_error(i.Tot_Count,i.Tot_Flow)
+ results_df.at[y,"Percent RMSE"] = pct_rmse(i.Tot_Count,i.Tot_SqError)
+ results_df.at[y,"Number of Observations"] = len(i)
+ results_df.at[y,"Sum of Counts"] = np.sum(i.Tot_Count)
+ results_df.at[y,"Mean of Counts"] = np.mean(i.Tot_Count)
+ results_df.at[y,"Median of Counts"] = np.median(i.Tot_Count)
+ results_df.at[y,"Count VMT, Links with Counts"] = vmt(i.Tot_Count,i.LENGTH)
+ results_df.at[y,'Modeled VMT, Links with Counts'] = vmt(i.Tot_Flow,i.LENGTH)
+ y = y + 1
+
+
+ # Total VMT and Total VHT
+
+ # Import ID fields and fields with Counts and Flows
+ # Link ID fields
+ NO = VisumPy.helpers.GetMulti(Visum.Net.Links,"No", activeOnly = True)
+ FCLASS = VisumPy.helpers.GetMulti(Visum.Net.Links,"TYPENO", activeOnly = True)
+ LENGTH = VisumPy.helpers.GetMulti(Visum.Net.Links,"Length", activeOnly = True)
+ SCRNLINE = VisumPy.helpers.GetMulti(Visum.Net.Links,r"CONCATENATE:SCREENLINES\CODE", activeOnly = True)
+ # Pull CONGTIME Auto by period, Length, and Flows by Period for Total VMT/Total VHT Calculations
+ CONGTIME_AM_C = VisumPy.helpers.GetMulti(Visum.Net.Links,"AMTCUR_A", activeOnly = True)
+ CONGTIME_PM_C = VisumPy.helpers.GetMulti(Visum.Net.Links,"PMTCUR_A", activeOnly = True)
+ CONGTIME_OP_C = VisumPy.helpers.GetMulti(Visum.Net.Links,"OPTCUR_A", activeOnly = True)
+ # Pull CONGTIME SUT by period, Length, and Flows by Period for Total VMT/Total VHT Calculations
+ CONGTIME_AM_S = VisumPy.helpers.GetMulti(Visum.Net.Links,"AMTCUR_MED", activeOnly = True)
+ CONGTIME_PM_S = VisumPy.helpers.GetMulti(Visum.Net.Links,"PMTCUR_MED", activeOnly = True)
+ CONGTIME_OP_S = VisumPy.helpers.GetMulti(Visum.Net.Links,"OPTCUR_MED", activeOnly = True)
+    # Pull CONGTIME MUT by period, Length, and Flows by Period for Total VMT/Total VHT Calculations
+ CONGTIME_AM_M = VisumPy.helpers.GetMulti(Visum.Net.Links,"AMTCUR_HVY", activeOnly = True)
+ CONGTIME_PM_M = VisumPy.helpers.GetMulti(Visum.Net.Links,"PMTCUR_HVY", activeOnly = True)
+ CONGTIME_OP_M = VisumPy.helpers.GetMulti(Visum.Net.Links,"OPTCUR_HVY", activeOnly = True)
+
+ # Link Flows by Period
+ # AM
+ sov_flow = VisumPy.helpers.GetMulti(Visum.Net.Links,"AMSVOL", activeOnly = True)
+ hov_flow = VisumPy.helpers.GetMulti(Visum.Net.Links,"AMHVOL", activeOnly = True)
+
+ AM_Auto_Flow = np.add(sov_flow,hov_flow)
+ AM_SUT_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,"AMMTVOL", activeOnly = True)
+ AM_MUT_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,"AMHTVOL", activeOnly = True)
+ AM_Tot_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,"AMVOLPCU_N", activeOnly = True)
+    # PM
+ sov_flow = VisumPy.helpers.GetMulti(Visum.Net.Links,"PMSVOL", activeOnly = True)
+ hov_flow = VisumPy.helpers.GetMulti(Visum.Net.Links,"PMHVOL", activeOnly = True)
+
+ PM_Auto_Flow = np.add(sov_flow,hov_flow)
+ PM_SUT_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,"PMMTVOL", activeOnly = True)
+ PM_MUT_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,"PMHTVOL", activeOnly = True)
+ PM_Tot_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,"PMVOLPCU_N", activeOnly = True)
+ # OP
+ sov_flow = VisumPy.helpers.GetMulti(Visum.Net.Links,"OPSVOL", activeOnly = True)
+ hov_flow = VisumPy.helpers.GetMulti(Visum.Net.Links,"OPHVOL", activeOnly = True)
+
+ OP_Auto_Flow = np.add(sov_flow,hov_flow)
+ OP_SUT_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,"OPMTVOL", activeOnly = True)
+ OP_MUT_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,"OPHTVOL", activeOnly = True)
+ OP_Tot_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,"OPVOLPCU_N", activeOnly = True)
+
+ # Period for functions
+ Auto_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,auto_flow, activeOnly = True)
+ SUT_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,sut_flow, activeOnly = True)
+ MUT_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,mut_flow, activeOnly = True)
+ Tot_Flow = VisumPy.helpers.GetMulti(Visum.Net.Links,all_flow, activeOnly = True)
+
+
+ # Make Visum list with link data
+ summary_list = [NO, FCLASS, LENGTH, SCRNLINE,
+ CONGTIME_AM_C, CONGTIME_PM_C, CONGTIME_OP_C,
+ CONGTIME_AM_S, CONGTIME_PM_S, CONGTIME_OP_S,
+ CONGTIME_AM_M, CONGTIME_PM_M, CONGTIME_OP_M,
+ AM_Auto_Flow, AM_SUT_Flow, AM_MUT_Flow, AM_Tot_Flow,
+ PM_Auto_Flow, PM_SUT_Flow, PM_MUT_Flow, PM_Tot_Flow,
+ OP_Auto_Flow, OP_SUT_Flow, OP_MUT_Flow, OP_Tot_Flow,
+ Auto_Flow, SUT_Flow, MUT_Flow, Tot_Flow]
+
+ # Put Visum link list into dataframe
+ df_all = pd.DataFrame(np.column_stack(summary_list), columns = ['NO', 'FCLASS', 'LENGTH', 'SCRNLINE',
+ 'CONGTIME_AM_C', 'CONGTIME_PM_C', 'CONGTIME_OP_C',
+ 'CONGTIME_AM_S', 'CONGTIME_PM_S', 'CONGTIME_OP_S',
+ 'CONGTIME_AM_M', 'CONGTIME_PM_M', 'CONGTIME_OP_M',
+ 'AM_Auto_Flow', 'AM_SUT_Flow', 'AM_MUT_Flow', 'AM_Tot_Flow',
+ 'PM_Auto_Flow', 'PM_SUT_Flow', 'PM_MUT_Flow', 'PM_Tot_Flow',
+ 'OP_Auto_Flow', 'OP_SUT_Flow', 'OP_MUT_Flow', 'OP_Tot_Flow',
+ 'Auto_Flow', 'SUT_Flow', 'MUT_Flow', 'Tot_Flow'])
+
+
+ # Break out SCRNLINE field to separate by commas into individual columns
+ df_all[['SCRNLINE']] = df_all[['SCRNLINE']].astype(str)
+ df_all = pd.concat([df_all,df_all['SCRNLINE'].str.split(',', expand = True)], axis = 1)
+ # Change Screenline field names
+ if 1 not in df_all:
+ df_all[1] = 0
+ df_all = df_all.rename(columns = {0:'SCRNLINE1',1:'SCRNLINE2'})
+
+ ## Break out SCRNLINE field to separate by commas into individual columns
+ #df_all = pd.concat([df_all,df_all['SCRNLINE'].str.split(',', expand = True)], axis = 1)
+ ## Change Screenline field names
+ #df_all = df_all.rename(columns = {0:'SCRNLINE1',1:'SCRNLINE2'})
+
+ # Replace null values with 0 in the screenline fields
+ df_all['SCRNLINE1'] = df_all['SCRNLINE1'].replace('',np.nan).fillna(0)
+ df_all['SCRNLINE2'] = df_all['SCRNLINE2'].replace('',np.nan).fillna(0)
+
+ # Convert all flow and time fields to float to make multiplication and other operations run smoothly. Read in as strings
+ df_all[['NO', 'FCLASS', 'LENGTH', 'SCRNLINE1','SCRNLINE2',
+ 'CONGTIME_AM_C', 'CONGTIME_PM_C', 'CONGTIME_OP_C',
+ 'CONGTIME_AM_S', 'CONGTIME_PM_S', 'CONGTIME_OP_S',
+ 'CONGTIME_AM_M', 'CONGTIME_PM_M', 'CONGTIME_OP_M',
+ 'AM_Auto_Flow', 'AM_SUT_Flow', 'AM_MUT_Flow', 'AM_Tot_Flow',
+ 'PM_Auto_Flow', 'PM_SUT_Flow', 'PM_MUT_Flow', 'PM_Tot_Flow',
+ 'OP_Auto_Flow', 'OP_SUT_Flow', 'OP_MUT_Flow', 'OP_Tot_Flow',
+ 'Auto_Flow', 'SUT_Flow', 'MUT_Flow', 'Tot_Flow']] = df_all[['NO', 'FCLASS', 'LENGTH', 'SCRNLINE1','SCRNLINE2',
+ 'CONGTIME_AM_C', 'CONGTIME_PM_C', 'CONGTIME_OP_C',
+ 'CONGTIME_AM_S', 'CONGTIME_PM_S', 'CONGTIME_OP_S',
+ 'CONGTIME_AM_M', 'CONGTIME_PM_M', 'CONGTIME_OP_M',
+ 'AM_Auto_Flow', 'AM_SUT_Flow', 'AM_MUT_Flow', 'AM_Tot_Flow',
+ 'PM_Auto_Flow', 'PM_SUT_Flow', 'PM_MUT_Flow', 'PM_Tot_Flow',
+ 'OP_Auto_Flow', 'OP_SUT_Flow', 'OP_MUT_Flow', 'OP_Tot_Flow',
+ 'Auto_Flow', 'SUT_Flow', 'MUT_Flow', 'Tot_Flow']].astype(float)
+
+ # Convert ID fields to integer
+ df_all[['NO','FCLASS','SCRNLINE1','SCRNLINE2']] = df_all[['NO','FCLASS','SCRNLINE1','SCRNLINE2']].astype(int)
+
+
+ # For links with counts only, used for Pct. Error and Pct. RMSE
+ # Filter out links where count is null and by each condition
+ # All Links with Counts
+ count_df_all = df_all
+ # By AADT Volume
+ under_5k_df_all = df_all[df_all['NO'].isin(under_5k_df['NO'])]
+ btwn_5_10k_df_all = df_all[df_all['NO'].isin(btwn_5_10k_df['NO'])]
+ btwn_10_15k_df_all = df_all[df_all['NO'].isin(btwn_10_15k_df['NO'])]
+ btwn_15_20k_df_all = df_all[df_all['NO'].isin(btwn_15_20k_df['NO'])]
+ btwn_20_30k_df_all = df_all[df_all['NO'].isin(btwn_20_30k_df['NO'])]
+ btwn_30_40k_df_all = df_all[df_all['NO'].isin(btwn_30_40k_df['NO'])]
+ btwn_40_50k_df_all = df_all[df_all['NO'].isin(btwn_40_50k_df['NO'])]
+ # By Functional Class
+ fc1_df_all = df_all[(df_all['FCLASS'] == 1)]
+ fc2_df_all = df_all[(df_all['FCLASS'] == 2)]
+ fc3_df_all = df_all[(df_all['FCLASS'] == 3)]
+ fc4_df_all = df_all[(df_all['FCLASS'] == 4)]
+ fc5_df_all = df_all[(df_all['FCLASS'] == 5)]
+ fc6_df_all = df_all[(df_all['FCLASS'] == 6)]
+ fc7_df_all = df_all[(df_all['FCLASS'] == 7)]
+ fc8_df_all = df_all[(df_all['FCLASS'] == 8)]
+ fc9_df_all = df_all[(df_all['FCLASS'] == 9)]
+ fc10_df_all = df_all[(df_all['FCLASS'] == 10)]
+ fc11_df_all = df_all[(df_all['FCLASS'] == 11)]
+ fc12_df_all = df_all[(df_all['FCLASS'] == 12)]
+ fc30_df_all = df_all[(df_all['FCLASS'] == 30)]
+ fc32_df_all = df_all[(df_all['FCLASS'] == 32)]
+ # By Screenline
+ sl_1_df_all = df_all[(df_all['SCRNLINE1'] == 1) | (df_all['SCRNLINE2'] == 1)]
+ sl_2_df_all = df_all[(df_all['SCRNLINE1'] == 2) | (df_all['SCRNLINE2'] == 2)]
+ sl_3_df_all = df_all[(df_all['SCRNLINE1'] == 3) | (df_all['SCRNLINE2'] == 3)]
+ sl_4_df_all = df_all[(df_all['SCRNLINE1'] == 4) | (df_all['SCRNLINE2'] == 4)]
+ sl_5_df_all = df_all[(df_all['SCRNLINE1'] == 5) | (df_all['SCRNLINE2'] == 5)]
+ sl_6_df_all = df_all[(df_all['SCRNLINE1'] == 6) | (df_all['SCRNLINE2'] == 6)]
+ sl_7_df_all = df_all[(df_all['SCRNLINE1'] == 7) | (df_all['SCRNLINE2'] == 7)]
+ sl_8_df_all = df_all[(df_all['SCRNLINE1'] == 8) | (df_all['SCRNLINE2'] == 8)]
+ sl_9_df_all = df_all[(df_all['SCRNLINE1'] == 9) | (df_all['SCRNLINE2'] == 9)]
+ sl_10_df_all = df_all[(df_all['SCRNLINE1'] == 10) | (df_all['SCRNLINE2'] == 10)]
+ sl_11_df_all = df_all[(df_all['SCRNLINE1'] == 11) | (df_all['SCRNLINE2'] == 11)]
+ sl_12_df_all = df_all[(df_all['SCRNLINE1'] == 12) | (df_all['SCRNLINE2'] == 12)]
+ sl_13_df_all = df_all[(df_all['SCRNLINE1'] == 13) | (df_all['SCRNLINE2'] == 13)]
+ #sl_14_df_all = df_all[(df_all['SCRNLINE1'] == 14) | (df_all['SCRNLINE2'] == 14)]
+ #sl_15_df_all = df_all[(df_all['SCRNLINE1'] == 15) | (df_all['SCRNLINE2'] == 15)]
+ #sl_16_df_all = df_all[(df_all['SCRNLINE1'] == 16) | (df_all['SCRNLINE2'] == 16)]
+ #sl_17_df_all = df_all[(df_all['SCRNLINE1'] == 17) | (df_all['SCRNLINE2'] == 17)]
+ #sl_18_df_all = df_all[(df_all['SCRNLINE1'] == 18) | (df_all['SCRNLINE2'] == 18)]
+ #sl_19_df_all = df_all[(df_all['SCRNLINE1'] == 19) | (df_all['SCRNLINE2'] == 19)]
+ #sl_20_df_all = df_all[(df_all['SCRNLINE1'] == 20) | (df_all['SCRNLINE2'] == 20)]
+ #sl_21_df_all = df_all[(df_all['SCRNLINE1'] == 21) | (df_all['SCRNLINE2'] == 21)]
+ #sl_22_df_all = df_all[(df_all['SCRNLINE1'] == 22) | (df_all['SCRNLINE2'] == 22)]
+ #sl_23_df_all = df_all[(df_all['SCRNLINE1'] == 23) | (df_all['SCRNLINE2'] == 23)]
+ #sl_24_df_all = df_all[(df_all['SCRNLINE1'] == 24) | (df_all['SCRNLINE2'] == 24)]
+ #sl_25_df_all = df_all[(df_all['SCRNLINE1'] == 25) | (df_all['SCRNLINE2'] == 25)]
+ #sl_26_df_all = df_all[(df_all['SCRNLINE1'] == 26) | (df_all['SCRNLINE2'] == 26)]
+
+
+ # Build list of dataframes to loop thru
+ df_list_all = [count_df_all,#internal_df_all,external_df_all,
+ under_5k_df_all,btwn_5_10k_df_all,btwn_10_15k_df_all,btwn_15_20k_df_all,btwn_20_30k_df_all,btwn_30_40k_df_all,btwn_40_50k_df_all,
+ fc1_df_all,fc2_df_all,fc3_df_all,fc4_df_all,fc5_df_all,fc6_df_all,fc7_df_all,fc8_df_all,fc9_df_all,fc10_df_all,fc11_df_all,fc12_df_all,
+ fc30_df_all,fc32_df_all,
+ sl_1_df_all,sl_2_df_all,sl_3_df_all,sl_4_df_all,sl_5_df_all,sl_6_df_all,sl_7_df_all,sl_8_df_all,sl_9_df_all,sl_10_df_all,sl_11_df_all,
+ sl_12_df_all,sl_13_df_all]
+
+
+ # Calculate Auto Total VMT and Total VHT from each dataframe and save in results dataframe
+ y = auto_loc
+ for i in df_list_all:
+ if len(i) == 0: #sum(i.Auto_Flow) == 0.0:
+ y = y + 1
+ continue
+ else:
+ results_df.at[y,"Total VMT"] = vmt(i.Auto_Flow,i.LENGTH)
+ results_df.at[y,"Total VHT"] = vht_dly(i.AM_Auto_Flow,i.CONGTIME_AM_C,i.PM_Auto_Flow,i.CONGTIME_PM_C,i.OP_Auto_Flow,i.CONGTIME_OP_C)
+ y = y + 1
+
+ # Calculate SUT Total VMT and Total VHT from each dataframe and save in results dataframe
+ y = sut_loc
+ for i in df_list_all:
+ if len(i) == 0: #sum(i.Auto_Flow) == 0.0:
+ y = y + 1
+ continue
+ else:
+ results_df.at[y,"Total VMT"] = vmt(i.SUT_Flow,i.LENGTH)
+ results_df.at[y,"Total VHT"] = vht_dly(i.AM_SUT_Flow,i.CONGTIME_AM_S,i.PM_SUT_Flow,i.CONGTIME_PM_S,i.OP_SUT_Flow,i.CONGTIME_OP_S)
+ y = y + 1
+
+ # Calculate MUT Total VMT and Total VHT from each dataframe and save in results dataframe
+ y = mut_loc
+ for i in df_list_all:
+ if len(i) == 0: #sum(i.Auto_Flow) == 0.0:
+ y = y + 1
+ continue
+ else:
+ results_df.at[y,"Total VMT"] = vmt(i.MUT_Flow,i.LENGTH)
+ results_df.at[y,"Total VHT"] = vht_dly(i.AM_MUT_Flow,i.CONGTIME_AM_M,i.PM_MUT_Flow,i.CONGTIME_PM_M,i.OP_MUT_Flow,i.CONGTIME_OP_M)
+ y = y + 1
+
+ # Calculate All Modes Total VMT and Total VHT from each dataframe and save in results dataframe
+ y = allmodes_loc
+ a = auto_loc
+ s = sut_loc
+ m = mut_loc
+ for i in df_list_all:
+ if len(i) == 0: #sum(i.Tot_Flow) == 0.0:
+ y = y + 1
+ continue
+ else:
+ results_df.at[y,"Total VMT"] = results_df.at[a,"Total VMT"] + results_df.at[s,"Total VMT"] + results_df.at[m,"Total VMT"] # vmt(i.Tot_Flow,i.LENGTH)
+ results_df.at[y,"Total VHT"] = results_df.at[a,"Total VHT"] + results_df.at[s,"Total VHT"] + results_df.at[m,"Total VHT"] # vht(i.AM_Tot_Flow,i.CONGTIME_AM,i.MD_Tot_Flow,i.CONGTIME_MD,i.PM_Tot_Flow,i.CONGTIME_PM,i.NI_Tot_Flow,i.CONGTIME_NT)
+ y = y + 1
+ a = a + 1
+ s = s + 1
+ m = m + 1
+
+
+ # Apply the formatting function to specific columns
+ results_df['Percent Error'] = format_percent(results_df['Percent Error'])
+ results_df['Percent RMSE'] = format_percent(results_df['Percent RMSE'])
+ results_df['Total VMT'] = format_commas(results_df['Total VMT'])
+ results_df['Total VHT'] = format_commas(results_df['Total VHT'])
+ results_df['Number of Observations'] = format_commas(results_df['Number of Observations'])
+ results_df['Sum of Counts'] = format_commas(results_df['Sum of Counts'])
+ results_df['Mean of Counts'] = format_commas(results_df['Mean of Counts'])
+ results_df['Median of Counts'] = format_commas(results_df['Median of Counts'])
+ results_df['Count VMT, Links with Counts'] = format_commas(results_df['Count VMT, Links with Counts'])
+ results_df['Modeled VMT, Links with Counts'] = format_commas(results_df['Modeled VMT, Links with Counts'])
+
+
+ # Save Daily Summary file to new timestamped folder
+ results_df.to_csv(proj_dir+"outputs/reports/ModelRun_"+date+"/Assignment Results/AssignmentSummary.csv")
+
+# Transit Unlinked Trips Summary Table
+def transit_report():
+
+ # Pull Lines table attributes
+ Name = VisumPy.helpers.GetMulti(Visum.Net.Lines,"Name", activeOnly = True)
+ Description = VisumPy.helpers.GetMulti(Visum.Net.Lines,"Emme_Description", activeOnly = True)
+ AM_Trips = VisumPy.helpers.GetMulti(Visum.Net.Lines,"AM_UL_TRIPS", activeOnly = True)
+ PM_Trips = VisumPy.helpers.GetMulti(Visum.Net.Lines,"PM_UL_TRIPS", activeOnly = True)
+ OP_Trips = VisumPy.helpers.GetMulti(Visum.Net.Lines,"OP_UL_TRIPS", activeOnly = True)
+ DLY_Trips = VisumPy.helpers.GetMulti(Visum.Net.Lines,"DLY_UL_TRIPS", activeOnly = True)
+
+ # Make Visum list with link data
+ summary_list = [Name, Description, AM_Trips, PM_Trips, OP_Trips, DLY_Trips]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(summary_list), columns = ['Name', 'Description', 'AM_Trips', 'PM_Trips', 'OP_Trips', 'DLY_Trips'])
+
+ # Save Transit results to folder
+ df.to_csv(proj_dir+"outputs/reports/ModelRun_"+date+"/Assignment Results/TransitSummary.csv")
+
+
+
+# Daily
+# Run the daily assignment summary; arguments are the Visum link attribute names
+# for counts (Auto, SUT, MUT, Total) followed by the matching volume attributes.
+assignment_summary('AUTO_COUNT_DLY', 'SUT_COUNT_DLY', 'MUT_COUNT_DLY', 'TOT_COUNT_DLY', 'AUTO_VOL_DLY', 'SUT_VOL_DLY', 'MUT_VOL_DLY', 'TOT_VOL_DLY') # 'DAILY_LCV_VOL', 'AM3_CTIME_C', 'AM3_CTIME_T', 'Daily') # Daily VHT Calculated from full day, so AM3_CTIME here is just a placeholder for the function
+
+# Transit
+# Write the unlinked transit trips summary table
+transit_report()
+
+
diff --git a/ORMetroModel/scripts/CostSkim_Setup.py b/ORMetroModel/scripts/CostSkim_Setup.py
new file mode 100644
index 0000000..0958dd1
--- /dev/null
+++ b/ORMetroModel/scripts/CostSkim_Setup.py
@@ -0,0 +1,158 @@
+# Setting up for PrT cost skimming
+# 6/24/2025 - Luke Gordon (RSG)
+# Adapted from code from Chetan Joshi (PTV)
+
+import tables
+import numpy as np
+import pandas as pd
+import os
+import VisumPy.helpers as h
+import openmatrix as omx
+
+
+def costskim_setup(period, mode, vot):
+
+ # Links
+ # Pull AddVals for skimming
+ addval1 = h.GetMulti(Visum.Net.Links,r"ADDVAL1" , activeOnly = True) # TEMPORARY, NEED TO REPLACE WITH ACTUAL TRAVEL TIME BY PERIOD USING VDF
+ addval2 = h.GetMulti(Visum.Net.Links,r"ADDVAL2" , activeOnly = True)
+ # Pull Toll Fields
+ EA_SOV_TOLL = h.GetMulti(Visum.Net.Links,r"EA_SOV_TOLL", activeOnly = True)
+ EA_SR2_TOLL = h.GetMulti(Visum.Net.Links,r"EA_SR2_TOLL", activeOnly = True)
+ EA_SR3_TOLL = h.GetMulti(Visum.Net.Links,r"EA_SR3_TOLL", activeOnly = True)
+ EA_MT_TOLL = h.GetMulti(Visum.Net.Links,r"EA_MT_TOLL" , activeOnly = True)
+ EA_HT_TOLL = h.GetMulti(Visum.Net.Links,r"EA_HT_TOLL" , activeOnly = True)
+ AM_SOV_TOLL = h.GetMulti(Visum.Net.Links,r"AM_SOV_TOLL", activeOnly = True)
+ AM_SR2_TOLL = h.GetMulti(Visum.Net.Links,r"AM_SR2_TOLL", activeOnly = True)
+ AM_SR3_TOLL = h.GetMulti(Visum.Net.Links,r"AM_SR3_TOLL", activeOnly = True)
+ AM_MT_TOLL = h.GetMulti(Visum.Net.Links,r"AM_MT_TOLL" , activeOnly = True)
+ AM_HT_TOLL = h.GetMulti(Visum.Net.Links,r"AM_HT_TOLL" , activeOnly = True)
+ MD_SOV_TOLL = h.GetMulti(Visum.Net.Links,r"MD_SOV_TOLL", activeOnly = True)
+ MD_SR2_TOLL = h.GetMulti(Visum.Net.Links,r"MD_SR2_TOLL", activeOnly = True)
+ MD_SR3_TOLL = h.GetMulti(Visum.Net.Links,r"MD_SR3_TOLL", activeOnly = True)
+ MD_MT_TOLL = h.GetMulti(Visum.Net.Links,r"MD_MT_TOLL" , activeOnly = True)
+ MD_HT_TOLL = h.GetMulti(Visum.Net.Links,r"MD_HT_TOLL" , activeOnly = True)
+ PM_SOV_TOLL = h.GetMulti(Visum.Net.Links,r"PM_SOV_TOLL", activeOnly = True)
+ PM_SR2_TOLL = h.GetMulti(Visum.Net.Links,r"PM_SR2_TOLL", activeOnly = True)
+ PM_SR3_TOLL = h.GetMulti(Visum.Net.Links,r"PM_SR3_TOLL", activeOnly = True)
+ PM_MT_TOLL = h.GetMulti(Visum.Net.Links,r"PM_MT_TOLL" , activeOnly = True)
+ PM_HT_TOLL = h.GetMulti(Visum.Net.Links,r"PM_HT_TOLL" , activeOnly = True)
+ EV_SOV_TOLL = h.GetMulti(Visum.Net.Links,r"EV_SOV_TOLL", activeOnly = True)
+ EV_SR2_TOLL = h.GetMulti(Visum.Net.Links,r"EV_SR2_TOLL", activeOnly = True)
+ EV_SR3_TOLL = h.GetMulti(Visum.Net.Links,r"EV_SR3_TOLL", activeOnly = True)
+ EV_MT_TOLL = h.GetMulti(Visum.Net.Links,r"EV_MT_TOLL" , activeOnly = True)
+ EV_HT_TOLL = h.GetMulti(Visum.Net.Links,r"EV_HT_TOLL" , activeOnly = True)
+
+
+ # Make Visum list with link data
+ att_list = [addval1,addval2,
+ EA_SOV_TOLL,EA_SR2_TOLL,EA_SR3_TOLL,EA_MT_TOLL,EA_HT_TOLL,
+ AM_SOV_TOLL,AM_SR2_TOLL,AM_SR3_TOLL,AM_MT_TOLL,AM_HT_TOLL,
+ MD_SOV_TOLL,MD_SR2_TOLL,MD_SR3_TOLL,MD_MT_TOLL,MD_HT_TOLL,
+ PM_SOV_TOLL,PM_SR2_TOLL,PM_SR3_TOLL,PM_MT_TOLL,PM_HT_TOLL,
+ EV_SOV_TOLL,EV_SR2_TOLL,EV_SR3_TOLL,EV_MT_TOLL,EV_HT_TOLL]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['addval1','addval2',
+ 'EA_SOV_TOLL','EA_SR2_TOLL','EA_SR3_TOLL','EA_MT_TOLL','EA_HT_TOLL',
+ 'AM_SOV_TOLL','AM_SR2_TOLL','AM_SR3_TOLL','AM_MT_TOLL','AM_HT_TOLL',
+ 'MD_SOV_TOLL','MD_SR2_TOLL','MD_SR3_TOLL','MD_MT_TOLL','MD_HT_TOLL',
+ 'PM_SOV_TOLL','PM_SR2_TOLL','PM_SR3_TOLL','PM_MT_TOLL','PM_HT_TOLL',
+ 'EV_SOV_TOLL','EV_SR2_TOLL','EV_SR3_TOLL','EV_MT_TOLL','EV_HT_TOLL'])
+
+ # TEMPORARY, NEED TO REPLACE WITH ACTUAL TRAVEL TIME BY PERIOD USING VDF AND VOLUME
+ df['time'] = df['addval1']
+
+ # Mode VOTs
+ sov_low = 3.39
+ sov_med = 7.49
+ sov_high = 20.51
+ sr2_low = 5.11
+ sr2_med = 10.92
+ sr2_high = 27.30
+ sr3_low = 7.76
+ sr3_med = 16.82
+ sr3_high = 44.04
+
+ # Set AddVal2 to Cost in Time
+ # SOV, Low VOT
+ if mode == "SOV" and vot == "Low":
+ df['addval2'] = df['time'] + (df[period+'_'+mode+'_TOLL'] / sov_low) * 3600 # Gen Cost (Time in Seconds)
+ # SR2, Low VOT
+ elif mode == "SR2" and vot == "Low":
+ df['addval2'] = df['time'] + (df[period+'_'+mode+'_TOLL'] / sr2_low) * 3600 # Gen Cost (Time in Seconds)
+ # SR3, Low VOT
+ elif mode == "SR3" and vot == "Low":
+ df['addval2'] = df['time'] + (df[period+'_'+mode+'_TOLL'] / sr3_low) * 3600 # Gen Cost (Time in Seconds)
+ # SOV, Medium VOT
+ elif mode == "SOV" and vot == "Medium":
+ df['addval2'] = df['time'] + (df[period+'_'+mode+'_TOLL'] / sov_med) * 3600 # Gen Cost (Time in Seconds)
+ # SR2, Medium VOT
+ elif mode == "SR2" and vot == "Medium":
+ df['addval2'] = df['time'] + (df[period+'_'+mode+'_TOLL'] / sr2_med) * 3600 # Gen Cost (Time in Seconds)
+ # SR3, Medium VOT
+ elif mode == "SR3" and vot == "Medium":
+ df['addval2'] = df['time'] + (df[period+'_'+mode+'_TOLL'] / sr3_med) * 3600 # Gen Cost (Time in Seconds)
+ # SOV, High VOT
+ elif mode == "SOV" and vot == "High":
+ df['addval2'] = df['time'] + (df[period+'_'+mode+'_TOLL'] / sov_high) * 3600 # Gen Cost (Time in Seconds)
+ # SR2, High VOT
+ elif mode == "SR2" and vot == "High":
+ df['addval2'] = df['time'] + (df[period+'_'+mode+'_TOLL'] / sr2_high) * 3600 # Gen Cost (Time in Seconds)
+ # SR3, High VOT
+ elif mode == "SR3" and vot == "High":
+ df['addval2'] = df['time'] + (df[period+'_'+mode+'_TOLL'] / sr3_high) * 3600 # Gen Cost (Time in Seconds)
+
+
+ # Set tolls by period and mode
+ df['toll'] = df[period+'_'+mode+'_TOLL'] # Money in Dollars
+
+ # Set fields back in Visum
+ # Gen Cost
+ h.SetMulti(Visum.Net.Links ,r"ADDVAL2", df['addval2'])
+ # Toll
+ if mode == "SOV":
+ h.SetMulti(Visum.Net.Links ,r"TOLL_PRTSYS(S)", df['toll'])
+ elif mode == "SR2":
+ h.SetMulti(Visum.Net.Links ,r"TOLL_PRTSYS(SR2)", df['toll'])
+ elif mode == "SR3":
+ h.SetMulti(Visum.Net.Links ,r"TOLL_PRTSYS(SR3)", df['toll'])
+
+
+
+
+
+ # REPEAT FOR CONNECTORS
+ # Pull AddVals for skimming
+ addval1 = h.GetMulti(Visum.Net.Connectors,r"ADDVAL1" , activeOnly = True) # TEMPORARY, NEED TO REPLACE WITH ACTUAL TRAVEL TIME BY PERIOD USING VDF
+
+ # Make Visum list with link data
+ att_list = [addval1]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['addval1'])
+
+ # TEMPORARY, NEED TO REPLACE WITH ACTUAL TRAVEL TIME BY PERIOD USING VDF AND VOLUME
+ df['time'] = df['addval1']
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.Connectors ,r"ADDVAL1", df['time'])
+ h.SetMulti(Visum.Net.Connectors ,r"ADDVAL2", df['time'])
+
+
+
+# Read user inputs from Visum
+proj_dir = Visum.GetPath(2)
+
+# Pull "Code" field from procedure sequence containing Code, DSegCode, and filename
+# SECURITY NOTE: eval() executes arbitrary Python from the procedure's "CODE"
+# field; ast.literal_eval would be safer if the field is always a literal list.
+procedure_code = Visum.Procedures.OperationExecutor.GetCurrentOperation().AttValue("CODE") # Example: outputs a string like -> '[["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]'
+procedure_codes = eval(procedure_code) # Example: outputs a list of lists like -> [["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]
+
+# Loop thru each matrix set in the "Code" field and export
+#for x in range(len(procedure_codes)):
+# NOTE(review): the example above is a list of lists, in which case
+# procedure_codes[0] is itself a list, not the period string. Presumably this
+# script's CODE field holds a single flat list like ["AM","SOV","Low"] -- confirm.
+per = procedure_codes[0]
+m = procedure_codes[1]
+tval = procedure_codes[2]
+
+costskim_setup(per, m, tval)
+
diff --git a/ORMetroModel/scripts/Export_Feedback_OMXs.py b/ORMetroModel/scripts/Export_Feedback_OMXs.py
new file mode 100644
index 0000000..a4e95a8
--- /dev/null
+++ b/ORMetroModel/scripts/Export_Feedback_OMXs.py
@@ -0,0 +1,42 @@
+# Export OMX files from LCOG model based on "Code" field in Procedure Sequence
+# 4/29/2025 - Luke Gordon (RSG)
+# Adapted from code from Chetan Joshi (PTV)
+
+import tables
+import numpy as np
+import pandas as pd
+import os
+import VisumPy.helpers as h
+import openmatrix as omx
+
+
+
+def omx_export(mtx_code, mtx_dseg, omx_fn): #mtx_num, mtx_code):
+ omx_file = omx.open_file(omx_fn, 'w')
+ # Pull matrix out but close .omx file if there is an error
+ try:
+ mx = h.GetMatrixRaw(Visum, {"CODE": mtx_code , "DSegCode": mtx_dseg})
+ except Exception as e:
+ print(f"Error getting matrix for CODE={mtx_code}, DSegCode={mtx_dseg}: {e}")
+ omx_file.close()
+ core_name = mtx_code
+ omx_file[core_name] = mx
+ omx_file.close()
+
+
+# Read user inputs from Visum
+proj_dir = Visum.GetPath(2)
+
+# Pull "Code" field from procedure sequence containing Code, DSegCode, and filename
+# SECURITY NOTE: eval() executes arbitrary Python from the procedure's "CODE"
+# field; ast.literal_eval would be safer if the field is always a literal list.
+procedure_code = Visum.Procedures.OperationExecutor.GetCurrentOperation().AttValue("CODE") # Example: outputs a string like -> '[["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]'
+procedure_codes = eval(procedure_code) # Example: outputs a list of lists like -> [["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]
+
+# Loop thru each matrix set in the "Code" field and export to the feedback folder
+for x in range(len(procedure_codes)):
+    code = procedure_codes[x][0]
+    dsegcode = procedure_codes[x][1]
+    filename = procedure_codes[x][2]
+
+
+    omx_export(code, dsegcode, proj_dir + "outputs\\assignment\\feedback\\" + filename)
+
+
diff --git a/ORMetroModel/scripts/Export_FinalAssign_OMXs.py b/ORMetroModel/scripts/Export_FinalAssign_OMXs.py
new file mode 100644
index 0000000..90b4448
--- /dev/null
+++ b/ORMetroModel/scripts/Export_FinalAssign_OMXs.py
@@ -0,0 +1,42 @@
+# Export OMX files from LCOG model based on "Code" field in Procedure Sequence
+# 4/29/2025 - Luke Gordon (RSG)
+# Adapted from code from Chetan Joshi (PTV)
+
+import tables
+import numpy as np
+import pandas as pd
+import os
+import VisumPy.helpers as h
+import openmatrix as omx
+
+
+
+def omx_export(mtx_code, mtx_dseg, omx_fn): #mtx_num, mtx_code):
+ omx_file = omx.open_file(omx_fn, 'w')
+ # Pull matrix out but close .omx file if there is an error
+ try:
+ mx = h.GetMatrixRaw(Visum, {"CODE": mtx_code , "DSegCode": mtx_dseg})
+ except Exception as e:
+ print(f"Error getting matrix for CODE={mtx_code}, DSegCode={mtx_dseg}: {e}")
+ omx_file.close()
+ core_name = mtx_code
+ omx_file[core_name] = mx
+ omx_file.close()
+
+
+# Read user inputs from Visum
+proj_dir = Visum.GetPath(2)
+
+# Pull "Code" field from procedure sequence containing Code, DSegCode, and filename
+# SECURITY NOTE: eval() executes arbitrary Python from the procedure's "CODE"
+# field; ast.literal_eval would be safer if the field is always a literal list.
+procedure_code = Visum.Procedures.OperationExecutor.GetCurrentOperation().AttValue("CODE") # Example: outputs a string like -> '[["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]'
+procedure_codes = eval(procedure_code) # Example: outputs a list of lists like -> [["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]
+
+# Loop thru each matrix set in the "Code" field and export to the final-assignment folder
+for x in range(len(procedure_codes)):
+    code = procedure_codes[x][0]
+    dsegcode = procedure_codes[x][1]
+    filename = procedure_codes[x][2]
+
+
+    omx_export(code, dsegcode, proj_dir + "outputs\\assignment\\final_assign\\" + filename)
+
+
diff --git a/ORMetroModel/scripts/Export_Skim_OMXs.py b/ORMetroModel/scripts/Export_Skim_OMXs.py
new file mode 100644
index 0000000..f7113a0
--- /dev/null
+++ b/ORMetroModel/scripts/Export_Skim_OMXs.py
@@ -0,0 +1,70 @@
+# Export OMX files from LCOG model based on "Code" field in Procedure Sequence
+# 4/29/2025 - Luke Gordon (RSG)
+# Adapted from code from Chetan Joshi (PTV)
+
+import tables
+import numpy as np
+import pandas as pd
+import os
+import VisumPy.helpers as h
+import openmatrix as omx
+
+
+
+def omx_export(mtx_code, corename, mtx_dseg, omx_fn):
+
+ # Grab zone ID list for omx matrix labels
+ zones = np.array(h.GetMulti(Visum.Net.Zones,r"NO", activeOnly = True))
+
+ omx_file = omx.open_file(omx_fn, 'a')
+ # Pull matrix out but close .omx file if there is an error
+ try:
+ mx = h.GetMatrixRaw(Visum, {"CODE": mtx_code , "DSegCode": mtx_dseg})
+ except Exception as e:
+ print(f"Error getting matrix for CODE={mtx_code}, DSegCode={mtx_dseg}: {e}")
+ omx_file.close()
+
+ core_name = corename
+ # Delete the core if it already exists
+ if core_name in omx_file:
+ del omx_file[core_name]
+
+
+ #num_zones = len(zones) # Assumes square matrix
+ ## Create new labels starting from 1
+ #new_labels = np.arange(1, num_zones + 1)
+ #try:
+ # # Check for Mapping and overwrite it
+ # omx_file.mapping('zone_ids')[:] = new_labels
+ #except KeyError:
+ # # Mapping doesn't exist yet — create it
+ # omx_file.createMapping('zone_ids', new_labels) # Just this line worked, but now that the mapping exists it breaks
+
+ omx_file[core_name] = mx.astype(np.float32)
+
+ omx_file.close()
+
+
+# Read user inputs from Visum
+proj_dir = Visum.GetPath(2)
+
+# Pull "Code" field from procedure sequence containing Code, DSegCode, and filename
+procedure_code = Visum.Procedures.OperationExecutor.GetCurrentOperation().AttValue("CODE") # Example: outputs a string like -> '[["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]'
+procedure_codes = eval(procedure_code) # Example: outputs a list of lists like -> [["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]
+
+# Delete omx file if it exists
+filename = procedure_codes[0][3]
+if os.path.exists(filename):
+ os.remove(filename)
+
+
+# Loop thru each matrix set in the "Code" field and export
+for x in range(len(procedure_codes)):
+ code = procedure_codes[x][0]
+ core = procedure_codes[x][1]
+ dsegcode = procedure_codes[x][2]
+ filename = procedure_codes[x][3]
+
+
+ omx_export(code, core, dsegcode, proj_dir + "outputs\\skims\\" + filename)
+
diff --git a/ORMetroModel/scripts/Import_Feedback_OMXs.py b/ORMetroModel/scripts/Import_Feedback_OMXs.py
new file mode 100644
index 0000000..dc32e6f
--- /dev/null
+++ b/ORMetroModel/scripts/Import_Feedback_OMXs.py
@@ -0,0 +1,41 @@
+# Import OMX files into LCOG model based on "Code" field in Procedure Sequence
+# 4/30/2025 - Luke Gordon (RSG)
+# Adapted from code from Chetan Joshi (PTV)
+
+import openmatrix as omx
+import os
+import shutil
+import numpy as np
+import pandas as pd
+import VisumPy.matrices as vmx
+import VisumPy.helpers as h
+
+
+def omx_import(mtx_code, mtx_dseg, omx_fn):
+ omx_file = omx.open_file(omx_fn, 'r')
+ mx_name = omx_file.list_matrices()[0] # 1 matrix per .omx file
+ mx_array = omx_file[mx_name][:]
+ # Read matrix but close .omx file if there is an error
+ try:
+ h.SetMatrixRaw(Visum, {"CODE": mtx_code , "DSegCode": mtx_dseg}, mx_array)
+ except Exception as e:
+ print(f"Error getting matrix for CODE={mtx_code}, DSegCode={mtx_dseg}: {e}")
+ omx_file.close()
+ omx_file.close()
+
+
+# Read user inputs from Visum
+proj_dir = Visum.GetPath(2)
+
+# Pull "Code" field from procedure sequence containing Code, DSegCode, and filename
+# SECURITY NOTE: eval() executes arbitrary Python from the procedure's "CODE"
+# field; ast.literal_eval would be safer if the field is always a literal list.
+procedure_code = Visum.Procedures.OperationExecutor.GetCurrentOperation().AttValue("CODE") # Example: outputs a string like -> '[["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]'
+procedure_codes = eval(procedure_code) # Example: outputs a list of lists like -> [["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]
+
+# Loop thru each matrix set in the "Code" field and import from the feedback folder
+for x in range(len(procedure_codes)):
+    code = procedure_codes[x][0]
+    dsegcode = procedure_codes[x][1]
+    filename = procedure_codes[x][2]
+
+
+    omx_import(code, dsegcode, proj_dir + "outputs\\assignment\\feedback\\" + filename)
diff --git a/ORMetroModel/scripts/Import_FinalAssign_OMXs.py b/ORMetroModel/scripts/Import_FinalAssign_OMXs.py
new file mode 100644
index 0000000..b3b845a
--- /dev/null
+++ b/ORMetroModel/scripts/Import_FinalAssign_OMXs.py
@@ -0,0 +1,41 @@
+# Import OMX files into LCOG model based on "Code" field in Procedure Sequence
+# 4/30/2025 - Luke Gordon (RSG)
+# Adapted from code from Chetan Joshi (PTV)
+
+import openmatrix as omx
+import os
+import shutil
+import numpy as np
+import pandas as pd
+import VisumPy.matrices as vmx
+import VisumPy.helpers as h
+
+
+def omx_import(mtx_code, mtx_dseg, omx_fn):
+ omx_file = omx.open_file(omx_fn, 'r')
+ mx_name = omx_file.list_matrices()[0] # 1 matrix per .omx file
+ mx_array = omx_file[mx_name][:]
+ # Read matrix but close .omx file if there is an error
+ try:
+ h.SetMatrixRaw(Visum, {"CODE": mtx_code , "DSegCode": mtx_dseg}, mx_array)
+ except Exception as e:
+ print(f"Error getting matrix for CODE={mtx_code}, DSegCode={mtx_dseg}: {e}")
+ omx_file.close()
+ omx_file.close()
+
+
+# Read user inputs from Visum
+proj_dir = Visum.GetPath(2)
+
+# Pull "Code" field from procedure sequence containing Code, DSegCode, and filename
+# SECURITY NOTE: eval() executes arbitrary Python from the procedure's "CODE"
+# field; ast.literal_eval would be safer if the field is always a literal list.
+procedure_code = Visum.Procedures.OperationExecutor.GetCurrentOperation().AttValue("CODE") # Example: outputs a string like -> '[["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]'
+procedure_codes = eval(procedure_code) # Example: outputs a list of lists like -> [["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]
+
+# Loop thru each matrix set in the "Code" field and import from the final-assignment folder
+for x in range(len(procedure_codes)):
+    code = procedure_codes[x][0]
+    dsegcode = procedure_codes[x][1]
+    filename = procedure_codes[x][2]
+
+
+    omx_import(code, dsegcode, proj_dir + "outputs\\assignment\\final_assign\\" + filename)
diff --git a/ORMetroModel/scripts/Import_WarmStart_OMXs.py b/ORMetroModel/scripts/Import_WarmStart_OMXs.py
new file mode 100644
index 0000000..55edd0c
--- /dev/null
+++ b/ORMetroModel/scripts/Import_WarmStart_OMXs.py
@@ -0,0 +1,41 @@
+# Import warm-start OMX files into LCOG model based on "Code" field in Procedure Sequence
+# 4/30/2025 - Luke Gordon (RSG)
+# Adapted from code from Chetan Joshi (PTV)
+
+import openmatrix as omx
+import os
+import shutil
+import numpy as np
+import pandas as pd
+import VisumPy.matrices as vmx
+import VisumPy.helpers as h
+
+
+def omx_import(mtx_code, mtx_dseg, omx_fn):
+ omx_file = omx.open_file(omx_fn, 'r')
+ mx_name = omx_file.list_matrices()[0] # 1 matrix per .omx file
+ mx_array = omx_file[mx_name][:]
+ # Read matrix but close .omx file if there is an error
+ try:
+ h.SetMatrixRaw(Visum, {"CODE": mtx_code , "DSegCode": mtx_dseg}, mx_array)
+ except Exception as e:
+ print(f"Error getting matrix for CODE={mtx_code}, DSegCode={mtx_dseg}: {e}")
+ omx_file.close()
+ omx_file.close()
+
+
+# Read user inputs from Visum
+proj_dir = Visum.GetPath(2)
+
+# Pull "Code" field from procedure sequence containing Code, DSegCode, and filename
+# SECURITY NOTE: eval() executes arbitrary Python from the procedure's "CODE"
+# field; ast.literal_eval would be safer if the field is always a literal list.
+procedure_code = Visum.Procedures.OperationExecutor.GetCurrentOperation().AttValue("CODE") # Example: outputs a string like -> '[["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]'
+procedure_codes = eval(procedure_code) # Example: outputs a list of lists like -> [["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]
+
+# Loop thru each matrix set in the "Code" field and import from the warm-start folder
+for x in range(len(procedure_codes)):
+    code = procedure_codes[x][0]
+    dsegcode = procedure_codes[x][1]
+    filename = procedure_codes[x][2]
+
+
+    omx_import(code, dsegcode, proj_dir + "warmstart_omxs\\" + filename)
diff --git a/ORMetroModel/scripts/Initialize_Output_Bins.py b/ORMetroModel/scripts/Initialize_Output_Bins.py
new file mode 100644
index 0000000..6f0611c
--- /dev/null
+++ b/ORMetroModel/scripts/Initialize_Output_Bins.py
@@ -0,0 +1,37 @@
+# Set output_date field in Visum network attributes and create folders for reports
+
+"""
+created 5/14/2025
+
+@author: luke.gordon
+
+"""
+
+# Libraries
+import VisumPy.helpers
+import VisumPy.excel
+import pandas as pd
+import numpy as np
+from datetime import datetime
+import os.path
+
+
+
+# Calculate timestamp for folder name and save in Network attribute in Visum
+date = datetime.now().strftime("(%Y-%m-%d)-%H_%M_%S")
+Visum.Net.SetAttValue("output_date", date)
+
+# Read user inputs from Visum
+proj_dir = Visum.GetPath(2)
+
+# Create timestamped folder for all results
+os.mkdir(proj_dir+"outputs/reports/ModelRun_"+date)
+
+# Create folder for storing this runs assignment quality results
+os.mkdir(proj_dir+"outputs/reports/ModelRun_"+date+"/PrT Assignment Quality Reports")
+
+# Create folder for storing this runs assignment results (Pct. Error, RMSE, etc.)
+os.mkdir(proj_dir+"outputs/reports/ModelRun_"+date+"/Assignment Results")
+
+# Create folder for PuT assignment stats
+os.mkdir(proj_dir+"outputs/reports/ModelRun_"+date+"/PuT Assignment Stats")
\ No newline at end of file
diff --git a/ORMetroModel/scripts/KNRConnectors_Setup.py b/ORMetroModel/scripts/KNRConnectors_Setup.py
new file mode 100644
index 0000000..89f2741
--- /dev/null
+++ b/ORMetroModel/scripts/KNRConnectors_Setup.py
@@ -0,0 +1,194 @@
+
+#script to insert KNR connectors
+# Chetan Joshi, PTV Portland OR 6/25/2025
+
+import numpy as np
+import scipy.spatial
+import json
+import csv
+import os
+import VisumPy.helpers as h
+import pandas as pd
+PRIO = 20480  # message priority passed to Visum.Log below
+
+
+_TABLE = "KnRConstraints"  # Visum table holding per-TSys MAXDIST / MAXSTOPS entries
+SCALEFACTOR = 5280  # feet per mile -- presumably network coordinates are in feet; confirm
+
+CONSPEED = 30 #mph -- assumed drive speed on KNR connectors
+# FN = os.path.join(Visum.GetPath(2), "taz_connectors.json")
+FN = os.path.join(Visum.GetPath(2), "connectors_knr.net")  # batch .net file written, then re-read additively
+
+def vision_header():
+ vision_head = [["$VISION"],
+ ["$VERSION:VERSNR", "FILETYPE", "LANGUAGE", "UNIT"],
+ [15,"Net","ENG","MI"]]
+ return vision_head
+
+
+def create_knr_connectors():  # build KNR (drive-access) connectors via a batch .net file; no-op if they already exist
+
+    # Check for type 10 connectors (knr connectors), if they exist, don't add any more connectors
+    checktype = np.array(h.GetMulti(Visum.Net.Connectors,r"TYPENO", activeOnly = True))
+    if 10 in checktype:
+        Visum.Log(PRIO, 'KNR connectors already built!')
+        # Save original TSysSet in TSYS_HOLDING for later use
+        #tsysorig = h.GetMulti(Visum.Net.Connectors,r"TSYSSET", activeOnly = True)
+        #h.SetMulti(Visum.Net.Connectors, r"TSYS_HOLDING", tsysorig)
+
+        return
+    else:
+        Visum.Log(PRIO, 'Building KNR Connectors')
+        # Save original TSysSet in TSYS_HOLDING for later use
+        #tsysorig = h.GetMulti(Visum.Net.Connectors,r"TSYSSET", activeOnly = True)
+        #h.SetMulti(Visum.Net.Connectors, r"TSYS_HOLDING", tsysorig)
+
+
+
+    Visum.Log(PRIO, 'generate search list...')
+    tsys_constr = dict([tsys, [maxdist, maxstops]] for tsys, maxdist, maxstops in Visum.Net.TableDefinitions.ItemByKey(_TABLE).TableEntries.GetMultipleAttributes(["TSYS", "MAXDIST", "MAXSTOPS"]))  # per-TSys [max search dist (mi), max stops]
+
+    cutoff = 5*SCALEFACTOR  # 5-mile search radius in network coordinate units
+    tazs = Visum.Net.Zones.GetMultiAttValues("NO", False)
+    stp_no = np.array(Visum.Net.StopAreas.GetMultiAttValues("NO", False), dtype='int')[:, 1]  # column 1 of (index, NO) pairs
+    stop_tsys = dict([[sno, [nodeno, tsys]] for sno, nodeno, tsys in Visum.Net.StopAreas.GetMultipleAttributes(["NO", "NODENO", "DISTINCT:STOPPOINTS\\TSYSSET"])])
+    taz_xy = np.array(Visum.Net.Zones.GetMultipleAttributes(["XCOORD", "YCOORD"], False))
+    stp_xy = np.array(Visum.Net.StopAreas.GetMultipleAttributes(["XCOORD", "YCOORD"], False))
+
+    Visum.Log(PRIO, 'calculate nearest stop in catchment for each taz...')
+
+    distance = scipy.spatial.distance.cdist(taz_xy, stp_xy, 'euclidean')  # zones x stop areas, crow-fly
+    result = dict()
+    connector_table = [["$CONNECTOR:ZONENO","NODENO","DIRECTION","TYPENO","TSYSSET"]]
+    for ix, taz in tazs:  # rows are (index, zone NO); ix is apparently 1-based (see ix-1 below)
+        connector_nodes = set()
+        catchment_stops = stp_no.compress(distance[ix-1, :] <= cutoff)
+        catchment_distance = distance[ix-1, :].compress(distance[ix-1, :] <= cutoff)
+
+        for tsys in tsys_constr:
+            max_dist, max_stops = tsys_constr[tsys]
+            max_dist = max_dist*SCALEFACTOR  # miles -> network units
+            stops_added = 0
+            for dist, stopno in sorted(zip(catchment_distance, catchment_stops)):  # nearest stops first
+                snode, stsys = stop_tsys[stopno]
+                stsys = stsys.split(",")
+                if dist < max_dist and tsys in stsys:
+                    connector_nodes.add(snode)
+                    stops_added+=1
+                if stops_added >= max_stops:
+                    break
+
+        result[taz] = list(connector_nodes)  # retained only for the commented-out JSON debug dump below
+
+        # THIS IS HOW CONNECTORS ARE ADDED USING COM - BUT THIS IS TOO SLOW: SO USE NET FILE BATCHING INSTEAD.
+        # for node in connector_nodes:
+        #     if Visum.Net.Connectors.ExistsByKey(node, taz):
+        #         Visum.Log(PRIO, "{} -> {} | already exists!".format(taz, node))
+        #         Visum.Net.Connectors.SourceItemByKey(taz, node).SetAttValue("TypeNo", 7)
+        #     else:
+        #         connector = Visum.Net.AddConnector(taz, node)
+        #         connector.SetAttValue("TypeNo", 10)
+
+        # GENERATE NET FILE BATCH TABLE FOR CONNECTORS.
+        # need to preserve original connectors esp: walk to destination(?)
+        for node in connector_nodes:
+            # --- ["$CONNECTOR:ZONENO","NODENO","DIRECTION","TYPENO"]
+            if Visum.Net.Connectors.ExistsByKey(node, taz):  # NOTE(review): key order (node, taz) vs (taz, node) in the COM snippet above -- confirm against the Visum API
+                # Visum.Log(PRIO, "{} -> {} | already exists!".format(taz, node))
+                connector_table.append([taz, node, 'O', 7, 'i']) # origin/access connector
+                dtsys = Visum.Net.Connectors.DestItemByKey(node, taz).AttValue("TSYSSET")
+                connector_table.append([taz, node, 'D', 7, dtsys]) # destination/egress connector
+            else:
+                connector_table.append([taz, node, 'O', 10, 'i']) # origin/access connector
+                connector_table.append([taz, node, 'D', 10, '']) # destination/egress connector
+
+    Visum.Log(PRIO, 'write results to file...')
+    # with open(FN, 'w') as out_file:
+    #     json.dump(result, out_file, indent=2)
+
+    with open(FN, 'w', newline='') as csvfile:
+        writer = csv.writer(csvfile, delimiter =';')
+        writer.writerows(vision_header())
+        writer.writerows(connector_table)
+
+    Visum.Log(PRIO, 'read knr connectors...')
+
+    loadnetctrl = Visum.IO.CreateAddNetReadController() # read additionally controller
+    loadnetctrl.SetWhatToDo("CONNECTOR", 5) # conflict handling - overwrite attributes
+    Visum.IO.LoadNet(FN, ReadAdditive=True, RouteSearch=None, AddNetRead=loadnetctrl, NormalizePolygons=False,
+                     MergeSameCoordPolygonPoints=False, DecimalsForMergeSameCoordPolygonPoints=-1)
+
+    # Set Length for direction not previously set
+    length = h.GetMulti(Visum.Net.Connectors,r"LENGTH" , activeOnly = True)
+    zoneno = h.GetMulti(Visum.Net.Connectors,r"ZONENO" , activeOnly = True)
+    nodeno = h.GetMulti(Visum.Net.Connectors,r"NODENO" , activeOnly = True)
+    att_list = [length,zoneno,nodeno]
+    df = pd.DataFrame(np.column_stack(att_list), columns = ['length','zoneno','nodeno'])
+    df[['zoneno','nodeno']] = df[['zoneno','nodeno']].astype(str)
+    df['concat'] = df['zoneno'] + df['nodeno']  # NOTE(review): plain string concat of IDs can collide; a separator (e.g. "_") would be safer
+    df_unique = df.groupby('concat').agg(length_max=('length', 'max')).reset_index()
+    df = pd.merge(df, df_unique, on='concat', how='left')
+    h.SetMulti(Visum.Net.Connectors,r"LENGTH" ,df['length_max'])  # max length per (zone,node) pair applied to both directions; 'left' merge keeps GetMulti row order
+
+
+
+    Visum.Log(PRIO, 'done!')
+
+
+def set_connector_properties(knrdirection):  # knrdirection in {"KTW", "WTK", "WTW"}: which trip end opens the drive ('i') TSys
+    connector_type_dir = Visum.Net.Connectors.GetMultipleAttributes(["TypeNo", "Direction", "Length", "TSYS_HOLDING"])  # TSYS_HOLDING: original TSysSet (see commented-out save in create_knr_connectors) -- confirm it is populated
+    connector_tsys = []
+    connector_time = []
+    # we assume here that KNR is not open on any connector to start
+    for typeno, direction, distance, tsys in connector_type_dir:
+        if knrdirection == "KTW":
+            if direction == 1: # Origin, leaving a zone
+                if typeno in [7, 10]:
+                    connector_tsys.append("i")
+                    connector_time.append([3600*distance/CONSPEED, 999999])  # [T0_TSYS(I), T0_TSYS(W)]: drive time in seconds, walk closed
+                else:
+                    connector_tsys.append("")
+                    connector_time.append([999999, 999999])
+
+            elif direction == 2: # Destination, entering a zone
+                if typeno == 10:
+                    # a knr drive only connector- so no walk on this.
+                    connector_tsys.append("")
+                    connector_time.append([999999, 999999])
+                else:
+                    # could be a walk destination connector,
+                    # we assume here that KNR is not open on any connector to start so keep Tsys the way it was (else: could also set to 'w')
+                    connector_tsys.append(tsys)
+                    connector_time.append([999999, 3600*distance/2.5])  # walk at 2.5 mph
+        elif knrdirection == "WTK":
+            if direction == 2: # Destination, entering a zone
+                if typeno in [7, 10]:
+                    connector_tsys.append("i")
+                    connector_time.append([3600*distance/CONSPEED, 999999])  # drive time in seconds, walk closed
+                else:
+                    connector_tsys.append("")
+                    connector_time.append([999999, 999999])
+            elif direction == 1: # Origin, leaving a zone
+                if typeno == 10:
+                    # a knr drive only connector- so no walk on this.
+                    connector_tsys.append("")
+                    connector_time.append([999999, 999999])
+                else:
+                    # could be a walk destination connector,
+                    # we assume here that KNR is not open on any connector to start so keep Tsys the way it was (else: could also set to 'w')
+                    connector_tsys.append(tsys)
+                    connector_time.append([999999, 3600*distance/2.5])  # walk at 2.5 mph
+        elif knrdirection == "WTW":
+            connector_tsys.append(tsys)  # restore original TSysSet; walk-only skim
+            connector_time.append([999999, 3600*distance/2.5])
+
+    h.SetMulti(Visum.Net.Connectors, "TSysSet", connector_tsys)  # list lengths match connector count only when knrdirection is one of the three codes above
+    Visum.Net.Connectors.SetMultipleAttributes(["T0_TSYS(I)", "T0_TSYS(W)"], connector_time)
+
+
+create_knr_connectors()  # no-op (logs only) when type-10 KNR connectors already exist
+
+
+proj_dir = Visum.GetPath(2)  # project directory (not used further in this script as shown)
+knrdir = Visum.Procedures.OperationExecutor.GetCurrentOperation().AttValue("CODE")  # expected "KTW", "WTK", or "WTW" from the procedure's Code box
+set_connector_properties(knrdir)
diff --git a/ORMetroModel/scripts/OP_SetTransitRunTimes.py b/ORMetroModel/scripts/OP_SetTransitRunTimes.py
new file mode 100644
index 0000000..4b6f5c5
--- /dev/null
+++ b/ORMetroModel/scripts/OP_SetTransitRunTimes.py
@@ -0,0 +1,76 @@
+# replicate Emme TTF in Visum
+# chetan joshi, ptv portland or 2/11/2025
+# @luke.gordon, adapted further for LCOG model 2/11/2025
+
+ # a ft1 =timau * 1.15
+ # a ft2 =timau * 1.2
+ # a ft3 =timau
+ # a ft4 =timau * 1.09
+ # a ft5 =60 * length / us1
+ # a ft6 =60 * length / us1 + 60 * length / 180
+ # a ft11 =(timau * 1.15)
+ # a ft12 =(timau * 1.3)
+ # a ft13 =(timau)
+ # a ft14 =(timau * 1.09)
+ # a ft15 =(60 * length / us1)
+ # a ft16 =(60 * length / us1 + 60 * length / 180)
+ # a ft21 =timau * 1.05
+ # a ft22 =timau * 1.03
+
+default_speed = 30 #mph -> default speed of transit
+
+def calc_ttf(ft, timau, length, us1):
+ if us1 <= 0:
+ us1 = default_speed
+
+ if length:
+ transit_time = 3600*length / default_speed
+
+ if timau < 999:
+ if ft == 1:
+ transit_time = timau * 1.15
+ elif ft == 2:
+ transit_time = timau * 1.20
+ elif ft == 3:
+ transit_time = timau
+ elif ft == 4:
+ transit_time = timau * 1.09
+ elif ft == 5:
+ transit_time = 60* (60 * length / us1)
+ elif ft == 6:
+ transit_time = 60* (60 * length / us1 + 60 * length / 180)
+ elif ft ==11:
+ transit_time = timau * 1.15
+ elif ft ==12:
+ transit_time = timau * 1.30
+ elif ft ==13:
+ transit_time = timau
+ elif ft ==14:
+ transit_time =timau * 1.09
+ elif ft == 15:
+ transit_time = 60* (60 * length / us1)
+ elif ft == 16:
+ transit_time = 60* (60 * length / us1 + 60 * length / 180)
+ elif ft == 21:
+ transit_time = timau * 1.05
+ elif ft == 22:
+ transit_time = timau * 1.03
+ else:
+ transit_time = 1
+
+ return transit_time
+
+def update_transit_time():
+ tpitems = Visum.Net.TimeProfileItems.GetMultipleAttributes(["LINEROUTEITEM\\EMME_TTFINDEX", "SUM:USEDLINEROUTEITEMS\\OUTLINK\\AddVal2","SUM:USEDLINEROUTEITEMS\\POSTLENGTH",
+ "LINEROUTEITEM\\EMME_DATA1"])
+ result = []
+ for ft, timau, length, us1 in tpitems:
+ haul_time = calc_ttf(ft, timau, length, us1)
+ result.append([haul_time, ])
+
+ Visum.Net.TimeProfileItems.SetMultipleAttributes(["AddVal"], result)
+
+
+update_transit_time()
+
+
diff --git a/ORMetroModel/scripts/PK_SetTransitRunTimes.py b/ORMetroModel/scripts/PK_SetTransitRunTimes.py
new file mode 100644
index 0000000..9eb0479
--- /dev/null
+++ b/ORMetroModel/scripts/PK_SetTransitRunTimes.py
@@ -0,0 +1,76 @@
+# replicate Emme TTF in Visum
+# chetan joshi, ptv portland or 2/11/2025
+# @luke.gordon, adapted further for LCOG model 2/11/2025
+
+ # a ft1 =timau * 1.15
+ # a ft2 =timau * 1.2
+ # a ft3 =timau
+ # a ft4 =timau * 1.09
+ # a ft5 =60 * length / us1
+ # a ft6 =60 * length / us1 + 60 * length / 180
+ # a ft11 =(timau * 1.15)
+ # a ft12 =(timau * 1.3)
+ # a ft13 =(timau)
+ # a ft14 =(timau * 1.09)
+ # a ft15 =(60 * length / us1)
+ # a ft16 =(60 * length / us1 + 60 * length / 180)
+ # a ft21 =timau * 1.05
+ # a ft22 =timau * 1.03
+
+default_speed = 30 #mph -> default speed of transit
+
+def calc_ttf(ft, timau, length, us1):
+ if us1 <= 0:
+ us1 = default_speed
+
+ if length:
+ transit_time = 3600*length / default_speed
+
+ if timau < 999:
+ if ft == 1:
+ transit_time = timau * 1.15
+ elif ft == 2:
+ transit_time = timau * 1.20
+ elif ft == 3:
+ transit_time = timau
+ elif ft == 4:
+ transit_time = timau * 1.09
+ elif ft == 5:
+ transit_time = 60* (60 * length / us1)
+ elif ft == 6:
+ transit_time = 60* (60 * length / us1 + 60 * length / 180)
+ elif ft ==11:
+ transit_time = timau * 1.15
+ elif ft ==12:
+ transit_time = timau * 1.30
+ elif ft ==13:
+ transit_time = timau
+ elif ft ==14:
+ transit_time =timau * 1.09
+ elif ft == 15:
+ transit_time = 60* (60 * length / us1)
+ elif ft == 16:
+ transit_time = 60* (60 * length / us1 + 60 * length / 180)
+ elif ft == 21:
+ transit_time = timau * 1.05
+ elif ft == 22:
+ transit_time = timau * 1.03
+ else:
+ transit_time = 1
+
+ return transit_time
+
+def update_transit_time():
+ tpitems = Visum.Net.TimeProfileItems.GetMultipleAttributes(["LINEROUTEITEM\\EMME_TTFINDEX", "SUM:USEDLINEROUTEITEMS\\OUTLINK\\AddVal1","SUM:USEDLINEROUTEITEMS\\POSTLENGTH",
+ "LINEROUTEITEM\\EMME_DATA1"])
+ result = []
+ for ft, timau, length, us1 in tpitems:
+ haul_time = calc_ttf(ft, timau, length, us1)
+ result.append([haul_time, ])
+
+ Visum.Net.TimeProfileItems.SetMultipleAttributes(["AddVal"], result)
+
+
+update_transit_time()
+
+
diff --git a/ORMetroModel/scripts/PrTAssignmentQualityData_Export.py b/ORMetroModel/scripts/PrTAssignmentQualityData_Export.py
new file mode 100644
index 0000000..f859866
--- /dev/null
+++ b/ORMetroModel/scripts/PrTAssignmentQualityData_Export.py
@@ -0,0 +1,59 @@
+# LCOG: Pull PrT Assignment Quality Report from Visum and save as csv file
+
+"""
+created 5/14/2025
+
+@author: luke.gordon
+
+"""
+
+# Libraries
+import VisumPy.helpers
+import VisumPy.excel
+import pandas as pd
+import numpy as np
+import csv
+#from datetime import datetime
+import math
+import os.path
+
+
+# Pull timestamp for folder name and iteration number from Visum network attribute
+date = Visum.Net.AttValue("output_date")
+iter = Visum.Net.AttValue("iter")
+iter = str(int(iter))
+
+# Read user inputs from Visum
+proj_dir = Visum.GetPath(2)
+
+# Pull "Code" field from procedure sequence to name files
+code = Visum.Procedures.OperationExecutor.GetCurrentOperation().AttValue("CODE")
+
+# Pull PrT assignment quality report and format into a table for export
+list = Visum.Workbench.Lists.CreatePrTAssQualityList
+
+list.AddKeyColumns()
+
+list.AddColumn(Attribut="MEANABSVOLDIFFTOTAL")
+list.AddColumn(Attribut="MEANRELVOLDIFFTOTAL")
+list.AddColumn(Attribut="ASSIGNEDDEMAND")
+list.AddColumn(Attribut="VEHMITRAVPRT")
+list.AddColumn(Attribut="VEHHOURTRAVT0")
+list.AddColumn(Attribut="VEHHOURTRAVTCUR")
+list.AddColumn(Attribut="VEHHOURIMP")
+list.AddColumn(Attribut="TOTALEXCESSCOST")
+list.AddColumn(Attribut="AVGEXCESSCOST")
+list.AddColumn(Attribut="GAP")
+
+df = list.SaveToArray()
+
+
+df = pd.DataFrame(df)
+
+
+df = df.rename(columns={0 : 'Demand segment set code' , 1 : 'Iteration' , 2 : 'Mean absolute volume difference total' , 3 : 'Mean relative volume difference total' ,
+ 4 : 'Assigned demand' , 5 : 'Vehicle miles traveled PrT' , 6 : 'Vehicle hours traveled t0' , 7 : 'Vehicle hours traveled tCur' ,
+ 8 : 'Vehicle hour impedance' , 9 : 'Total excess cost' , 10: 'Mean excess cost', 11: 'Gap'})
+
+df.to_csv(proj_dir+"outputs/reports/ModelRun_"+date+"/PrT Assignment Quality Reports/"+code+"_Iter"+iter+".csv")
+
diff --git a/ORMetroModel/scripts/PrTSkim_PostProcessing.py b/ORMetroModel/scripts/PrTSkim_PostProcessing.py
new file mode 100644
index 0000000..2253a59
--- /dev/null
+++ b/ORMetroModel/scripts/PrTSkim_PostProcessing.py
@@ -0,0 +1,64 @@
+# Post-processing PrT skims from Visum based on "Code" field in Procedure Sequence
+# 6/17/2025 - Luke Gordon (RSG)
+# Adapted from code from Chetan Joshi (PTV)
+
+import tables
+import numpy as np
+import pandas as pd
+import os
+import VisumPy.helpers as h
+import openmatrix as omx
+
+
+def prtskim_postprocessing(mtx_dseg):  # mtx_dseg: 'wlk' for walk skims, otherwise an auto time-period DSeg (e.g. 'AM')
+
+    # Pull Intrazonal attributes out to set to diagonals
+    intrtime = np.array(h.GetMulti(Visum.Net.Zones,r"intrtime" , activeOnly = True))
+    intrdist = np.array(h.GetMulti(Visum.Net.Zones,r"intrdist" , activeOnly = True))
+    intrdist_wlk = np.array(h.GetMulti(Visum.Net.Zones,r"intrdist_wlk", activeOnly = True))
+
+    # Pull, Process, and Set matrices as needed by DSegCode
+    if mtx_dseg == 'wlk': # Run Walk distance processing
+        # Pull
+        dis_wlk = h.GetMatrixRaw(Visum, {"CODE": "DIS" , "DSegCode": mtx_dseg}) # Walk Distance
+        # Process
+        np.fill_diagonal(dis_wlk, intrdist_wlk)
+        # Set
+        h.SetMatrixRaw(Visum, {"CODE": "DIS" , "DSegCode": mtx_dseg}, dis_wlk)
+
+    else: # Run Auto distance, time, and tolls processing
+        # Pull
+        dis = h.GetMatrixRaw(Visum, {"CODE": "DIS" , "DSegCode": mtx_dseg}) # Distance
+        time = h.GetMatrixRaw(Visum, {"CODE": "AD1" , "DSegCode": mtx_dseg}) # Time
+        toll = h.GetMatrixRaw(Visum, {"CODE": "TOL" , "DSegCode": mtx_dseg}) # Tolls
+        # Process
+        np.fill_diagonal(dis, intrdist) # Distance
+        np.fill_diagonal(time, intrtime) # Time -- diagonal filled BEFORE the /60 below, so intrtime must be in seconds; confirm
+        time = np.where(time == 9999.00, 9999.00, time / 60) # Seconds to Minutes (9999 "unreachable" sentinel preserved)
+        np.fill_diagonal(toll, 0) # Tolls
+        # Set
+        h.SetMatrixRaw(Visum, {"CODE": "DIS" , "DSegCode": mtx_dseg}, dis)
+        h.SetMatrixRaw(Visum, {"CODE": "AD1" , "DSegCode": mtx_dseg}, time)
+        h.SetMatrixRaw(Visum, {"CODE": "TOL" , "DSegCode": mtx_dseg}, toll)
+
+
+
+
+
+# Read user inputs from Visum
+proj_dir = Visum.GetPath(2)  # project directory (not used below as shown)
+
+dseg = Visum.Procedures.OperationExecutor.GetCurrentOperation().AttValue("CODE") # Example: outputs a string like 'AM' from AM in the code box
+prtskim_postprocessing(dseg)
+
+
+## Pull "Code" field from procedure sequence containing Code, DSegCode, and filename
+#procedure_code = Visum.Procedures.OperationExecutor.GetCurrentOperation().AttValue("CODE") # Example: outputs a string like -> '[["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]'
+#procedure_codes = eval(procedure_code) # Example: outputs a list of lists like -> [["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]
+#
+## Loop thru each matrix set in the "Code" field and export
+#for x in range(len(procedure_codes)):
+# dsegcode = procedure_codes[x][0]
+#
+# prtskim_postprocessing(dsegcode)
+
diff --git a/ORMetroModel/scripts/PuTAssignmentStats_Export.py b/ORMetroModel/scripts/PuTAssignmentStats_Export.py
new file mode 100644
index 0000000..1b985f6
--- /dev/null
+++ b/ORMetroModel/scripts/PuTAssignmentStats_Export.py
@@ -0,0 +1,66 @@
+# LCOG: Pull PuT Assignment Statistics from Visum and save as csv file
+
+"""
+created 5/21/2025
+
+@author: edna.aguilar
+
+"""
+
+# Libraries
+import VisumPy.helpers
+import VisumPy.excel
+import pandas as pd
+import numpy as np
+import csv
+#from datetime import datetime
+import math
+import os.path
+
+
+# Pull timestamp for folder name and iteration number from Visum network attribute
+date = Visum.Net.AttValue("output_date")
+iter = Visum.Net.AttValue("iter")
+iter = str(int(iter))
+
+# Read user inputs from Visum
+proj_dir = Visum.GetPath(2)
+
+# Pull "Code" field from procedure sequence to name files
+code = Visum.Procedures.OperationExecutor.GetCurrentOperation().AttValue("CODE")
+
+# Pull PrT assignment quality report and format into a table for export
+list = Visum.Workbench.Lists.CreatePuTStatList
+
+list.AddKeyColumns()
+
+attributes = [
+ "MeanJourneyTimePut", "MeanRideTimePuT", "MeanInVehTimePuT",
+ "MeanTransferWaitTimePut", "MeanOriginWaitTimePuT", "MeanWalkTimePuT",
+ "MeanPuTAuxTimePuT", "MeanSharingTravelTimePuT", "MeanAccessTimePuT",
+ "MeanEgressTimePuT", "MeanJourneyDistPuT", "MeanRideDistPuT",
+ "MeanNumTransfersPuT",
+ "TotalJourneyTimePuT", "TotalRideTimePuT", "TotalInVehTimePuT",
+ "TotalTransferWaitTimePuT", "TotalOriginWaitTimePuT", "TotalWalkTimePuT",
+ "TotalPuTAuxTimePut", "TotalSharingTravelTimePuT", "TotalAccessTimePuT",
+ "TotalEgressTimePuT", "TotalJourneyDistPuT", "TotalRideDistPuT",
+ "TotalNumTransfersPuT",
+ "PTripsUnlinkedPuT", "PTripsLinkedTot", "PTripsLinked0", "PTripsLinked1",
+ "PTripsLinked2", "PTripsLinkedGt2", "PTripsLinkedWRide",
+ "PTripsLinkedWoRide", "PTripsLinkedWoCon"
+]
+
+for attr in attributes:
+ list.AddColumn(Attribut=attr)
+
+df = pd.DataFrame(list.SaveToArray())
+
+# Convert to long format
+data_only = df.iloc[0]
+data_only.index = attributes
+df_long = data_only.reset_index()
+df_long.columns = ['Attribute', 'Value']
+
+# Save
+df_long.to_csv(proj_dir + "outputs/reports/ModelRun_" + date + "/PuT Assignment Stats/PuTAssignment_" + code + "_Iter" + iter + ".csv")
+
diff --git a/ORMetroModel/scripts/PuTSkim_PostProcessing.py b/ORMetroModel/scripts/PuTSkim_PostProcessing.py
new file mode 100644
index 0000000..62549d0
--- /dev/null
+++ b/ORMetroModel/scripts/PuTSkim_PostProcessing.py
@@ -0,0 +1,130 @@
+# Post-processing PuT skims from Visum based on "Code" field in Procedure Sequence
+# 6/16/2025 - Luke Gordon (RSG)
+# Adapted from code from Chetan Joshi (PTV)
+
+import tables
+import numpy as np
+import pandas as pd
+import os
+import VisumPy.helpers as h
+import openmatrix as omx
+
+
+# Create function to build skim matrices that weren't built during the skimming procedure itself if they don't yet exist
+def creatematrices(code,mtx_dseg):  # ensure a matrix with this CODE/DSegCode exists so later Get/Set calls succeed
+    try:
+        mx = h.GetMatrixRaw(Visum, {"CODE": code , "DSegCode": mtx_dseg})  # existence probe: raises when the matrix is missing
+    except Exception as e:
+        mx = Visum.Net.AddMatrix(No=-1,ObjectTypeRef=2,MatrixType=4)  # No=-1 lets Visum assign a number; ObjectTypeRef/MatrixType codes per Visum COM docs -- confirm
+        mx.SetAttValue("CODE",code)
+        mx.SetAttValue("DSegCode",mtx_dseg)
+
+
+def putskim_postprocessing(mtx_dseg,knr_flag):  # mtx_dseg: PuT demand segment code; knr_flag: 'wtw'/'ktw'/'wtk' access-egress combination
+
+    # Build matrices if they don't yet exist
+    creatematrices("NBR" ,mtx_dseg)
+    creatematrices("IVTT",mtx_dseg)
+    creatematrices("STC" ,mtx_dseg)
+    #creatematrices("OVT" ,mtx_dseg)
+    creatematrices("VTC" ,mtx_dseg)
+
+    # Pull matrices out of Visum as numpy arrays
+    wkt = h.GetMatrixRaw(Visum, {"CODE": "WKT" , "DSegCode": mtx_dseg}) # Walk time
+    act = h.GetMatrixRaw(Visum, {"CODE": "ACT" , "DSegCode": mtx_dseg}) # Access time
+    egt = h.GetMatrixRaw(Visum, {"CODE": "EGT" , "DSegCode": mtx_dseg}) # Egress time
+    ntr = h.GetMatrixRaw(Visum, {"CODE": "NTR" , "DSegCode": mtx_dseg}) # Number of transfers
+    nbr = h.GetMatrixRaw(Visum, {"CODE": "NBR" , "DSegCode": mtx_dseg}) # Number of boardings
+    wowt = h.GetMatrixRaw(Visum, {"CODE": "WOWT" , "DSegCode": mtx_dseg}) # Weighted origin wait time
+    wtwt = h.GetMatrixRaw(Visum, {"CODE": "WTWT" , "DSegCode": mtx_dseg}) # Weighted transfer wait time
+    ivtt = h.GetMatrixRaw(Visum, {"CODE": "IVTT" , "DSegCode": mtx_dseg}) # In-vehicle travel time
+    ivtt_a = h.GetMatrixRaw(Visum, {"CODE": "IVTT(a)" , "DSegCode": mtx_dseg}) # In-vehicle travel time (BRT)
+    ivtt_b = h.GetMatrixRaw(Visum, {"CODE": "IVTT(b)" , "DSegCode": mtx_dseg}) # In-vehicle travel time (Bus)
+    ivtt_e = h.GetMatrixRaw(Visum, {"CODE": "IVTT(e)" , "DSegCode": mtx_dseg}) # In-vehicle travel time (Streetcar)
+    ivtt_l = h.GetMatrixRaw(Visum, {"CODE": "IVTT(l)" , "DSegCode": mtx_dseg}) # In-vehicle travel time (LRT)
+    ivtt_r = h.GetMatrixRaw(Visum, {"CODE": "IVTT(r)" , "DSegCode": mtx_dseg}) # In-vehicle travel time (WES)
+    pla = h.GetMatrixRaw(Visum, {"CODE": "PLA" , "DSegCode": mtx_dseg}) # Stop type constant (raw sum)
+    stc = h.GetMatrixRaw(Visum, {"CODE": "STC" , "DSegCode": mtx_dseg}) # Stop type constant (final average)
+    #ovt = h.GetMatrixRaw(Visum, {"CODE": "OVT" , "DSegCode": mtx_dseg}) # Out of vehicle time
+    vtc = h.GetMatrixRaw(Visum, {"CODE": "VTC" , "DSegCode": mtx_dseg}) # Vehicle type constant
+
+    # Process matrices
+    # Walk time
+    if knr_flag == 'wtw': # walk access + walk egress: fold both ends into walk time
+        wkt = np.minimum(wkt + act + egt , 9999.00)
+        wkt = np.where((wkt < 0.01) | (wkt > 20.0) , 9999.00, wkt)  # outside (0.01, 20.0) treated as unavailable
+        np.fill_diagonal(wkt, 9999.00)
+    elif knr_flag == 'ktw': # drive access: access time excluded from walk
+        wkt = np.minimum(wkt + egt , 9999.00)
+        wkt = np.where((wkt < 0.01) | (wkt > 20.0) , 9999.00, wkt)
+        np.fill_diagonal(wkt, 9999.00)
+    elif knr_flag == 'wtk': # drive egress: egress time excluded from walk
+        wkt = np.minimum(wkt + act , 9999.00)
+        wkt = np.where((wkt < 0.01) | (wkt > 20.0) , 9999.00, wkt)
+        np.fill_diagonal(wkt, 9999.00)
+
+
+    # Weighted origin wait time
+    wowt = np.where(wkt == 9999.00, 9999.00, wowt)  # origin wait invalid wherever walk leg is invalid
+    # Perceived In-vehicle time by Mode
+    if mtx_dseg == 'amPuT' or mtx_dseg == 'pmPuT': # Peak
+        ivtt_a = np.where(ivtt_a == 9999.00, 9999.00, ivtt_a * 0.95)
+        ivtt_l = np.where(ivtt_l == 9999.00, 9999.00, ivtt_l * 0.88)
+        ivtt_r = np.where(ivtt_r == 9999.00, 9999.00, ivtt_r * 0.88)
+    else: # Off-Peak
+        ivtt_a = np.where(ivtt_a == 9999.00, 9999.00, ivtt_a * 0.95)
+        ivtt_l = np.where(ivtt_l == 9999.00, 9999.00, ivtt_l * 0.86)
+        ivtt_r = np.where(ivtt_r == 9999.00, 9999.00, ivtt_r * 0.86)
+    # Number of boardings
+    nbr = np.where(ntr == 9999.00, 9999.00, ntr + 1)  # NOTE(review): overwrites the NBR matrix pulled above; derived purely from NTR
+    # Stop type constant
+    stc = np.where((pla == 9999.00) | (nbr == 9999.00) , 9999.00, pla / nbr)
+    stc = np.where(stc == 9999.00, 0.00, stc)  # net effect: masked entries end up 0, not 9999 -- confirm intended
+    # In-Vehicle time
+    ivtt = np.minimum(ivtt_a + ivtt_b + ivtt_e + ivtt_l + ivtt_r, 9999.00)
+    np.fill_diagonal(ivtt, 9999.00)
+    # Out of vehicle time
+    #ovt = np.minimum(wowt + wtwt + wkt, 9999.00)
+    # Vehicle type constant
+    if mtx_dseg == 'amPuT' or mtx_dseg == 'pmPuT': # Peak
+        vtc = np.where((ivtt == 9999.00) | (ivtt == 0.00), 9999.00,
+                       ((0.0557 * ivtt_a) / ivtt) + ((0.000 * ivtt_e) / ivtt) + ((0.1858 * ivtt_l) / ivtt) + ((0.1858 * ivtt_r) / ivtt))
+    else: # Off-Peak
+        vtc = np.where((ivtt == 9999.00) | (ivtt == 0.00), 9999.00,
+                       ((0.0432 * ivtt_a) / ivtt) + ((0.0984 * ivtt_e) / ivtt) + ((0.1442 * ivtt_l) / ivtt) + ((0.1442 * ivtt_r) / ivtt))
+
+    # Set matrices in Visum
+    h.SetMatrixRaw(Visum, {"CODE": "WKT" , "DSegCode": mtx_dseg}, wkt ) # Walk time
+    h.SetMatrixRaw(Visum, {"CODE": "ACT" , "DSegCode": mtx_dseg}, act ) # Access time
+    h.SetMatrixRaw(Visum, {"CODE": "EGT" , "DSegCode": mtx_dseg}, egt ) # Egress time
+    h.SetMatrixRaw(Visum, {"CODE": "NTR" , "DSegCode": mtx_dseg}, ntr ) # Number of transfers
+    h.SetMatrixRaw(Visum, {"CODE": "NBR" , "DSegCode": mtx_dseg}, nbr ) # Number of boardings
+    h.SetMatrixRaw(Visum, {"CODE": "WOWT" , "DSegCode": mtx_dseg}, wowt ) # Weighted origin wait time
+    h.SetMatrixRaw(Visum, {"CODE": "WTWT" , "DSegCode": mtx_dseg}, wtwt ) # Weighted transfer wait time
+    h.SetMatrixRaw(Visum, {"CODE": "IVTT" , "DSegCode": mtx_dseg}, ivtt ) # In-vehicle travel time
+    h.SetMatrixRaw(Visum, {"CODE": "IVTT(a)" , "DSegCode": mtx_dseg}, ivtt_a) # In-vehicle travel time (BRT)
+    h.SetMatrixRaw(Visum, {"CODE": "IVTT(b)" , "DSegCode": mtx_dseg}, ivtt_b) # In-vehicle travel time (Bus)
+    h.SetMatrixRaw(Visum, {"CODE": "IVTT(e)" , "DSegCode": mtx_dseg}, ivtt_e) # In-vehicle travel time (Streetcar)
+    h.SetMatrixRaw(Visum, {"CODE": "IVTT(l)" , "DSegCode": mtx_dseg}, ivtt_l) # In-vehicle travel time (LRT)
+    h.SetMatrixRaw(Visum, {"CODE": "IVTT(r)" , "DSegCode": mtx_dseg}, ivtt_r) # In-vehicle travel time (WES)
+    h.SetMatrixRaw(Visum, {"CODE": "PLA" , "DSegCode": mtx_dseg}, pla ) # Stop type constant (raw sum)
+    h.SetMatrixRaw(Visum, {"CODE": "STC" , "DSegCode": mtx_dseg}, stc ) # Stop type constant (final average)
+    #h.SetMatrixRaw(Visum, {"CODE": "OVT" , "DSegCode": mtx_dseg}, ovt ) # Out of vehicle time
+    h.SetMatrixRaw(Visum, {"CODE": "VTC" , "DSegCode": mtx_dseg}, vtc ) # Vehicle type constant
+
+
+
+# Read user inputs from Visum
+proj_dir = Visum.GetPath(2)
+
+# Pull "Code" field from procedure sequence containing Code, DSegCode, and filename
+procedure_code = Visum.Procedures.OperationExecutor.GetCurrentOperation().AttValue("CODE") # Example: outputs a string like -> '[["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]'
+procedure_codes = eval(procedure_code) # Example: outputs a list of lists like -> [["mfamsov","PuT","AM2_SOV.omx"],["mfmdMpe","PuT","MD1_MPE.omx"]]
+
+# Loop thru each matrix set in the "Code" field and export
+for x in range(len(procedure_codes)):
+ dsegcode = procedure_codes[x][0]
+ knr_flag = procedure_codes[x][1]
+
+putskim_postprocessing(dsegcode,knr_flag)
+
diff --git a/ORMetroModel/scripts/PuT_Skimming_Setup.py b/ORMetroModel/scripts/PuT_Skimming_Setup.py
new file mode 100644
index 0000000..8c46c08
--- /dev/null
+++ b/ORMetroModel/scripts/PuT_Skimming_Setup.py
@@ -0,0 +1,396 @@
+# Prep for Skimming for Oregon Metro
+# 6/12/2025 - Luke Gordon (RSG)
+# Adapted from code from Chetan Joshi (PTV)
+
+import tables
+import numpy as np
+import pandas as pd
+import os
+import VisumPy.helpers as h
+import openmatrix as omx
+
+
+def skim_setup(period):
+
+ def ismode(): # isbrt, isscr, islrt, & iswes
+
+ # Pull attributes
+ sa_isbrt = h.GetMulti(Visum.Net.StopAreas,r"isbrt", activeOnly = True)
+ sa_isscr = h.GetMulti(Visum.Net.StopAreas,r"isscr", activeOnly = True)
+ sa_islrt = h.GetMulti(Visum.Net.StopAreas,r"islrt", activeOnly = True)
+ sa_iswes = h.GetMulti(Visum.Net.StopAreas,r"iswes", activeOnly = True)
+ sp_lrtsyscode = h.GetMulti(Visum.Net.StopAreas,r"FIRST:STOPPOINTS\DISTINCT:LINEROUTES\TSYSCODE", activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [sa_isbrt,sa_isscr,sa_islrt,sa_iswes,sp_lrtsyscode]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['sa_isbrt','sa_isscr','sa_islrt','sa_iswes','sp_lrtsyscode'])
+
+ # Break out 'DISTINCT:LINEROUTES\TSYSCODE' field to separate by commas into individual columns
+ df[['sp_lrtsyscode']] = df[['sp_lrtsyscode']].astype(str)
+ df = pd.concat([df,df['sp_lrtsyscode'].str.split(',', expand = True)], axis = 1)
+ # Change Screenline field names
+ if 1 not in df:
+ df[1] = None
+ if 2 not in df:
+ df[2] = None
+ if 3 not in df:
+ df[3] = None
+ if 4 not in df:
+ df[4] = None
+ df = df.rename(columns = {0:'Mode1',1:'Mode2',2:'Mode3',3:'Mode4',4:'Mode5'})
+
+ # Calculate fields
+ df['isbrt'] = df.apply(lambda row: 1 if row['Mode1'] == 'a' or row['Mode2'] == 'a' or row['Mode3'] == 'a' or row['Mode4'] == 'a' or row['Mode5'] == 'a' else 0, axis=1)
+ df['isscr'] = df.apply(lambda row: 1 if row['Mode1'] == 'e' or row['Mode2'] == 'e' or row['Mode3'] == 'e' or row['Mode4'] == 'e' or row['Mode5'] == 'e' else 0, axis=1)
+ df['islrt'] = df.apply(lambda row: 1 if row['Mode1'] == 'l' or row['Mode2'] == 'l' or row['Mode3'] == 'l' or row['Mode4'] == 'l' or row['Mode5'] == 'l' else 0, axis=1)
+ df['iswes'] = df.apply(lambda row: 1 if row['Mode1'] == 'r' or row['Mode2'] == 'r' or row['Mode3'] == 'r' or row['Mode4'] == 'r' or row['Mode5'] == 'r' else 0, axis=1)
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.StopAreas ,r"isbrt", df['isbrt'])
+ h.SetMulti(Visum.Net.StopAreas ,r"isscr", df['isscr'])
+ h.SetMulti(Visum.Net.StopAreas ,r"islrt", df['islrt'])
+ h.SetMulti(Visum.Net.StopAreas ,r"iswes", df['iswes'])
+
+ def headway(period): # Headway and Headway_Halved
+
+ # Pull attributes
+ tp_emmeheadway = h.GetMulti(Visum.Net.TimeProfiles,r"Emme_Headway", activeOnly = True)
+ tp_headwayhalved = h.GetMulti(Visum.Net.TimeProfiles,r"Headway_Halved", activeOnly = True)
+
+ if period == 'AM' or period == 'PM':
+ tp_periodheadway = h.GetMulti(Visum.Net.TimeProfiles,r"LINEROUTE\EMME_DATA1", activeOnly = True)
+ else:
+ tp_periodheadway = h.GetMulti(Visum.Net.TimeProfiles,r"LINEROUTE\EMME_DATA2", activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [tp_emmeheadway,tp_periodheadway,tp_headwayhalved]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['tp_emmeheadway','tp_periodheadway','tp_headwayhalved'])
+
+ # Calculate fields
+ df['tp_emmeheadway'] = df['tp_periodheadway'] * 60
+ df['tp_headwayhalved'] = df['tp_emmeheadway'] / 2
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.TimeProfiles ,r"Emme_Headway", df['tp_emmeheadway'])
+ h.SetMulti(Visum.Net.TimeProfiles ,r"Headway_Halved", df['tp_headwayhalved'])
+
+
+ def op_bushr(): # op_bushr on lineroutes and stopareas
+
+ # Line Routes
+ # Pull attributes
+ lr_opbushr = h.GetMulti(Visum.Net.LineRoutes,r"op_bushr", activeOnly = True)
+ lr_opheadway = h.GetMulti(Visum.Net.LineRoutes,r"EMME_DATA2", activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [lr_opbushr,lr_opheadway]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['lr_opbushr','lr_opheadway'])
+
+ # Calculate fields
+ df['lr_opbushr'] = df.apply(lambda row: 1 / (row['lr_opheadway'] / 60) if row['lr_opheadway'] != 0 else 0,axis=1)
+
+ # Set field back in Visum
+ h.SetMulti(Visum.Net.LineRoutes ,r"op_bushr", df['lr_opbushr'])
+
+
+ # Stop Areas
+ # Pull attributes
+ sa_lropbushr = h.GetMulti(Visum.Net.StopAreas,r"FIRST:STOPPOINTS\SUM:LINEROUTES\OP_BUSHR", activeOnly = True)
+
+ # Set field back in Visum
+ h.SetMulti(Visum.Net.StopAreas ,r"op_bushr", sa_lropbushr)
+
+
+ def stoptype(period): # Stop type & stop type constant
+
+ # Pull attributes
+ # Fields to set
+ sa_sttyp = h.GetMulti(Visum.Net.StopAreas,r"sttyp" , activeOnly = True)
+ sa_stcon = h.GetMulti(Visum.Net.StopAreas,r"stcon" , activeOnly = True)
+ # Condition Fields
+ sa_istc = h.GetMulti(Visum.Net.StopAreas,r"istc" , activeOnly = True)
+ sa_istm = h.GetMulti(Visum.Net.StopAreas,r"istm" , activeOnly = True)
+ sa_isbrt = h.GetMulti(Visum.Net.StopAreas,r"isbrt" , activeOnly = True)
+ sa_isscr = h.GetMulti(Visum.Net.StopAreas,r"isscr" , activeOnly = True)
+ sa_islrt = h.GetMulti(Visum.Net.StopAreas,r"islrt" , activeOnly = True)
+ sa_iswes = h.GetMulti(Visum.Net.StopAreas,r"iswes" , activeOnly = True)
+ sa_opbushr = h.GetMulti(Visum.Net.StopAreas,r"op_bushr", activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [sa_sttyp,sa_stcon,sa_istc,sa_istm ,sa_isbrt,sa_isscr,sa_islrt,sa_iswes,sa_opbushr]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['sa_sttyp','sa_stcon','sa_istc','sa_istm','sa_isbrt','sa_isscr','sa_islrt','sa_iswes','sa_opbushr'])
+
+ # Convert condition fields & stop type constant to float
+ df[['sa_stcon','sa_istc','sa_istm','sa_isbrt','sa_isscr','sa_islrt','sa_iswes','sa_opbushr']] = df[[
+ 'sa_stcon','sa_istc','sa_istm','sa_isbrt','sa_isscr','sa_islrt','sa_iswes','sa_opbushr']].astype(float)
+
+ # Calculate Stop Type field
+ for x in range(len(df)):
+ if ((df.at[x,'sa_istc'] == 1) | (df.at[x,'sa_istm'] == 1) | (df.at[x,'sa_islrt'] == 1) | (df.at[x,'sa_iswes'] == 1)):
+ df.at[x,'sa_sttyp'] = 'A,B,C' # Is transit center, is transit mall, is LRT stop, is WES stop
+ elif ((df.at[x,'sa_isbrt'] == 1) | (df.at[x,'sa_isscr'] == 1) | (df.at[x,'sa_opbushr'] >= 4)):
+ df.at[x,'sa_sttyp'] = 'D' # Is BRT stop, is streetcar stop, has >= 4 transit vehicles per hour
+ else:
+ df.at[x,'sa_sttyp'] = 'E' # All stops with infrequent local bus service
+
+
+ # Calculate Stop Type Constant field
+ for y in range(len(df)):
+ if period == 'AM' or period == 'PM': # Peak
+ if df.at[y,'sa_sttyp'] == 'A,B,C':
+ df.at[y,'sa_stcon'] = 0.1582
+ elif df.at[y,'sa_sttyp'] == 'D':
+ df.at[y,'sa_stcon'] = 0.0531
+ elif df.at[y,'sa_sttyp'] == 'E':
+ df.at[y,'sa_stcon'] = 0.0000
+ else: # Off-Peak
+ if df.at[y,'sa_sttyp'] == 'A,B,C':
+ df.at[y,'sa_stcon'] = 0.1075
+ elif df.at[y,'sa_sttyp'] == 'D':
+ df.at[y,'sa_stcon'] = 0.0756
+ elif df.at[y,'sa_sttyp'] == 'E':
+ df.at[y,'sa_stcon'] = 0.0000
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.StopAreas ,r"sttyp", df['sa_sttyp'])
+ h.SetMulti(Visum.Net.StopAreas ,r"stcon", df['sa_stcon'])
+
+
+ def waittime(period): # Wait time perception factor
+
+ # Pull attributes
+ sa_sttyp = h.GetMulti(Visum.Net.StopAreas,r"sttyp" , activeOnly = True)
+ sa_wtpf = h.GetMulti(Visum.Net.StopAreas,r"wtpf" , activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [sa_sttyp,sa_wtpf]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['sa_sttyp','sa_wtpf'])
+
+ # Calculate field
+ for y in range(len(df)):
+ if period == 'AM' or period == 'PM': # Peak
+ if df.at[y,'sa_sttyp'] == 'A,B,C':
+ df.at[y,'sa_wtpf'] = 0.88
+ elif df.at[y,'sa_sttyp'] == 'D':
+ df.at[y,'sa_wtpf'] = 0.93
+ elif df.at[y,'sa_sttyp'] == 'E':
+ df.at[y,'sa_wtpf'] = 1.00
+ else: # Off-Peak
+ if df.at[y,'sa_sttyp'] == 'A,B,C':
+ df.at[y,'sa_wtpf'] = 0.86
+ elif df.at[y,'sa_sttyp'] == 'D':
+ df.at[y,'sa_wtpf'] = 0.94
+ elif df.at[y,'sa_sttyp'] == 'E':
+ df.at[y,'sa_wtpf'] = 1.00
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.StopAreas ,r"wtpf", df['sa_wtpf'])
+
+
+ def invehicleperceptionfactor(period): # In-vehicle perception factors
+
+ # Pull attributes
+ tp_tsyscode = h.GetMulti(Visum.Net.TimeProfiles,r"TSYSCODE", activeOnly = True)
+ tp_ivpf = h.GetMulti(Visum.Net.TimeProfiles,r"ivpf" , activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [tp_tsyscode,tp_ivpf]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['tp_tsyscode','tp_ivpf'])
+
+ # Calculate field
+ for y in range(len(df)):
+ if period == 'AM' or period == 'PM': # Peak
+ if df.at[y,'tp_tsyscode'] == 'a':
+ df.at[y,'tp_ivpf'] = 0.95
+ elif ((df.at[y,'tp_tsyscode'] == 'l') or (df.at[y,'tp_tsyscode'] == 'r')):
+ df.at[y,'tp_ivpf'] = 0.88
+ else:
+ df.at[y,'tp_ivpf'] = 1.00
+ else: # Off-Peak
+ if df.at[y,'tp_tsyscode'] == 'a':
+ df.at[y,'tp_ivpf'] = 0.95
+ elif ((df.at[y,'tp_tsyscode'] == 'l') or (df.at[y,'tp_tsyscode'] == 'r')):
+ df.at[y,'tp_ivpf'] = 0.86
+ else:
+ df.at[y,'tp_ivpf'] = 1.00
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.TimeProfiles ,r"ivpf", df['tp_ivpf'])
+
+
+ def boardingpenalty(period): # Boarding penalty (seconds)
+
+ # Pull attributes
+ tp_tsyscode = h.GetMulti(Visum.Net.TimeProfiles,r"TSYSCODE", activeOnly = True)
+ tp_brdpen = h.GetMulti(Visum.Net.TimeProfiles,r"brdpen" , activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [tp_tsyscode,tp_brdpen]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['tp_tsyscode','tp_brdpen'])
+
+ # Calculate field
+ for y in range(len(df)):
+ if period == 'AM' or period == 'PM': # Peak
+ if ((df.at[y,'tp_tsyscode'] == 'l') or (df.at[y,'tp_tsyscode'] == 'r')):
+ df.at[y,'tp_brdpen'] = 000.0
+ elif ((df.at[y,'tp_tsyscode'] == 'a') or (df.at[y,'tp_tsyscode'] == 'e')):
+ df.at[y,'tp_brdpen'] = 372.0
+ else:
+ df.at[y,'tp_brdpen'] = 439.8
+ else: # Off-Peak
+ if ((df.at[y,'tp_tsyscode'] == 'l') or (df.at[y,'tp_tsyscode'] == 'r')):
+ df.at[y,'tp_brdpen'] = 000.0
+ elif ((df.at[y,'tp_tsyscode'] == 'a') or (df.at[y,'tp_tsyscode'] == 'e')):
+ df.at[y,'tp_brdpen'] = 166.8
+ else:
+ df.at[y,'tp_brdpen'] = 540.6
+
+ # Add global boarding penalty
+ for y in range(len(df)):
+ df.at[y,'tp_brdpen'] = df.at[y,'tp_brdpen'] + 231
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.TimeProfiles ,r"brdpen", df['tp_brdpen'])
+
+
+ def dwelltime():
+
+ # Pull attributes
+ tpi_tsyscode = h.GetMulti(Visum.Net.TimeProfileItems,r"TIMEPROFILE\TSYSCODE" , activeOnly = True)
+ tpi_emmedwt = h.GetMulti(Visum.Net.TimeProfileItems,r"LINEROUTEITEM\EMME_DWT", activeOnly = True)
+ tpi_prelength = h.GetMulti(Visum.Net.TimeProfileItems,r"PreLength" , activeOnly = True)
+ tpi_dwelltime = h.GetMulti(Visum.Net.TimeProfileItems,r"DWELL_TIME" , activeOnly = True)
+
+
+ # Make Visum list with link data
+ att_list = [tpi_tsyscode,tpi_emmedwt,tpi_prelength,tpi_dwelltime]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['tpi_tsyscode','tpi_emmedwt','tpi_prelength','tpi_dwelltime'])
+
+ # Convert fields to float
+ df[['tpi_emmedwt','tpi_prelength','tpi_dwelltime']] = df[['tpi_emmedwt','tpi_prelength','tpi_dwelltime']].astype(float)
+
+ # Calculate field
+ for y in range(len(df)):
+ if df.at[y,'tpi_tsyscode'] == 'b': # Local Bus dwell time based on prelength and Emme_DWT
+ df.at[y,'tpi_dwelltime'] = df.at[y,'tpi_emmedwt'] * 60 * df.at[y,'tpi_prelength']
+ else: # Non-Local Bus dwell time based on Emme_DWT only
+ df.at[y,'tpi_dwelltime'] = df.at[y,'tpi_emmedwt'] * 60
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.TimeProfileItems ,r"DWELL_TIME", df['tpi_dwelltime'])
+
+
+ def calc_ttf(ft, timau, length, us1, default_speed): # INPUT TO RUNTIME FUNCTION
+ if us1 <= 0:
+ us1 = default_speed
+
+ if length:
+ transit_time = 3600*length / default_speed
+
+ if timau < 999:
+ if ft == 1:
+ transit_time = timau * 1.15
+ elif ft == 2:
+ transit_time = timau * 1.20
+ elif ft == 3:
+ transit_time = timau
+ elif ft == 4:
+ transit_time = timau * 1.09
+ elif ft == 5:
+ transit_time = 60* (60 * length / us1)
+ elif ft == 6:
+ transit_time = 60* (60 * length / us1 + 60 * length / 180)
+ elif ft ==11:
+ transit_time = timau * 1.15
+ elif ft ==12:
+ transit_time = timau * 1.30
+ elif ft ==13:
+ transit_time = timau
+ elif ft ==14:
+ transit_time =timau * 1.09
+ elif ft == 15:
+ transit_time = 60* (60 * length / us1)
+ elif ft == 16:
+ transit_time = 60* (60 * length / us1 + 60 * length / 180)
+ elif ft == 21:
+ transit_time = timau * 1.05
+ elif ft == 22:
+ transit_time = timau * 1.03
+ else:
+ transit_time = 1
+
+ return transit_time
+
+ def runtime(period):
+ tpitems = Visum.Net.TimeProfileItems.GetMultipleAttributes(["LINEROUTEITEM\\EMME_TTFINDEX", "SUM:USEDLINEROUTEITEMS\\OUTLINK\\"+period+"_TTC","SUM:USEDLINEROUTEITEMS\\POSTLENGTH",
+ "LINEROUTEITEM\\EMME_DATA1"])
+
+ #else:
+ # tpitems = Visum.Net.TimeProfileItems.GetMultipleAttributes(["LINEROUTEITEM\\EMME_TTFINDEX", "SUM:USEDLINEROUTEITEMS\\OUTLINK\\AddVal3","SUM:USEDLINEROUTEITEMS\\POSTLENGTH",
+ # "LINEROUTEITEM\\EMME_DATA1"])
+ result = []
+
+ default_speed = 30
+ for ft, timau, length, us1 in tpitems:
+ haul_time = calc_ttf(ft, timau, length, us1, default_speed)
+ result.append([haul_time, ])
+
+ Visum.Net.TimeProfileItems.SetMultipleAttributes(["AddVal"], result)
+
+ def combinerundwelltimes():
+
+ # Pull attributes
+ tpi_addval = h.GetMulti(Visum.Net.TimeProfileItems,r"ADDVAL" , activeOnly = True)
+ tpi_dwelltime = h.GetMulti(Visum.Net.TimeProfileItems,r"DWELL_TIME" , activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [tpi_addval,tpi_dwelltime]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['tpi_addval','tpi_dwelltime'])
+
+ # Convert fields to float
+ df[['tpi_addval','tpi_dwelltime']] = df[['tpi_addval','tpi_dwelltime']].astype(float)
+
+ # Calculate field
+ df['tpi_addval'] = df['tpi_addval'] + df['tpi_dwelltime']
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.TimeProfileItems ,r"ADDVAL", df['tpi_addval'])
+
+
+
+ # RUN FUNCTIONS
+ # Setting Skim Attributes
+ ismode()
+ headway(period)
+ op_bushr()
+ stoptype(period)
+ waittime(period)
+ invehicleperceptionfactor(period)
+ boardingpenalty(period)
+ # Setting Dwell and Run times
+ dwelltime()
+ runtime(period)
+ combinerundwelltimes()
+
+
+per = Visum.Procedures.OperationExecutor.GetCurrentOperation().AttValue("CODE") # Example: outputs a string like 'AM' from AM in the code box
+skim_setup(per)
+
diff --git a/ORMetroModel/scripts/Skimming_Setup.py b/ORMetroModel/scripts/Skimming_Setup.py
new file mode 100644
index 0000000..54a1923
--- /dev/null
+++ b/ORMetroModel/scripts/Skimming_Setup.py
@@ -0,0 +1,396 @@
+# Prep for Skimming for Oregon Metro
+# 6/12/2025 - Luke Gordon (RSG)
+# Adapted from code from Chetan Joshi (PTV)
+
+import tables
+import numpy as np
+import pandas as pd
+import os
+import VisumPy.helpers as h
+import openmatrix as omx
+
+
+def put_skim_setup(period):
+
+ def ismode(): # isbrt, isscr, islrt, & iswes
+
+ # Pull attributes
+ sa_isbrt = h.GetMulti(Visum.Net.StopAreas,r"isbrt", activeOnly = True)
+ sa_isscr = h.GetMulti(Visum.Net.StopAreas,r"isscr", activeOnly = True)
+ sa_islrt = h.GetMulti(Visum.Net.StopAreas,r"islrt", activeOnly = True)
+ sa_iswes = h.GetMulti(Visum.Net.StopAreas,r"iswes", activeOnly = True)
+ sp_lrtsyscode = h.GetMulti(Visum.Net.StopAreas,r"FIRST:STOPPOINTS\DISTINCT:LINEROUTES\TSYSCODE", activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [sa_isbrt,sa_isscr,sa_islrt,sa_iswes,sp_lrtsyscode]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['sa_isbrt','sa_isscr','sa_islrt','sa_iswes','sp_lrtsyscode'])
+
+ # Break out 'DISTINCT:LINEROUTES\TSYSCODE' field to separate by commas into individual columns
+ df[['sp_lrtsyscode']] = df[['sp_lrtsyscode']].astype(str)
+ df = pd.concat([df,df['sp_lrtsyscode'].str.split(',', expand = True)], axis = 1)
+ # Change Screenline field names
+ if 1 not in df:
+ df[1] = None
+ if 2 not in df:
+ df[2] = None
+ if 3 not in df:
+ df[3] = None
+ if 4 not in df:
+ df[4] = None
+ df = df.rename(columns = {0:'Mode1',1:'Mode2',2:'Mode3',3:'Mode4',4:'Mode5'})
+
+ # Calculate fields
+ df['isbrt'] = df.apply(lambda row: 1 if row['Mode1'] == 'a' or row['Mode2'] == 'a' or row['Mode3'] == 'a' or row['Mode4'] == 'a' or row['Mode5'] == 'a' else 0, axis=1)
+ df['isscr'] = df.apply(lambda row: 1 if row['Mode1'] == 'e' or row['Mode2'] == 'e' or row['Mode3'] == 'e' or row['Mode4'] == 'e' or row['Mode5'] == 'e' else 0, axis=1)
+ df['islrt'] = df.apply(lambda row: 1 if row['Mode1'] == 'l' or row['Mode2'] == 'l' or row['Mode3'] == 'l' or row['Mode4'] == 'l' or row['Mode5'] == 'l' else 0, axis=1)
+ df['iswes'] = df.apply(lambda row: 1 if row['Mode1'] == 'r' or row['Mode2'] == 'r' or row['Mode3'] == 'r' or row['Mode4'] == 'r' or row['Mode5'] == 'r' else 0, axis=1)
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.StopAreas ,r"isbrt", df['isbrt'])
+ h.SetMulti(Visum.Net.StopAreas ,r"isscr", df['isscr'])
+ h.SetMulti(Visum.Net.StopAreas ,r"islrt", df['islrt'])
+ h.SetMulti(Visum.Net.StopAreas ,r"iswes", df['iswes'])
+
+ def headway(period): # Headway and Headway_Halved
+
+ # Pull attributes
+ tp_emmeheadway = h.GetMulti(Visum.Net.TimeProfiles,r"Emme_Headway", activeOnly = True)
+ tp_headwayhalved = h.GetMulti(Visum.Net.TimeProfiles,r"Headway_Halved", activeOnly = True)
+
+ if period == 'AM' or period == 'PM':
+ tp_periodheadway = h.GetMulti(Visum.Net.TimeProfiles,r"LINEROUTE\EMME_DATA1", activeOnly = True)
+ else:
+ tp_periodheadway = h.GetMulti(Visum.Net.TimeProfiles,r"LINEROUTE\EMME_DATA2", activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [tp_emmeheadway,tp_periodheadway,tp_headwayhalved]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['tp_emmeheadway','tp_periodheadway','tp_headwayhalved'])
+
+ # Calculate fields
+ df['tp_emmeheadway'] = df['tp_periodheadway'] * 60
+ df['tp_headwayhalved'] = df['tp_emmeheadway'] / 2
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.TimeProfiles ,r"Emme_Headway", df['tp_emmeheadway'])
+ h.SetMulti(Visum.Net.TimeProfiles ,r"Headway_Halved", df['tp_headwayhalved'])
+
+
+ def op_bushr(): # op_bushr on lineroutes and stopareas
+
+ # Line Routes
+ # Pull attributes
+ lr_opbushr = h.GetMulti(Visum.Net.LineRoutes,r"op_bushr", activeOnly = True)
+ lr_opheadway = h.GetMulti(Visum.Net.LineRoutes,r"EMME_DATA2", activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [lr_opbushr,lr_opheadway]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['lr_opbushr','lr_opheadway'])
+
+ # Calculate fields
+ df['lr_opbushr'] = df.apply(lambda row: 1 / (row['lr_opheadway'] / 60) if row['lr_opheadway'] != 0 else 0,axis=1)
+
+ # Set field back in Visum
+ h.SetMulti(Visum.Net.LineRoutes ,r"op_bushr", df['lr_opbushr'])
+
+
+ # Stop Areas
+ # Pull attributes
+ sa_lropbushr = h.GetMulti(Visum.Net.StopAreas,r"FIRST:STOPPOINTS\SUM:LINEROUTES\OP_BUSHR", activeOnly = True)
+
+ # Set field back in Visum
+ h.SetMulti(Visum.Net.StopAreas ,r"op_bushr", sa_lropbushr)
+
+
+ def stoptype(period): # Stop type & stop type constant
+
+ # Pull attributes
+ # Fields to set
+ sa_sttyp = h.GetMulti(Visum.Net.StopAreas,r"sttyp" , activeOnly = True)
+ sa_stcon = h.GetMulti(Visum.Net.StopAreas,r"stcon" , activeOnly = True)
+ # Condition Fields
+ sa_istc = h.GetMulti(Visum.Net.StopAreas,r"istc" , activeOnly = True)
+ sa_istm = h.GetMulti(Visum.Net.StopAreas,r"istm" , activeOnly = True)
+ sa_isbrt = h.GetMulti(Visum.Net.StopAreas,r"isbrt" , activeOnly = True)
+ sa_isscr = h.GetMulti(Visum.Net.StopAreas,r"isscr" , activeOnly = True)
+ sa_islrt = h.GetMulti(Visum.Net.StopAreas,r"islrt" , activeOnly = True)
+ sa_iswes = h.GetMulti(Visum.Net.StopAreas,r"iswes" , activeOnly = True)
+ sa_opbushr = h.GetMulti(Visum.Net.StopAreas,r"op_bushr", activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [sa_sttyp,sa_stcon,sa_istc,sa_istm ,sa_isbrt,sa_isscr,sa_islrt,sa_iswes,sa_opbushr]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['sa_sttyp','sa_stcon','sa_istc','sa_istm','sa_isbrt','sa_isscr','sa_islrt','sa_iswes','sa_opbushr'])
+
+ # Convert condition fields & stop type constant to float
+ df[['sa_stcon','sa_istc','sa_istm','sa_isbrt','sa_isscr','sa_islrt','sa_iswes','sa_opbushr']] = df[[
+ 'sa_stcon','sa_istc','sa_istm','sa_isbrt','sa_isscr','sa_islrt','sa_iswes','sa_opbushr']].astype(float)
+
+ # Calculate Stop Type field
+ for x in range(len(df)):
+ if ((df.at[x,'sa_istc'] == 1) | (df.at[x,'sa_istm'] == 1) | (df.at[x,'sa_islrt'] == 1) | (df.at[x,'sa_iswes'] == 1)):
+ df.at[x,'sa_sttyp'] = 'A,B,C' # Is transit center, is transit mall, is LRT stop, is WES stop
+ elif ((df.at[x,'sa_isbrt'] == 1) | (df.at[x,'sa_isscr'] == 1) | (df.at[x,'sa_opbushr'] >= 4)):
+ df.at[x,'sa_sttyp'] = 'D' # Is BRT stop, is streetcar stop, has >= 4 transit vehicles per hour
+ else:
+ df.at[x,'sa_sttyp'] = 'E' # All stops with infrequent local bus service
+
+
+ # Calculate Stop Type Constant field
+ for y in range(len(df)):
+ if period == 'AM' or period == 'PM': # Peak
+ if df.at[y,'sa_sttyp'] == 'A,B,C':
+ df.at[y,'sa_stcon'] = 0.1582
+ elif df.at[y,'sa_sttyp'] == 'D':
+ df.at[y,'sa_stcon'] = 0.0531
+ elif df.at[y,'sa_sttyp'] == 'E':
+ df.at[y,'sa_stcon'] = 0.0000
+ else: # Off-Peak
+ if df.at[y,'sa_sttyp'] == 'A,B,C':
+ df.at[y,'sa_stcon'] = 0.1075
+ elif df.at[y,'sa_sttyp'] == 'D':
+ df.at[y,'sa_stcon'] = 0.0756
+ elif df.at[y,'sa_sttyp'] == 'E':
+ df.at[y,'sa_stcon'] = 0.0000
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.StopAreas ,r"sttyp", df['sa_sttyp'])
+ h.SetMulti(Visum.Net.StopAreas ,r"stcon", df['sa_stcon'])
+
+
+ def waittime(period): # Wait time perception factor
+
+ # Pull attributes
+ sa_sttyp = h.GetMulti(Visum.Net.StopAreas,r"sttyp" , activeOnly = True)
+ sa_wtpf = h.GetMulti(Visum.Net.StopAreas,r"wtpf" , activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [sa_sttyp,sa_wtpf]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['sa_sttyp','sa_wtpf'])
+
+ # Calculate field
+ for y in range(len(df)):
+ if period == 'AM' or period == 'PM': # Peak
+ if df.at[y,'sa_sttyp'] == 'A,B,C':
+ df.at[y,'sa_wtpf'] = 0.88
+ elif df.at[y,'sa_sttyp'] == 'D':
+ df.at[y,'sa_wtpf'] = 0.93
+ elif df.at[y,'sa_sttyp'] == 'E':
+ df.at[y,'sa_wtpf'] = 1.00
+ else: # Off-Peak
+ if df.at[y,'sa_sttyp'] == 'A,B,C':
+ df.at[y,'sa_wtpf'] = 0.86
+ elif df.at[y,'sa_sttyp'] == 'D':
+ df.at[y,'sa_wtpf'] = 0.94
+ elif df.at[y,'sa_sttyp'] == 'E':
+ df.at[y,'sa_wtpf'] = 1.00
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.StopAreas ,r"wtpf", df['sa_wtpf'])
+
+
+ def invehicleperceptionfactor(period): # In-vehicle perception factors
+
+ # Pull attributes
+ tp_tsyscode = h.GetMulti(Visum.Net.TimeProfiles,r"TSYSCODE", activeOnly = True)
+ tp_ivpf = h.GetMulti(Visum.Net.TimeProfiles,r"ivpf" , activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [tp_tsyscode,tp_ivpf]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['tp_tsyscode','tp_ivpf'])
+
+ # Calculate field
+ for y in range(len(df)):
+ if period == 'AM' or period == 'PM': # Peak
+ if df.at[y,'tp_tsyscode'] == 'a':
+ df.at[y,'tp_ivpf'] = 0.95
+ elif ((df.at[y,'tp_tsyscode'] == 'l') or (df.at[y,'tp_tsyscode'] == 'r')):
+ df.at[y,'tp_ivpf'] = 0.88
+ else:
+ df.at[y,'tp_ivpf'] = 1.00
+ else: # Off-Peak
+ if df.at[y,'tp_tsyscode'] == 'a':
+ df.at[y,'tp_ivpf'] = 0.95
+ elif ((df.at[y,'tp_tsyscode'] == 'l') or (df.at[y,'tp_tsyscode'] == 'r')):
+ df.at[y,'tp_ivpf'] = 0.86
+ else:
+ df.at[y,'tp_ivpf'] = 1.00
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.TimeProfiles ,r"ivpf", df['tp_ivpf'])
+
+
+ def boardingpenalty(period): # Boarding penalty (seconds)
+
+ # Pull attributes
+ tp_tsyscode = h.GetMulti(Visum.Net.TimeProfiles,r"TSYSCODE", activeOnly = True)
+ tp_brdpen = h.GetMulti(Visum.Net.TimeProfiles,r"brdpen" , activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [tp_tsyscode,tp_brdpen]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['tp_tsyscode','tp_brdpen'])
+
+ # Calculate field
+ for y in range(len(df)):
+ if period == 'AM' or period == 'PM': # Peak
+ if ((df.at[y,'tp_tsyscode'] == 'l') or (df.at[y,'tp_tsyscode'] == 'r')):
+ df.at[y,'tp_brdpen'] = 000.0
+ elif ((df.at[y,'tp_tsyscode'] == 'a') or (df.at[y,'tp_tsyscode'] == 'e')):
+ df.at[y,'tp_brdpen'] = 372.0
+ else:
+ df.at[y,'tp_brdpen'] = 439.8
+ else: # Off-Peak
+ if ((df.at[y,'tp_tsyscode'] == 'l') or (df.at[y,'tp_tsyscode'] == 'r')):
+ df.at[y,'tp_brdpen'] = 000.0
+ elif ((df.at[y,'tp_tsyscode'] == 'a') or (df.at[y,'tp_tsyscode'] == 'e')):
+ df.at[y,'tp_brdpen'] = 166.8
+ else:
+ df.at[y,'tp_brdpen'] = 540.6
+
+ # Add global boarding penalty
+ for y in range(len(df)):
+ df.at[y,'tp_brdpen'] = df.at[y,'tp_brdpen'] + 231
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.TimeProfiles ,r"brdpen", df['tp_brdpen'])
+
+
+ def dwelltime():
+
+ # Pull attributes
+ tpi_tsyscode = h.GetMulti(Visum.Net.TimeProfileItems,r"TIMEPROFILE\TSYSCODE" , activeOnly = True)
+ tpi_emmedwt = h.GetMulti(Visum.Net.TimeProfileItems,r"LINEROUTEITEM\EMME_DWT", activeOnly = True)
+ tpi_prelength = h.GetMulti(Visum.Net.TimeProfileItems,r"PreLength" , activeOnly = True)
+ tpi_dwelltime = h.GetMulti(Visum.Net.TimeProfileItems,r"DWELL_TIME" , activeOnly = True)
+
+
+ # Make Visum list with link data
+ att_list = [tpi_tsyscode,tpi_emmedwt,tpi_prelength,tpi_dwelltime]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['tpi_tsyscode','tpi_emmedwt','tpi_prelength','tpi_dwelltime'])
+
+ # Convert fields to float
+ df[['tpi_emmedwt','tpi_prelength','tpi_dwelltime']] = df[['tpi_emmedwt','tpi_prelength','tpi_dwelltime']].astype(float)
+
+ # Calculate field
+ for y in range(len(df)):
+ if df.at[y,'tpi_tsyscode'] == 'b': # Local Bus dwell time based on prelength and Emme_DWT
+ df.at[y,'tpi_dwelltime'] = df.at[y,'tpi_emmedwt'] * 60 * df.at[y,'tpi_prelength']
+ else: # Non-Local Bus dwell time based on Emme_DWT only
+ df.at[y,'tpi_dwelltime'] = df.at[y,'tpi_emmedwt'] * 60
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.TimeProfileItems ,r"DWELL_TIME", df['tpi_dwelltime'])
+
+
+ def calc_ttf(ft, timau, length, us1, default_speed): # INPUT TO RUNTIME FUNCTION
+ if us1 <= 0:
+ us1 = default_speed
+
+ if length:
+ transit_time = 3600*length / default_speed
+
+ if timau < 999:
+ if ft == 1:
+ transit_time = timau * 1.15
+ elif ft == 2:
+ transit_time = timau * 1.20
+ elif ft == 3:
+ transit_time = timau
+ elif ft == 4:
+ transit_time = timau * 1.09
+ elif ft == 5:
+ transit_time = 60* (60 * length / us1)
+ elif ft == 6:
+ transit_time = 60* (60 * length / us1 + 60 * length / 180)
+ elif ft ==11:
+ transit_time = timau * 1.15
+ elif ft ==12:
+ transit_time = timau * 1.30
+ elif ft ==13:
+ transit_time = timau
+ elif ft ==14:
+ transit_time =timau * 1.09
+ elif ft == 15:
+ transit_time = 60* (60 * length / us1)
+ elif ft == 16:
+ transit_time = 60* (60 * length / us1 + 60 * length / 180)
+ elif ft == 21:
+ transit_time = timau * 1.05
+ elif ft == 22:
+ transit_time = timau * 1.03
+ else:
+ transit_time = 1
+
+ return transit_time
+
+ def runtime(period):
+ if period == 'AM' or period == 'PM': # Peak
+ tpitems = Visum.Net.TimeProfileItems.GetMultipleAttributes(["LINEROUTEITEM\\EMME_TTFINDEX", "SUM:USEDLINEROUTEITEMS\\OUTLINK\\AddVal1","SUM:USEDLINEROUTEITEMS\\POSTLENGTH",
+ "LINEROUTEITEM\\EMME_DATA1"])
+ else: # Off-Peak
+ tpitems = Visum.Net.TimeProfileItems.GetMultipleAttributes(["LINEROUTEITEM\\EMME_TTFINDEX", "SUM:USEDLINEROUTEITEMS\\OUTLINK\\AddVal2","SUM:USEDLINEROUTEITEMS\\POSTLENGTH",
+ "LINEROUTEITEM\\EMME_DATA1"])
+ result = []
+
+ default_speed = 30
+ for ft, timau, length, us1 in tpitems:
+ haul_time = calc_ttf(ft, timau, length, us1, default_speed)
+ result.append([haul_time, ])
+
+ Visum.Net.TimeProfileItems.SetMultipleAttributes(["AddVal"], result)
+
+ def combinerundwelltimes():
+
+ # Pull attributes
+ tpi_addval = h.GetMulti(Visum.Net.TimeProfileItems,r"ADDVAL" , activeOnly = True)
+ tpi_dwelltime = h.GetMulti(Visum.Net.TimeProfileItems,r"DWELL_TIME" , activeOnly = True)
+
+ # Make Visum list with link data
+ att_list = [tpi_addval,tpi_dwelltime]
+
+ # Put Visum link list into dataframe
+ df = pd.DataFrame(np.column_stack(att_list), columns = ['tpi_addval','tpi_dwelltime'])
+
+ # Convert fields to float
+ df[['tpi_addval','tpi_dwelltime']] = df[['tpi_addval','tpi_dwelltime']].astype(float)
+
+ # Calculate field
+ df['tpi_addval'] = df['tpi_addval'] + df['tpi_dwelltime']
+
+ # Set fields back in Visum
+ h.SetMulti(Visum.Net.TimeProfileItems ,r"ADDVAL", df['tpi_addval'])
+
+
+
+ # RUN FUNCTIONS
+ # Setting Skim Attributes
+ ismode()
+ headway(period)
+ op_bushr()
+ stoptype(period)
+ waittime(period)
+ invehicleperceptionfactor(period)
+ boardingpenalty(period)
+ # Setting Dwell and Run times
+ dwelltime()
+ runtime(period)
+ combinerundwelltimes()
+
+
+procedure_code = Visum.Procedures.OperationExecutor.GetCurrentOperation().AttValue("CODE") # Example: outputs a string like 'AM' from AM in the code box
+put_skim_setup(procedure_code)
+
diff --git a/maz_maz_stop_skims/.gitignore b/maz_maz_stop_skims/.gitignore
new file mode 100644
index 0000000..f4f7aec
--- /dev/null
+++ b/maz_maz_stop_skims/.gitignore
@@ -0,0 +1,4 @@
+# Folders
+input/
+output/
+preprocessing/
\ No newline at end of file
diff --git a/maz_maz_stop_skims/2zoneSkim.py b/maz_maz_stop_skims/2zoneSkim.py
new file mode 100644
index 0000000..13b3e72
--- /dev/null
+++ b/maz_maz_stop_skims/2zoneSkim.py
@@ -0,0 +1,607 @@
+import pandas as pd
+import pandana as pdna
+import numpy as np
+import yaml
+import os
+import geopandas as gpd
+from datetime import datetime
+from typing import Tuple, Dict, List
+from dataclasses import dataclass
+import logging
+
+@dataclass
+class SkimParameters:
+ """Configuration parameters for skim generation read from YAML file"""
+ # All distance thresholds are in feet; speeds are in miles per hour.
+ # Values come from the 'mmms' section of 2zoneSkim_params.yaml.
+ max_maz_maz_walk_dist_feet: int
+ max_maz_maz_bike_dist_feet: int
+ max_maz_local_bus_stop_walk_dist_feet: int
+ max_maz_premium_transit_stop_walk_dist_feet: int
+ walk_speed_mph: float
+ drive_speed_mph: float
+
+ @classmethod
+ def from_yaml(cls, yaml_data: dict) -> 'SkimParameters':
+ """Create parameters from YAML configuration.
+
+ Raises KeyError if the 'mmms' section or any expected key is missing.
+ """
+ mmms = yaml_data['mmms']
+ return cls(
+ max_maz_maz_walk_dist_feet=int(mmms['max_maz_maz_walk_dist_feet']),
+ max_maz_maz_bike_dist_feet=int(mmms['max_maz_maz_bike_dist_feet']),
+ max_maz_local_bus_stop_walk_dist_feet=int(mmms['max_maz_local_bus_stop_walk_dist_feet']),
+ max_maz_premium_transit_stop_walk_dist_feet=int(mmms['max_maz_premium_transit_stop_walk_dist_feet']),
+ walk_speed_mph=float(mmms['walk_speed_mph']),
+ drive_speed_mph=float(mmms['drive_speed_mph'])
+ )
+
+class NetworkBuilder:
+ """
+ Handles network construction, node assignment, and transit stop processing for transportation networks.
+
+ This class processes raw node and link data to create a network and processes transit stops. It handles coordinate projections, network topology,
+ and centroid connections.
+ """
+
+ def __init__(self, nodes: gpd.GeoDataFrame, links: gpd.GeoDataFrame,
+ stops: pd.DataFrame, routes: pd.DataFrame, config: dict):
+ """
+ Initialize the NetworkBuilder with nodes, links, stops, routes, and configuration.
+
+ Args:
+ nodes (gpd.GeoDataFrame): GeoDataFrame containing node geometries and attributes.
+ Must include columns for node ID and coordinates.
+ links (gpd.GeoDataFrame): GeoDataFrame containing link geometries and attributes.
+ Must include columns for from/to nodes and length.
+ stops (pd.DataFrame): Transit stops (typically already processed via
+ process_transit_stops when constructed through from_files).
+ routes (pd.DataFrame): Route attributes (route ID and mode columns).
+ config (dict): Configuration dictionary containing:
+ - mmms (dict): Network construction parameters
+ - mmms_link_ref_id (str): Column name for link start node
+ - mmms_link_nref_id (str): Column name for link end node
+ - mmms_link_len (str): Column name for link length
+
+ Note:
+ The network is built immediately upon initialization using _build_network().
+ """
+ self.nodes = nodes
+ self.links = links
+ self.stops = stops
+ self.routes = routes
+ self.config = config
+ self.network = self._build_network(nodes, links, config)
+
+ @classmethod
+ def from_files(cls, model_inputs: str, config: dict) -> 'NetworkBuilder':
+ """
+ Create a NetworkBuilder instance by reading nodes and links from shapefiles.
+
+ This factory method handles reading and processing the raw input files to create
+ a properly configured NetworkBuilder instance.
+
+ Args:
+ model_inputs (str): Path to directory containing input shapefiles
+ config (dict): Configuration dictionary containing:
+ - mmms (dict): File and processing parameters
+ - shapefile_node_name (str): Filename for nodes shapefile
+ - shapefile_name (str): Filename for links shapefile
+
+ Returns:
+ NetworkBuilder: A fully initialized NetworkBuilder instance with processed
+ nodes and links.
+ """
+ # Read and process nodes
+ nodes = gpd.read_file(os.path.join(model_inputs, config['mmms']['shapefile_node_name']))
+ nodes = cls._process_nodes(nodes, config)
+
+ # Read and process links
+ links = gpd.read_file(os.path.join(model_inputs, config['mmms']['shapefile_name']))
+ links = cls._process_links(links, config)
+
+ # Read and process stops and routes
+ stops = pd.read_csv(os.path.join(model_inputs, config['stop_attributes']['file']))
+ routes = pd.read_csv(os.path.join(model_inputs, config['route_attributes']['file']))
+ cols = [config['route_attributes']['rid_field'],
+ config['route_attributes']['mode']]
+ routes = routes.filter(cols)
+ # Attach each stop's route mode via the shared route-ID field (inner join:
+ # stops whose route ID has no match in routes.csv are dropped).
+ stops = stops.merge(routes,
+ left_on=config['stop_attributes']['rid_field'],
+ right_on=config['route_attributes']['rid_field'])
+ # NOTE(review): the pandana network is built here to snap stops to nodes,
+ # then built a second time inside __init__ -- redundant but harmless work.
+ network = cls._build_network(nodes, links, config)
+ stops = cls.process_transit_stops(stops, network, nodes, config)
+
+ return cls(nodes, links, stops, routes, config)
+
+ @staticmethod
+ def _process_nodes(nodes: gpd.GeoDataFrame, config: dict) -> gpd.GeoDataFrame:
+ """
+ Process raw nodes GeoDataFrame by projecting and adding coordinates.
+
+ Performs the following operations:
+ 1. Projects the geometries to the project's projected CRS (settings.epsg)
+ 2. Sets the node ID column as the index
+ 3. Extracts X and Y coordinates from the geometry
+
+ Args:
+ nodes (gpd.GeoDataFrame): Raw nodes GeoDataFrame with geometry column
+ config (dict): Configuration with 'settings.epsg' and the ID field name
+
+ Returns:
+ gpd.GeoDataFrame: Processed nodes with:
+ - Projected coordinates
+ - node ID as index
+ - X and Y columns containing coordinates
+ """
+ crs = config['settings']['epsg']
+ # NOTE(review): the index field is taken from config['stop_attributes']
+ # ('NO' in the shipped YAML) even though these are network nodes --
+ # presumably node and stop files share the same ID column name; confirm.
+ nodes = nodes.to_crs(crs).set_index(config['stop_attributes']['id_field'])
+ nodes['X'] = nodes.geometry.x
+ nodes['Y'] = nodes.geometry.y
+ return nodes
+
+ @staticmethod
+ def _process_links(links: gpd.GeoDataFrame, config: dict) -> gpd.GeoDataFrame:
+ """
+ Process raw links GeoDataFrame by reprojecting to the project CRS.
+
+ Args:
+ links: Raw links GeoDataFrame
+ config: Configuration with 'settings.epsg' target CRS
+
+ Returns:
+ Processed links GeoDataFrame in the project's projected CRS
+ """
+ return links.to_crs(config['settings']['epsg'])
+
+ @classmethod
+ def _build_network(cls, nodes: gpd.GeoDataFrame, links: gpd.GeoDataFrame, config: dict) -> pdna.Network:
+ """Build a pandana routing network from nodes and links.
+
+ Edge weights are link lengths divided by 5280, i.e. the shapefile
+ lengths are in feet and the network impedance is in miles.
+ """
+ mmms = config['mmms']
+ # nodes.index = nodes.index.astype('int32')
+ # links.index = links.index.astype('int32')
+ # links[mmms['mmms_link_ref_id']] = links[mmms['mmms_link_ref_id']].astype('int32')
+ # links[mmms['mmms_link_nref_id']] = links[mmms['mmms_link_nref_id']].astype('int32')
+
+ # links[mmms['mmms_link_ref_id']] = links[mmms['mmms_link_ref_id']].astype('float64')
+ # links[mmms['mmms_link_nref_id']] = links[mmms['mmms_link_nref_id']].astype('float64')
+
+ # print(links[mmms['mmms_link_ref_id']].dtype)
+ # print(links[mmms['mmms_link_nref_id']].dtype)
+ # print(links[mmms['mmms_link_len']].dtype)
+ # print("Before: ", links.index.dtype)
+ # links.index = links.index.astype('int32')
+ # print("After: ", links.index.dtype)
+ # print("NaN's:", links.isna().sum())
+
+ # links[mmms['mmms_link_len']] = links[mmms['mmms_link_len']].astype('int32')
+
+ # twoway comes from config ('two_way_network'): False means one link row
+ # per direction in the shapefile.
+ return pdna.Network(
+ nodes.X,
+ nodes.Y,
+ links[mmms['mmms_link_ref_id']],
+ links[mmms['mmms_link_nref_id']],
+ links[[mmms['mmms_link_len']]] / 5280.0,
+ twoway=mmms['two_way_network']
+ )
+
+ @classmethod
+ def get_closest_net_node_to_MGRA(cls, nodes: gpd.GeoDataFrame, links: gpd.GeoDataFrame,
+ config: dict) -> pd.DataFrame:
+ """
+ Gets closest network nodes to MAZ centroids. This is used to assign network nodes to MAZ centroids. These assigned nodes are then used to calculate skims.
+
+ Args:
+ nodes: Processed nodes GeoDataFrame
+ links: Processed links GeoDataFrame
+ config: Configuration dictionary
+
+ Returns:
+ DataFrame containing centroid information (MAZ, centroid node, and the
+ connector end node with its coordinates)
+ """
+ # Get closest network nodes for MAZ centroids, e.g. 'centroid connector' start and end nodes
+ maz_closest_network_node_id = cls._get_closest_network_nodes(nodes, links, config)
+
+ # Create centroids DataFrame
+ centroids = cls._create_centroids_df(nodes, config)
+
+ # Merge with closest network nodes. This will add the end node of the connector as the associated network node for the MGRA
+ return cls._merge_centroids_with_connector_end_nodes(centroids, maz_closest_network_node_id,
+ nodes, config)
+
+ @staticmethod
+ def _get_closest_network_nodes(nodes: gpd.GeoDataFrame,
+ links: gpd.GeoDataFrame,
+ config: dict) -> pd.DataFrame:
+ """Get closest network nodes for MAZs.
+
+ A node with a non-zero MAZ attribute is a centroid; links whose
+ from-node is a centroid are its connectors, so the connector's
+ to-node is the centroid's nearest real network node.
+ """
+ mmms = config['mmms']
+ maz_id = mmms['mmms_node_maz_id']
+
+ # Return the 'centroid connector' start and end nodes
+ return links[links[mmms["mmms_link_ref_id"]].isin(
+ list(nodes[nodes[maz_id]!=0].index)
+ )][[mmms["mmms_link_ref_id"], mmms["mmms_link_nref_id"]]]
+
+ @staticmethod
+ def _create_centroids_df(nodes: gpd.GeoDataFrame, config: dict) -> pd.DataFrame:
+ """Create initial centroids DataFrame (one row per MAZ centroid node).
+
+ NOTE(review): the MAZ column is read via attribute access (nodes.MAZ)
+ while the filter uses config['mmms']['mmms_node_maz_id'] -- these only
+ agree when that config value is literally 'MAZ', as in the shipped YAML.
+ """
+ maz_nodes = nodes[nodes[config['mmms']['mmms_node_maz_id']]!=0]
+ return pd.DataFrame({
+ 'X': maz_nodes.X,
+ 'Y': maz_nodes.Y,
+ 'MAZ': maz_nodes.MAZ,
+ 'MAZ_centroid_id': maz_nodes.index
+ })
+
+ @staticmethod
+ def _merge_centroids_with_connector_end_nodes(centroids: pd.DataFrame,
+ closest_nodes: pd.DataFrame,
+ nodes: gpd.GeoDataFrame,
+ config: dict) -> pd.DataFrame:
+ """Merge centroids with their closest network nodes at the end of the connector.
+
+ Left join: a centroid with no outgoing connector link keeps a NaN
+ network_node_id, which would fail the coordinate lookup below --
+ inputs are assumed to have a connector for every centroid.
+ """
+ mmms = config['mmms']
+
+ centroids = pd.merge(
+ centroids,
+ closest_nodes,
+ left_on='MAZ_centroid_id',
+ right_on=mmms["mmms_link_ref_id"],
+ how='left'
+ )
+
+ centroids = centroids.rename(columns={mmms["mmms_link_nref_id"]: 'network_node_id'})
+
+ # Add network node coordinates
+ centroids["network_node_x"] = nodes["X"].loc[centroids["network_node_id"]].tolist()
+ centroids["network_node_y"] = nodes["Y"].loc[centroids["network_node_id"]].tolist()
+
+ return centroids
+
+ @staticmethod
+ def _process_stop_geometry(stops: pd.DataFrame, config: dict) -> gpd.GeoDataFrame:
+ """
+ Convert stops to GeoDataFrame and project coordinates.
+
+ Args:
+ stops (pd.DataFrame): Stops DataFrame with Longitude/Latitude columns
+ in WGS84 (EPSG:4326)
+ config (dict): Configuration with 'settings.epsg' target CRS
+
+ Returns:
+ gpd.GeoDataFrame: Projected stops whose Longitude/Latitude columns are
+ OVERWRITTEN with projected X/Y values (no longer degrees)
+ """
+ crs = config['settings']['epsg']
+ # NOTE(review): sets a process-global pandas display option as a side
+ # effect; affects all subsequent float formatting, not just this frame.
+ pd.set_option('display.float_format', lambda x: '%.9f' % x)
+
+ gpd_stops = gpd.GeoDataFrame(
+ stops,
+ geometry=gpd.points_from_xy(stops.Longitude, stops.Latitude, crs='epsg:4326')
+ )
+ gpd_stops = gpd_stops.to_crs(crs)
+
+ gpd_stops['Longitude'] = gpd_stops['geometry'].x
+ gpd_stops['Latitude'] = gpd_stops['geometry'].y
+
+ return gpd_stops
+
+ @staticmethod
+ def _assign_network_nodes_to_stops(
+ stops: pd.DataFrame,
+ gpd_stops: gpd.GeoDataFrame,
+ net: pdna.Network,
+ nodes: pd.DataFrame
+ ) -> pd.DataFrame:
+ """
+ Assign the nearest network node to each stop.
+
+ Args:
+ stops (pd.DataFrame): Stops DataFrame (mutated in place and returned)
+ gpd_stops (gpd.GeoDataFrame): Projected stops; at this point the
+ Longitude/Latitude columns hold projected X/Y (see
+ _process_stop_geometry), matching the network's coordinate space
+ net (pdna.Network): Network object
+ nodes (pd.DataFrame): Network nodes with X/Y columns
+
+ Returns:
+ pd.DataFrame: Stops with assigned network nodes and their coordinates
+ """
+ stops["network_node_id"] = net.get_node_ids(gpd_stops['Longitude'], gpd_stops['Latitude'])
+ stops["network_node_x"] = nodes["X"].loc[stops["network_node_id"]].tolist()
+ stops["network_node_y"] = nodes["Y"].loc[stops["network_node_id"]].tolist()
+ return stops
+
+ @staticmethod
+ def _assign_transit_modes(stops: pd.DataFrame, config: dict) -> pd.DataFrame:
+ # FIXME: Add mode dict to configs
+ """
+ Assign simplified transit modes to stops.
+
+ Modes:
+ - L: Local (Mode in config['modes']['local_modes'])
+ - E: Premium (Mode in config['modes']['prm_modes'])
+ - N: None -- any mode listed in neither set; such stops are later
+ excluded by the L/E filters in the skim steps
+
+ Args:
+ stops (pd.DataFrame): Stops DataFrame with Mode column
+ config (dict): Configuration with 'modes' lists
+
+ Returns:
+ pd.DataFrame: Stops with Mode overwritten by the simplified code
+ """
+ stops['Mode'] = np.where(
+ stops['Mode'].isin(config['modes']['local_modes']), 'L',
+ np.where(stops['Mode'].isin(config['modes']['prm_modes']), 'E', 'N')
+ )
+ return stops
+
+ @classmethod
+ def process_transit_stops(cls, stops: pd.DataFrame, network: pdna.Network,
+ nodes: pd.DataFrame, config: dict) -> pd.DataFrame:
+ """Process transit stops using the provided network: project geometry,
+ snap each stop to its nearest network node, then collapse route modes
+ to the simplified L/E/N codes."""
+ gpd_stops = cls._process_stop_geometry(stops, config)
+ stops = cls._assign_network_nodes_to_stops(stops, gpd_stops, network, nodes)
+ stops = cls._assign_transit_modes(stops, config)
+ return stops
+
+class SkimGenerator:
+ """Main class for generating walk, bike, and stop skims"""
+
+ def __init__(self, network_builder: NetworkBuilder, params: SkimParameters, output_path: str):
+ """Cache the builder, parameters, and output path, and precompute the
+ MAZ-centroid-to-network-node table used by all three skim generators."""
+ self.network_builder = network_builder
+ self.params = params
+ self.output_path = output_path
+ # Centroid table: MAZ id, centroid node, connector end node + coordinates
+ self.net_centroids = self._get_closest_net_node(network_builder.nodes, network_builder.links, network_builder.config)
+ self.stops = self.network_builder.stops
+
+ def _get_closest_net_node(self, nodes: gpd.GeoDataFrame, links: gpd.GeoDataFrame, config: dict) -> pd.DataFrame:
+ """Get centroids DataFrame (thin delegate to the builder's classmethod)."""
+ return self.network_builder.get_closest_net_node_to_MGRA(nodes, links, config)
+
+ def generate_maz_maz_walk_skim(self) -> pd.DataFrame:
+ """Generate MAZ to MAZ walk skims (network distance in miles, plus
+ i/j/actual columns for TNC routing and intrazonal rows)."""
+ maz_pairs = self._create_maz_pairs(self.net_centroids)
+ walk_skim = self._get_walk_distances(maz_pairs, self.params.max_maz_maz_walk_dist_feet)
+ # Add intrazonal distances
+ walk_skim = self._add_intrazonal_distances(walk_skim)
+ # NOTE(review): uint16 caps ids at 65535 -- confirm MAZ ids fit
+ walk_skim = self._convert_columns_to_type(walk_skim, {'OMAZ': 'uint16', 'DMAZ': 'uint16', 'i': 'uint16', 'j': 'uint16'})
+ return walk_skim
+
+ def generate_maz_maz_bike_skim(self) -> pd.DataFrame:
+ """Generate MAZ to MAZ bike skims (network distance in miles; no
+ intrazonal rows or TNC columns, unlike the walk skim)."""
+ maz_pairs = self._create_maz_pairs(self.net_centroids)
+ bike_skim = self._get_bike_distances(maz_pairs, self.params.max_maz_maz_bike_dist_feet)
+ bike_skim = self._convert_columns_to_type(bike_skim, {'OMAZ': 'uint16', 'DMAZ': 'uint16'})
+ return bike_skim
+
+ def generate_maz_stop_walk_skim(self) -> pd.DataFrame:
+ """Generate MAZ to stop walk skims: one row per MAZ with the walk
+ distance to the nearest local-bus and premium-transit stop (999999
+ sentinel where no stop is within the walk threshold)."""
+ maz_stop_pairs, maz_stop_output = self._create_maz_stop_pairs(self.net_centroids, self.stops)
+ stop_skim = self._get_stop_distances(maz_stop_pairs)
+ stop_skim = self._process_stop_skims_by_mode(stop_skim, maz_stop_output)
+
+ return stop_skim.sort_values('maz')
+
+ def _create_maz_pairs(self, centroids: pd.DataFrame) -> pd.DataFrame:
+ """Create all possible MAZ to MAZ pairs with their network nodes.
+
+ Cross-join of centroids with itself (O(n^2) rows -- memory scales with
+ the square of the MAZ count). DISTWALK here is the straight-line
+ (euclidean) distance in miles, used only as a prefilter before the
+ network shortest-path pass. Self-pairs (OMAZ == DMAZ) are dropped.
+ """
+ o_m = np.repeat(centroids['MAZ'].tolist(), len(centroids))
+ d_m = np.tile(centroids['MAZ'].tolist(), len(centroids))
+
+ pairs = pd.DataFrame({
+ "OMAZ": o_m,
+ "DMAZ": d_m,
+ "OMAZ_NODE": np.repeat(centroids['network_node_id'].tolist(), len(centroids)),
+ "DMAZ_NODE": np.tile(centroids['network_node_id'].tolist(), len(centroids)),
+ "OMAZ_NODE_X": np.repeat(centroids['network_node_x'].tolist(), len(centroids)),
+ "OMAZ_NODE_Y": np.repeat(centroids['network_node_y'].tolist(), len(centroids)),
+ "DMAZ_NODE_X": np.tile(centroids['network_node_x'].tolist(), len(centroids)),
+ "DMAZ_NODE_Y": np.tile(centroids['network_node_y'].tolist(), len(centroids))
+ })
+
+ # Euclidean distance in feet, converted to miles
+ pairs["DISTWALK"] = pairs.eval("(((OMAZ_NODE_X-DMAZ_NODE_X)**2 + (OMAZ_NODE_Y-DMAZ_NODE_Y)**2)**0.5) / 5280.0")
+ return pairs[pairs["OMAZ"] != pairs["DMAZ"]]
+
+ def _create_maz_stop_pairs(self, centroids: pd.DataFrame, stops: pd.DataFrame, ) -> Tuple[pd.DataFrame, pd.DataFrame]:
+ """
+ Build a table of MAZ to transit stop connections with initial distances.
+
+ Creates a cross-join between MAZs and stops, calculating the straight-line
+ distance between each MAZ-stop pair. Distances are converted to miles.
+
+ Args:
+ centroids: Centroid table from _get_closest_net_node
+ stops: Processed stops with NO, network node, and Mode columns
+
+ Returns:
+ Tuple of (pairs, maz_stop_output) where pairs has one row per
+ MAZ-stop combination with euclidean DISTANCE in miles, and
+ maz_stop_output is a sorted one-column ('maz') frame used as the
+ base of the final output file.
+ """
+ # Create cross product of MAZs and stops
+ o_m = np.repeat(centroids['MAZ'].tolist(), len(stops))
+ o_m_nn = np.repeat(centroids['network_node_id'].tolist(), len(stops))
+ d_t = np.tile(stops['NO'].tolist(), len(centroids))
+ d_t_nn = np.tile(stops['network_node_id'].tolist(), len(centroids))
+ o_m_x = np.repeat(centroids['network_node_x'].tolist(), len(stops))
+ o_m_y = np.repeat(centroids['network_node_y'].tolist(), len(stops))
+ d_t_x = np.tile(stops['network_node_x'].tolist(), len(centroids))
+ d_t_y = np.tile(stops['network_node_y'].tolist(), len(centroids))
+ mode = np.tile(stops['Mode'].tolist(), len(centroids))
+
+ # Create DataFrame
+ pairs = pd.DataFrame({
+ "MAZ": o_m,
+ "stop": d_t,
+ "OMAZ_NODE": o_m_nn,
+ "DSTOP_NODE": d_t_nn,
+ "OMAZ_NODE_X": o_m_x,
+ "OMAZ_NODE_Y": o_m_y,
+ "DSTOP_NODE_X": d_t_x,
+ "DSTOP_NODE_Y": d_t_y,
+ "MODE": mode
+ })
+
+ # Calculate distances in miles
+ pairs["DISTANCE"] = pairs.eval(
+ "(((OMAZ_NODE_X-DSTOP_NODE_X)**2 + (OMAZ_NODE_Y-DSTOP_NODE_Y)**2)**0.5) / 5280.0"
+ )
+
+ #create stop distances output file
+ maz_stop_output = pd.DataFrame(centroids['MAZ'])
+ maz_stop_output = maz_stop_output.rename(columns = {'MAZ': 'maz'})
+ maz_stop_output['maz'] = maz_stop_output['maz'].astype('int')
+ maz_stop_output.sort_values(by=['maz'], inplace=True)
+
+ return pairs, maz_stop_output
+
+ def _get_walk_distances(self, pairs: pd.DataFrame, max_dist: float) -> pd.DataFrame:
+ """Process walking distances between MAZ pairs.
+
+ Prefilters by euclidean DISTWALK, replaces it with the network
+ shortest-path distance (miles), and refilters by the same threshold.
+ This function also adds i, j, and actual columns that are required by
+ Java code for TNC routing ('actual' is walk time in minutes).
+ """
+ filtered = pairs[pairs["DISTWALK"] <= max_dist / 5280.0].copy().reset_index(drop=True)
+ filtered["DISTWALK"] = self.network_builder.network.shortest_path_lengths(
+ filtered["OMAZ_NODE"], filtered["DMAZ_NODE"])
+ result = filtered[filtered["DISTWALK"] <= max_dist / 5280.0]
+
+ # Add missing MAZs so every origin appears at least once
+ result = self._add_missing_mazs(self.net_centroids, result, pairs, 'DISTWALK')
+
+ # Add required fields for TNC routing
+ result[['i', 'j']] = result[['OMAZ', 'DMAZ']]
+ result['actual'] = result['DISTWALK'] / self.params.walk_speed_mph * 60.0
+
+ return result
+
+ def _get_bike_distances(self, pairs: pd.DataFrame, max_dist: float) -> pd.DataFrame:
+ """Process bike distances between MAZ pairs.
+
+ Same two-pass pattern as _get_walk_distances: the euclidean DISTWALK
+ column prefilters candidates, then DISTBIKE holds the network
+ shortest-path distance in miles.
+ """
+ filtered = pairs[pairs["DISTWALK"] <= max_dist / 5280.0].copy()
+ filtered["DISTBIKE"] = self.network_builder.network.shortest_path_lengths(
+ filtered["OMAZ_NODE"], filtered["DMAZ_NODE"])
+ result = filtered[filtered["DISTBIKE"] <= max_dist / 5280.0]
+
+ # Add missing MAZs so every origin appears at least once
+ result = self._add_missing_mazs(self.net_centroids, result, pairs, 'DISTBIKE')
+
+ return result
+
+ def _get_stop_distances(self, pairs: pd.DataFrame) -> pd.DataFrame:
+ """Process stop distances between MAZ-stop pairs.
+
+ Each filter reads as (within local-bus range AND local) OR (within
+ premium range AND premium); '&' binds tighter than '|', so the
+ grouping is as intended. Euclidean prefilter first, then the network
+ shortest-path DISTWALK is refiltered by the mode-specific threshold.
+ """
+ filtered = pairs[(pairs["DISTANCE"] <= self.params.max_maz_local_bus_stop_walk_dist_feet / 5280.0) & (pairs['MODE'] == 'L') |
+ (pairs["DISTANCE"] <= self.params.max_maz_premium_transit_stop_walk_dist_feet / 5280.0) & (pairs['MODE'] == 'E')]
+ filtered.reset_index(drop=True, inplace=True)
+ filtered["DISTWALK"] = self.network_builder.network.shortest_path_lengths(
+ filtered["OMAZ_NODE"], filtered["DSTOP_NODE"])
+
+ result = filtered[(filtered["DISTWALK"] <= self.params.max_maz_local_bus_stop_walk_dist_feet / 5280.0) & (filtered['MODE'] == 'L') |
+ (filtered["DISTWALK"] <= self.params.max_maz_premium_transit_stop_walk_dist_feet / 5280.0) & (filtered['MODE'] == 'E')]
+
+ return result
+
+ def _process_stop_skims_by_mode(self, stop_skims: pd.DataFrame, maz_stop_output: pd.DataFrame) -> pd.DataFrame:
+ """
+ Process stop skims by transit mode and merge with output DataFrame.
+
+ Args:
+ stop_skims (pd.DataFrame): DataFrame containing stop skim data with columns:
+ - MODE: Transit mode ('L' or 'E')
+ - MAZ: MAZ ID
+ - DISTWALK: Walk distance
+ maz_stop_output (pd.DataFrame): Base DataFrame to merge results into
+
+ Returns:
+ pd.DataFrame: Processed DataFrame with walk distances by mode:
+ - maz: MAZ ID
+ - walk_dist_local_bus: Walk distance to nearest local bus stop
+ - walk_dist_premium_transit: Walk distance to nearest premium transit stop
+ """
+ modes = {"L": "local_bus", "E": "premium_transit"}
+
+ for mode, mode_descr in modes.items():
+ stop_skims_by_mode = (
+ stop_skims[stop_skims.MODE == mode]
+ .groupby("MAZ")["DISTWALK"]
+ .min()
+ .reset_index()
+ )
+
+ stop_skims_by_mode = stop_skims_by_mode.rename(
+ {
+ "MAZ": "maz",
+ "DISTWALK": f"walk_dist_{mode_descr}",
+ },
+ axis="columns",
+ )
+
+ maz_stop_output = maz_stop_output.merge(
+ stop_skims_by_mode,
+ on="maz",
+ how="outer"
+ )
+ maz_stop_output[f"walk_dist_{mode_descr}"].fillna(999999, inplace=True)
+
+ return maz_stop_output
+
+ def _add_intrazonal_distances(self, skim: pd.DataFrame) -> pd.DataFrame:
+ """Add intrazonal distances based on the 3 nearest neighbors.
+
+ For each origin, DISTWALK for the OMAZ==DMAZ row is half the mean of
+ its three shortest interzonal distances; 'actual' is the matching walk
+ time in minutes. Origins absent from `skim` get no intrazonal row.
+ """
+ skim = skim.sort_values(['OMAZ', 'DISTWALK'])
+ skim.set_index(['OMAZ', 'DMAZ'], inplace=True)
+ unique_omaz = skim.index.get_level_values(0).unique()
+ # find the average of the closest 3 zones (fewer if fewer rows exist)
+ means = skim.loc[(unique_omaz, slice(None)), 'DISTWALK'].groupby(level=0).head(3).groupby(level=0).mean()
+ intra_skims = pd.DataFrame({
+ 'OMAZ': unique_omaz,
+ 'DMAZ': unique_omaz,
+ 'DISTWALK': means.values/2,
+ 'i': unique_omaz,
+ 'j': unique_omaz,
+ 'actual': (means.values/self.params.walk_speed_mph * 60.0) / 2
+ }).set_index(['OMAZ', 'DMAZ'])
+
+ return pd.concat([skim, intra_skims], axis=0).reset_index()
+
+ def _add_missing_mazs(self, centroids: pd.DataFrame, skim_table: pd.DataFrame,
+ cost_table: pd.DataFrame, dist_col: str = 'DISTWALK') -> pd.DataFrame:
+ """Add missing MAZs to skim table since some MAZs will not be within distance of a stop or each other.
+ This will make sure we will have skims for all MAZs in the region.
+
+ For each absent origin, the single nearest destination (by euclidean
+ distance from `cost_table`) is appended. NOTE(review): the returned
+ table is reduced to [OMAZ, DMAZ, dist_col] only -- columns such as
+ i/j/actual added by the caller are dropped here; confirm downstream
+ consumers expect that.
+ """
+ missing_maz = centroids[~centroids['MAZ'].isin(skim_table['OMAZ'])][['MAZ']]
+ missing_maz = missing_maz.rename(columns={'MAZ': 'OMAZ'})
+
+ filtered_cost = cost_table[cost_table['OMAZ'] != cost_table['DMAZ']]
+ if dist_col != 'DISTWALK':
+ filtered_cost = filtered_cost.rename(columns={'DISTWALK': dist_col})
+ sorted_cost = filtered_cost.sort_values(dist_col)
+ # First row per OMAZ after sorting == nearest destination
+ grouped_cost = sorted_cost.groupby('OMAZ').agg({
+ 'DMAZ': 'first',
+ dist_col: 'first'
+ }).reset_index()
+
+ missing_maz = missing_maz.merge(grouped_cost, on='OMAZ', how='left')
+ skim_table = pd.concat([skim_table[["OMAZ", "DMAZ", dist_col]], missing_maz]).sort_values(["OMAZ", "DMAZ"])
+
+ return skim_table
+
+ def _convert_columns_to_type(self, df: pd.DataFrame, columns: Dict[str, str]) -> pd.DataFrame:
+ """
+ Convert specified columns in a DataFrame to a given data type.
+
+ Thin wrapper over DataFrame.astype; values outside the target dtype's
+ range (e.g. uint16) will wrap or raise per numpy casting rules.
+
+ Args:
+ df (pd.DataFrame): The DataFrame to convert.
+ columns (Dict[str, str]): A dictionary where keys are column names and values are the target data types.
+
+ Returns:
+ pd.DataFrame: The DataFrame with converted columns.
+ """
+ return df.astype(columns)
+
+def main(path: str):
+ """Main execution function.
+
+ Args:
+ path: Project directory containing 2zoneSkim_params.yaml plus
+ input/ and output/ subdirectories.
+
+ Runs whichever of the three skim steps are enabled in settings and
+ writes each result as CSV to output/.
+ """
+ # Load configuration
+ with open(os.path.join(path, "2zoneSkim_params.yaml"), 'r') as f:
+ config = yaml.safe_load(f)
+
+ params = SkimParameters.from_yaml(config)
+ model_inputs = os.path.join(path, "input")
+ output_path = os.path.join(path, "output")
+
+ # Create network builder using class method
+ network_builder = NetworkBuilder.from_files(model_inputs, config)
+
+ # Initialize skim generator
+ skim_generator = SkimGenerator(network_builder, params, output_path)
+
+ # Generate and save skims
+ if config['settings']['run_maz_maz_walk']:
+ print(f"{datetime.now().strftime('%H:%M:%S')} Generating MAZ-MAZ walk skims...")
+ walk_skim = skim_generator.generate_maz_maz_walk_skim()
+ walk_skim.to_csv(os.path.join(output_path, config['settings']['maz_maz_walk_output']), index=False)
+
+ if config['settings']['run_maz_maz_bike']:
+ print(f"{datetime.now().strftime('%H:%M:%S')} Generating MAZ-MAZ bike skims...")
+ bike_skim = skim_generator.generate_maz_maz_bike_skim()
+ bike_skim.to_csv(os.path.join(output_path, config['settings']['maz_maz_bike_output']), index=False)
+
+ if config['settings']['run_maz_stop_walk']:
+ print(f"{datetime.now().strftime('%H:%M:%S')} Generating MAZ-stop walk skims...")
+ stop_skim = skim_generator.generate_maz_stop_walk_skim()
+ stop_skim.to_csv(os.path.join(output_path, config['settings']['maz_stop_walk_output']), index=False)
+
+if __name__ == "__main__":
+ import sys
+ # Usage: python 2zoneSkim.py <project_dir>; raises IndexError if omitted
+ main(sys.argv[1])
\ No newline at end of file
diff --git a/maz_maz_stop_skims/2zoneSkim_params.yaml b/maz_maz_stop_skims/2zoneSkim_params.yaml
new file mode 100644
index 0000000..92eba3b
--- /dev/null
+++ b/maz_maz_stop_skims/2zoneSkim_params.yaml
@@ -0,0 +1,54 @@
+# Recommend to use SANDAG's python environment:
+# https://github.com/SANDAG/ABM/blob/main/src/asim/scripts/environment.yml
+
+# Project settings
+settings:
+ run_maz_maz_walk: False
+ run_maz_maz_bike: False
+ run_maz_stop_walk: True
+ maz_maz_walk_output: "maz_maz_walk.csv"
+ maz_maz_bike_output: "maz_maz_bike.csv"
+ maz_stop_walk_output: "maz_stop_walk.csv"
+ input_dir: input
+ output_dir: output
+ epsg: 32126 # projected crs of project
+
+# Stop attributes CSV, expected in input_dir
+# One stop (row) per transit route
+stop_attributes:
+ file: "stops.csv"
+ id_field: "NO"
+ x_field: "Latitude"
+ y_field: "Longitude"
+ rid_field: "Route_ID"
+
+# Route attributes CSV, expected in input_dir
+route_attributes:
+ file: "routes.csv"
+ rid_field: "Route_ID"
+ mode: "Mode"
+
+# Modes
+modes:
+ prm_modes: ['a', 'l', 'r'] # premium
+ local_modes: ['b', 'e']
+
+# Network and nodes shp, expected in input_dir
+# Network must include connector links
+# Nodes must include connector centroids (MAZ centroids) as nodes
+# Node id must be consistent across network and node files
+mmms:
+ shapefile_name: "links.shp"
+ shapefile_node_name: "nodes.shp"
+ two_way_network: False # True if links are bidirectional, False if one link per direction
+ mmms_link_ref_id: "FROMNODENO"
+ mmms_link_nref_id: "TONODENO"
+ mmms_link_id: "NO"
+ mmms_link_len: "length" # feet
+ mmms_node_maz_id: "MAZ"
+ max_maz_maz_walk_dist_feet: 15840 # int
+ max_maz_maz_bike_dist_feet: 26400 # int
+ max_maz_local_bus_stop_walk_dist_feet: 3960 # int
+ max_maz_premium_transit_stop_walk_dist_feet: 6600 # int
+ walk_speed_mph: 3.0 # float
+ drive_speed_mph: 25.0 # float
\ No newline at end of file