diff --git a/.github/workflows/core_tests.yml b/.github/workflows/core_tests.yml index 61c55299c..2f43642f1 100644 --- a/.github/workflows/core_tests.yml +++ b/.github/workflows/core_tests.yml @@ -49,7 +49,7 @@ jobs: mamba env update -n asim-test -f conda-environments/github-actions-tests.yml mamba install --yes \ "psutil=5.9.5" \ - "pydantic=1.10.13" \ + "pydantic=2.6.1" \ "pypyr=5.8.0" \ "pytables=3.6.1" \ "pytest-cov" \ @@ -149,7 +149,7 @@ jobs: mamba env update -n asim-test -f conda-environments/github-actions-tests.yml mamba install --yes \ "psutil=5.9.5" \ - "pydantic=1.10.13" \ + "pydantic=2.6.1" \ "pypyr=5.8.0" \ "pytables=3.6.1" \ "pytest-cov" \ @@ -247,7 +247,7 @@ jobs: mamba env update -n asim-test -f conda-environments/github-actions-tests.yml mamba install --yes \ "psutil=5.9.5" \ - "pydantic=1.10.13" \ + "pydantic=2.6.1" \ "pypyr=5.8.0" \ "pytables=3.6.1" \ "pytest-cov" \ @@ -344,7 +344,7 @@ jobs: mamba env update -n asim-test -f conda-environments/github-actions-tests.yml mamba install --yes \ "psutil=5.9.5" \ - "pydantic=1.10.13" \ + "pydantic=2.6.1" \ "pypyr=5.8.0" \ "pytables=3.6.1" \ "pytest-cov" \ @@ -411,7 +411,7 @@ jobs: mamba env update -n asim-test -f conda-environments/github-actions-tests.yml mamba install --yes \ "psutil=5.9.5" \ - "pydantic=1.10.13" \ + "pydantic=2.6.1" \ "pypyr=5.8.0" \ "pytables=3.6.1" \ "pytest-cov" \ @@ -477,7 +477,7 @@ jobs: mamba env update -n asim-test -f conda-environments/github-actions-tests.yml mamba install --yes \ "psutil=5.9.5" \ - "pydantic=1.10.13" \ + "pydantic=2.6.1" \ "pypyr=5.8.0" \ "pytables=3.6.1" \ "pytest-cov" \ diff --git a/activitysim/abm/models/non_mandatory_tour_frequency.py b/activitysim/abm/models/non_mandatory_tour_frequency.py index 083b2b397..b300b0e88 100644 --- a/activitysim/abm/models/non_mandatory_tour_frequency.py +++ b/activitysim/abm/models/non_mandatory_tour_frequency.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +import warnings from pathlib import Path from typing import Any @@ -200,12 +201,24 @@ def non_mandatory_tour_frequency( model_settings_file_name, ) - # FIXME kind of tacky both that we know to add this here and del it below - # 'tot_tours' is used in model_spec expressions alternatives = simulate.read_model_alts( state, "non_mandatory_tour_frequency_alternatives.csv", set_index=None ) - alternatives["tot_tours"] = alternatives.sum(axis=1) + if "tot_tours" not in alternatives.columns: + # add a column for total tours + alternatives["tot_tours"] = alternatives.sum(axis=1) + warnings.warn( + "The 'tot_tours' column may not be automatically added in the future.", + FutureWarning, + ) + else: + # tot_tours already exists, check if it is consistent with legacy behavior + if not (alternatives["tot_tours"] == alternatives.sum(axis=1)).all(): + warnings.warn( + "The 'tot_tours' column in non_mandatory_tour_frequency_alternatives.csv " + "does not match the sum of the other columns.", + RuntimeWarning, + ) # filter based on results of CDAP choosers = persons_merged diff --git a/activitysim/abm/models/util/canonical_ids.py b/activitysim/abm/models/util/canonical_ids.py index de5ed8ca8..ab2623916 100644 --- a/activitysim/abm/models/util/canonical_ids.py +++ b/activitysim/abm/models/util/canonical_ids.py @@ -182,7 +182,7 @@ def determine_flavors_from_alts_file( flavors = { c: int(alts[c].max() + max_extension) for c in alts.columns - if all(alts[c].astype(str).str.isnumeric()) + if all(alts[c].astype(str).str.isnumeric()) and (c != "tot_tours") } valid_flavors = all( 
[(isinstance(flavor, str) & (num >= 0)) for flavor, num in flavors.items()] diff --git a/activitysim/abm/models/util/logsums.py b/activitysim/abm/models/util/logsums.py index b92045fa8..328957965 100644 --- a/activitysim/abm/models/util/logsums.py +++ b/activitysim/abm/models/util/logsums.py @@ -84,9 +84,9 @@ def compute_location_choice_logsums( computed logsums with same index as choosers """ if isinstance(model_settings, dict): - model_settings = TourLocationComponentSettings.parse_obj(model_settings) + model_settings = TourLocationComponentSettings.model_validate(model_settings) if isinstance(logsum_settings, dict): - logsum_settings = TourModeComponentSettings.parse_obj(logsum_settings) + logsum_settings = TourModeComponentSettings.model_validate(logsum_settings) trace_label = tracing.extend_trace_label(trace_label, "compute_logsums") logger.debug(f"Running compute_logsums with {choosers.shape[0]:d} choosers") diff --git a/activitysim/abm/models/util/test/test_vehicle_type_alts.py b/activitysim/abm/models/util/test/test_vehicle_type_alts.py new file mode 100644 index 000000000..d9cf1c176 --- /dev/null +++ b/activitysim/abm/models/util/test/test_vehicle_type_alts.py @@ -0,0 +1,59 @@ +# ActivitySim +# See full license in LICENSE.txt. + +import pandas as pd +import pandas.testing as pdt + +from activitysim.abm.models.vehicle_type_choice import ( + get_combinatorial_vehicle_alternatives, + construct_model_alternatives, + VehicleTypeChoiceSettings, +) +from activitysim.core import workflow + + +def test_vehicle_type_alts(): + state = workflow.State.make_default(__file__) + + alts_cats_dict = { + "body_type": ["Car", "SUV"], + "fuel_type": ["Gas", "BEV"], + "age": [1, 2, 3], + } + + alts_wide, alts_long = get_combinatorial_vehicle_alternatives(alts_cats_dict) + + # alts are initially constructed combinatorially + assert len(alts_long) == 12, "alts_long should have 12 rows" + assert len(alts_wide) == 12, "alts_wide should have 12 rows" + + model_settings = VehicleTypeChoiceSettings.model_construct() + model_settings.combinatorial_alts = alts_cats_dict + model_settings.PROBS_SPEC = None + model_settings.WRITE_OUT_ALTS_FILE = False + + # constructing veh type data with missing alts + vehicle_type_data = pd.DataFrame( + data={ + "body_type": ["Car", "Car", "Car", "SUV", "SUV"], + "fuel_type": ["Gas", "Gas", "BEV", "Gas", "BEV"], + "age": ["1", "2", "3", "1", "2"], + "dummy_data": [1, 2, 3, 4, 5], + }, + index=[0, 1, 2, 3, 4], + ) + + alts_wide, alts_long = construct_model_alternatives( + state, model_settings, alts_cats_dict, vehicle_type_data + ) + + # should only have alts left that are in the file + assert len(alts_long) == 5, "alts_long should have 5 rows" + + # indexes need to be the same to choices match alts + pdt.assert_index_equal(alts_long.index, alts_wide.index) + + # columns need to be in correct order for downstream configs + pdt.assert_index_equal( + alts_long.columns, pd.Index(["body_type", "age", "fuel_type"]) + ) diff --git a/activitysim/abm/models/vehicle_type_choice.py b/activitysim/abm/models/vehicle_type_choice.py index a635ccbe7..6b17d9e35 100644 --- a/activitysim/abm/models/vehicle_type_choice.py +++ b/activitysim/abm/models/vehicle_type_choice.py @@ -244,8 +244,19 @@ def construct_model_alternatives( ), f"missing vehicle data for alternatives:\n {missing_alts}" else: # eliminate alternatives if no vehicle type data - # if this happens, alts_wide is not the same length as alts_long + num_alts_before_filer = len(alts_wide) alts_wide = alts_wide[alts_wide._merge != 
"left_only"] + logger.warning( + f"Removed {num_alts_before_filer - len(alts_wide)} alternatives not included in input vehicle type data." + ) + # need to also remove any alts from alts_long + alts_long.set_index(["body_type", "age", "fuel_type"], inplace=True) + alts_long = alts_long[ + alts_long.index.isin( + alts_wide.set_index(["body_type", "age", "fuel_type"]).index + ) + ].reset_index() + alts_long.index = alts_wide.index alts_wide.drop(columns="_merge", inplace=True) # converting age to integer to allow interactions in utilities @@ -481,11 +492,11 @@ def iterate_vehicle_type_choice( alts = ( alts_long[alts_long.columns] .apply(lambda row: "_".join(row.values.astype(str)), axis=1) - .values + .to_dict() ) else: - alts = model_spec.columns - choices["vehicle_type"] = choices["vehicle_type"].map(dict(enumerate(alts))) + alts = enumerate(dict(model_spec.columns)) + choices["vehicle_type"] = choices["vehicle_type"].map(alts) # STEP II: append probabilistic vehicle type attributes if probs_spec_file is not None: diff --git a/activitysim/abm/tables/shadow_pricing.py b/activitysim/abm/tables/shadow_pricing.py index 6b06a9d58..1b28883df 100644 --- a/activitysim/abm/tables/shadow_pricing.py +++ b/activitysim/abm/tables/shadow_pricing.py @@ -1233,7 +1233,7 @@ def load_shadow_price_calculator( spc : ShadowPriceCalculator """ if not isinstance(model_settings, TourLocationComponentSettings): - model_settings = TourLocationComponentSettings.parse_obj(model_settings) + model_settings = TourLocationComponentSettings.model_validate(model_settings) num_processes = state.get_injectable("num_processes", 1) diff --git a/activitysim/abm/test/test_misc/test_load_cached_accessibility.py b/activitysim/abm/test/test_misc/test_load_cached_accessibility.py index b19d13646..60288d67c 100644 --- a/activitysim/abm/test/test_misc/test_load_cached_accessibility.py +++ b/activitysim/abm/test/test_misc/test_load_cached_accessibility.py @@ -58,7 +58,7 @@ def test_load_cached_accessibility(): settings = state.settings input_table_list = settings.input_table_list input_table_list.append( - configuration.InputTable.parse_obj( + configuration.InputTable.model_validate( { "tablename": "accessibility", "filename": "cached_accessibility.csv", diff --git a/activitysim/core/configuration/base.py b/activitysim/core/configuration/base.py index 9a3f7b7cd..754865dc1 100644 --- a/activitysim/core/configuration/base.py +++ b/activitysim/core/configuration/base.py @@ -118,7 +118,7 @@ class PreprocessorSettings(PydanticBase): The preprocessor will emit rows to a temporary table that match the rows in this table from the pipeline.""" - TABLES: list[str] | None + TABLES: list[str] | None = None """Names of the additional tables to be merged for the preprocessor. Data from these tables will be merged into the primary table, according diff --git a/activitysim/core/configuration/filesystem.py b/activitysim/core/configuration/filesystem.py index 0f398c8fa..ce50becd4 100644 --- a/activitysim/core/configuration/filesystem.py +++ b/activitysim/core/configuration/filesystem.py @@ -639,7 +639,7 @@ def read_settings_file( include_stack: bool = False, configs_dir_list: tuple[Path] | None = None, validator_class: type[PydanticBase] | None = None, - ) -> dict | PydanticBase: + ) -> PydanticBase | dict: """ Load settings from one or more yaml files. 
@@ -817,7 +817,7 @@ def backfill_settings(settings, backfill): settings.pop("include_settings", None) if validator_class is not None: - settings = validator_class.parse_obj(settings) + settings = validator_class.model_validate(settings) if include_stack: # if we were called recursively, return an updated list of source_file_paths diff --git a/activitysim/core/logit.py b/activitysim/core/logit.py index 053c46e4a..9d282ddda 100644 --- a/activitysim/core/logit.py +++ b/activitysim/core/logit.py @@ -574,7 +574,7 @@ def each_nest(nest_spec: dict | LogitNestSpec, type=None, post_order=False): raise RuntimeError("Unknown nest type '%s' in call to each_nest" % type) if isinstance(nest_spec, dict): - nest_spec = LogitNestSpec.parse_obj(nest_spec) + nest_spec = LogitNestSpec.model_validate(nest_spec) for _node, nest in _each_nest(nest_spec, parent_nest=Nest(), post_order=post_order): if type is None or (type == nest.type): diff --git a/activitysim/core/mp_tasks.py b/activitysim/core/mp_tasks.py index 7d1ffc30e..db92be1da 100644 --- a/activitysim/core/mp_tasks.py +++ b/activitysim/core/mp_tasks.py @@ -887,7 +887,6 @@ def setup_injectables_and_logging(injectables, locutor: bool = True) -> workflow state = workflow.State() state = state.initialize_filesystem(**injectables) state.settings = injectables.get("settings", Settings()) - # state.settings = Settings.parse_obj(injectables.get("settings_package", {})) # register abm steps and other abm-specific injectables # by default, assume we are running activitysim.abm diff --git a/activitysim/core/simulate.py b/activitysim/core/simulate.py index aff2c53e3..133a8b1dc 100644 --- a/activitysim/core/simulate.py +++ b/activitysim/core/simulate.py @@ -467,7 +467,7 @@ def replace_coefficients(nest: LogitNestSpec): coefficients = coefficients["value"].to_dict() if not isinstance(nest_spec, LogitNestSpec): - nest_spec = LogitNestSpec.parse_obj(nest_spec) + nest_spec = LogitNestSpec.model_validate(nest_spec) replace_coefficients(nest_spec) diff --git a/activitysim/core/test/test_input.py b/activitysim/core/test/test_input.py index d0cfc24e2..bedf100d7 100644 --- a/activitysim/core/test/test_input.py +++ b/activitysim/core/test/test_input.py @@ -69,7 +69,7 @@ def test_csv_reader(seed_households, state): """ settings = yaml.load(settings_yaml, Loader=yaml.SafeLoader) - settings = configuration.Settings.parse_obj(settings) + settings = configuration.Settings.model_validate(settings) state.settings = settings hh_file = state.filesystem.get_data_dir()[0].joinpath("households.csv") @@ -94,7 +94,7 @@ def test_hdf_reader1(seed_households, state): """ settings = yaml.load(settings_yaml, Loader=yaml.SafeLoader) - settings = configuration.Settings.parse_obj(settings) + settings = configuration.Settings.model_validate(settings) state.settings = settings hh_file = state.filesystem.get_data_dir()[0].joinpath("households.h5") @@ -120,7 +120,7 @@ def test_hdf_reader2(seed_households, state): """ settings = yaml.load(settings_yaml, Loader=yaml.SafeLoader) - settings = configuration.Settings.parse_obj(settings) + settings = configuration.Settings.model_validate(settings) state.settings = settings hh_file = state.filesystem.get_data_dir()[0].joinpath("households.h5") @@ -145,7 +145,7 @@ def test_hdf_reader3(seed_households, state): """ settings = yaml.load(settings_yaml, Loader=yaml.SafeLoader) - settings = configuration.Settings.parse_obj(settings) + settings = configuration.Settings.model_validate(settings) state.settings = settings hh_file = 
state.filesystem.get_data_dir()[0].joinpath("input_data.h5") @@ -169,7 +169,7 @@ def test_missing_filename(seed_households, state): """ settings = yaml.load(settings_yaml, Loader=yaml.SafeLoader) - settings = configuration.Settings.parse_obj(settings) + settings = configuration.Settings.model_validate(settings) state.settings = settings with pytest.raises(AssertionError) as excinfo: @@ -191,7 +191,7 @@ def test_create_input_store(seed_households, state): """ settings = yaml.load(settings_yaml, Loader=yaml.SafeLoader) - settings = configuration.Settings.parse_obj(settings) + settings = configuration.Settings.model_validate(settings) state.settings = settings hh_file = state.filesystem.get_data_dir()[0].joinpath("households.csv") diff --git a/activitysim/core/test/test_util.py b/activitysim/core/test/test_util.py index ae9b4fa83..415ec1f9e 100644 --- a/activitysim/core/test/test_util.py +++ b/activitysim/core/test/test_util.py @@ -7,7 +7,7 @@ import pandas.testing as pdt import pytest -from ..util import other_than, quick_loc_df, quick_loc_series, reindex +from ..util import other_than, quick_loc_df, quick_loc_series, reindex, df_from_dict @pytest.fixture(scope="module") @@ -62,3 +62,30 @@ def test_quick_loc_series(): assert list(quick_loc_series(loc_list, series)) == attrib_list assert list(quick_loc_series(loc_list, series)) == list(series.loc[loc_list]) + + +def test_df_from_dict(): + + index = [1, 2, 3, 4, 5] + df = pd.DataFrame({"attrib": [1, 2, 2, 3, 1]}, index=index) + + # scramble index order for one expression and not the other + sorted = df.eval("attrib.sort_values()") + not_sorted = df.eval("attrib * 1") + + # check above expressions + pdt.assert_series_equal( + sorted, pd.Series([1, 1, 2, 2, 3], index=[1, 5, 2, 3, 4]), check_names=False + ) + pdt.assert_series_equal(not_sorted, df.attrib, check_names=False) + + # create a new dataframe from the above expressions + values = {"sorted": sorted, "not_sorted": not_sorted} + new_df = df_from_dict(values, index) + + # index should become unscrambed and back to the same order as before + expected_df = pd.DataFrame( + {"sorted": [1, 2, 2, 3, 1], "not_sorted": [1, 2, 2, 3, 1]}, index=index + ) + + pdt.assert_frame_equal(new_df, expected_df) diff --git a/activitysim/core/util.py b/activitysim/core/util.py index 221b82d7a..3130b18f5 100644 --- a/activitysim/core/util.py +++ b/activitysim/core/util.py @@ -27,7 +27,6 @@ def si_units(x, kind="B", digits=3, shift=1000): - # nano micro milli kilo mega giga tera peta exa zeta yotta tiers = ["n", "ยต", "m", "", "K", "M", "G", "T", "P", "E", "Z", "Y"] @@ -347,7 +346,6 @@ def assign_in_place(df, df2, downcast_int=False, downcast_float=False): # this is a hack fix for a bug in pandas.update # github.com/pydata/pandas/issues/4094 for c, old_dtype in zip(common_columns, old_dtypes): - # if both df and df2 column were same type, but result is not if (old_dtype == df2[c].dtype) and (df[c].dtype != old_dtype): try: @@ -452,7 +450,20 @@ def auto_opt_pd_dtypes( return df +def reindex_if_series(values, index): + if index is not None: + return values + + if isinstance(values, pd.Series): + assert len(set(values.index).intersection(index)) == len(index) + + if all(values.index != index): + return values.reindex(index=index) + + def df_from_dict(values, index=None): + # If value object is a series and has out of order index, reindex it + values = {k: reindex_if_series(v, index) for k, v in values.items()} df = pd.DataFrame.from_dict(values) if index is not None: @@ -522,7 +533,7 @@ def suffix_tables_in_settings( 
model_settings = recursive_replace(model_settings, k, suffix + k) if model_settings_type is not None: - model_settings = model_settings_type.parse_obj(model_settings) + model_settings = model_settings_type.model_validate(model_settings) return model_settings diff --git a/activitysim/core/workflow/state.py b/activitysim/core/workflow/state.py index 83c53b53f..f21810bb5 100644 --- a/activitysim/core/workflow/state.py +++ b/activitysim/core/workflow/state.py @@ -437,7 +437,7 @@ def initialize_filesystem( if cache_dir is not None: fs["cache_dir"] = cache_dir try: - self.filesystem: FileSystem = FileSystem.parse_obj(fs) + self.filesystem: FileSystem = FileSystem.model_validate(fs) except Exception as err: print(err) raise @@ -485,7 +485,7 @@ def load_settings(self) -> State: logger.warning(f"settings file changes cache_dir to {cache_dir}") self.filesystem.cache_dir = cache_dir settings_class = self.__class__.settings.member_type - self.settings: Settings = settings_class.parse_obj(raw_settings) + self.settings: Settings = settings_class.model_validate(raw_settings) extra_settings = set(self.settings.__dict__) - set(settings_class.__fields__) diff --git a/activitysim/estimation/larch/location_choice.py b/activitysim/estimation/larch/location_choice.py index fd61aea3d..bade2ef35 100644 --- a/activitysim/estimation/larch/location_choice.py +++ b/activitysim/estimation/larch/location_choice.py @@ -3,6 +3,8 @@ import os from pathlib import Path from typing import Collection +import pickle +from datetime import datetime import numpy as np import pandas as pd @@ -46,6 +48,8 @@ def location_choice_model( settings_file="{name}_model_settings.yaml", landuse_file="{name}_landuse.csv", return_data=False, + alt_values_to_feather=False, + chunking_size=None, ): model_selector = name.replace("_location", "") model_selector = model_selector.replace("_destination", "") @@ -59,12 +63,42 @@ def _read_csv(filename, **kwargs): filename = filename.format(name=name) return pd.read_csv(os.path.join(edb_directory, filename), **kwargs) + def _read_feather(filename, **kwargs): + filename = filename.format(name=name) + return pd.read_feather(os.path.join(edb_directory, filename), **kwargs) + + def _to_feather(df, filename, **kwargs): + filename = filename.format(name=name) + return df.to_feather(os.path.join(edb_directory, filename), **kwargs) + + def _read_pickle(filename, **kwargs): + filename = filename.format(name=name) + return pd.read_pickle(os.path.join(edb_directory, filename)) + + def _to_pickle(df, filename, **kwargs): + filename = filename.format(name=name) + return df.to_pickle(os.path.join(edb_directory, filename)) + + def _file_exists(filename): + filename = filename.format(name=name) + return os.path.exists(os.path.join(edb_directory, filename)) + coefficients = _read_csv( coefficients_file, index_col="coefficient_name", ) spec = _read_csv(spec_file, comment="#") - alt_values = _read_csv(alt_values_file) + + # read alternative values either as csv or feather file + alt_values_fea_file = alt_values_file.replace(".csv", ".fea") + if os.path.exists( + os.path.join(edb_directory, alt_values_fea_file.format(name=name)) + ): + alt_values = _read_feather(alt_values_fea_file) + else: + alt_values = _read_csv(alt_values_file) + if alt_values_to_feather: + _to_feather(df=alt_values, filename=alt_values_fea_file) chooser_data = _read_csv(chooser_file) landuse = _read_csv(landuse_file, index_col="zone_id") master_size_spec = _read_csv(size_spec_file) @@ -152,7 +186,48 @@ def _read_csv(filename, **kwargs): 
chooser_index_name = chooser_data.columns[0] x_co = chooser_data.set_index(chooser_index_name) - x_ca = cv_to_ca(alt_values.set_index([chooser_index_name, alt_values.columns[1]])) + + def split(a, n): + k, m = divmod(len(a), n) + return (a[i * k + min(i, m) : (i + 1) * k + min(i + 1, m)] for i in range(n)) + + # process x_ca with cv_to_ca with or without chunking + x_ca_pickle_file = "{name}_x_ca.pkl" + if chunking_size == None: + x_ca = cv_to_ca( + alt_values.set_index([chooser_index_name, alt_values.columns[1]]) + ) + elif _file_exists(x_ca_pickle_file): + # if pickle file from previous x_ca processing exist, load it to save time + time_start = datetime.now() + x_ca = _read_pickle(x_ca_pickle_file) + print( + f"x_ca data loaded from {name}_x_ca.fea - time elapsed {(datetime.now() - time_start).total_seconds()}" + ) + else: + time_start = datetime.now() + # calculate num_chunks based on chunking_size (or max number of rows per chunk) + num_chunks = int(len(alt_values) / chunking_size) + all_person_ids = list(alt_values["person_id"].unique()) + split_ids = list(split(all_person_ids, num_chunks)) + x_ca_list = [] + i = 0 + for chunk_ids in split_ids: + alt_values_i = alt_values[alt_values["person_id"].isin(chunk_ids)] + x_ca_i = cv_to_ca( + alt_values_i.set_index([chooser_index_name, alt_values_i.columns[1]]) + ) + x_ca_list.append(x_ca_i) + print( + f"\rx_ca_i compute done for chunk {i}/{num_chunks} - time elapsed {(datetime.now() - time_start).total_seconds()}" + ) + i = i + 1 + x_ca = pd.concat(x_ca_list, axis=0) + # save final x_ca result as pickle file to save time for future data loading + _to_pickle(df=x_ca, filename=x_ca_pickle_file) + print( + f"x_ca compute done - time elapsed {(datetime.now() - time_start).total_seconds()}" + ) if CHOOSER_SEGMENT_COLUMN_NAME is not None: # label segments with names diff --git a/activitysim/examples/placeholder_psrc/configs/non_mandatory_tour_frequency_alternatives.csv b/activitysim/examples/placeholder_psrc/configs/non_mandatory_tour_frequency_alternatives.csv index b9765aa75..09e89fae3 100755 --- a/activitysim/examples/placeholder_psrc/configs/non_mandatory_tour_frequency_alternatives.csv +++ b/activitysim/examples/placeholder_psrc/configs/non_mandatory_tour_frequency_alternatives.csv @@ -1,97 +1,97 @@ -escort,shopping,othmaint,othdiscr,eatout,social -0,0,0,0,0,0 -0,0,0,1,0,0 -0,0,0,0,0,1 -0,0,0,1,0,1 -0,0,0,0,1,0 -0,0,0,1,1,0 -0,0,0,0,1,1 -0,0,0,1,1,1 -0,0,1,0,0,0 -0,0,1,1,0,0 -0,0,1,0,0,1 -0,0,1,1,0,1 -0,0,1,0,1,0 -0,0,1,1,1,0 -0,0,1,0,1,1 -0,0,1,1,1,1 -0,1,0,0,0,0 -0,1,0,1,0,0 -0,1,0,0,0,1 -0,1,0,1,0,1 -0,1,0,0,1,0 -0,1,0,1,1,0 -0,1,0,0,1,1 -0,1,0,1,1,1 -0,1,1,0,0,0 -0,1,1,1,0,0 -0,1,1,0,0,1 -0,1,1,1,0,1 -0,1,1,0,1,0 -0,1,1,1,1,0 -0,1,1,0,1,1 -0,1,1,1,1,1 -1,0,0,0,0,0 -1,0,0,1,0,0 -1,0,0,0,0,1 -1,0,0,1,0,1 -1,0,0,0,1,0 -1,0,0,1,1,0 -1,0,0,0,1,1 -1,0,0,1,1,1 -1,0,1,0,0,0 -1,0,1,1,0,0 -1,0,1,0,0,1 -1,0,1,1,0,1 -1,0,1,0,1,0 -1,0,1,1,1,0 -1,0,1,0,1,1 -1,0,1,1,1,1 -1,1,0,0,0,0 -1,1,0,1,0,0 -1,1,0,0,0,1 -1,1,0,1,0,1 -1,1,0,0,1,0 -1,1,0,1,1,0 -1,1,0,0,1,1 -1,1,0,1,1,1 -1,1,1,0,0,0 -1,1,1,1,0,0 -1,1,1,0,0,1 -1,1,1,1,0,1 -1,1,1,0,1,0 -1,1,1,1,1,0 -1,1,1,0,1,1 -1,1,1,1,1,1 -2,0,0,0,0,0 -2,0,0,1,0,0 -2,0,0,0,0,1 -2,0,0,1,0,1 -2,0,0,0,1,0 -2,0,0,1,1,0 -2,0,0,0,1,1 -2,0,0,1,1,1 -2,0,1,0,0,0 -2,0,1,1,0,0 -2,0,1,0,0,1 -2,0,1,1,0,1 -2,0,1,0,1,0 -2,0,1,1,1,0 -2,0,1,0,1,1 -2,0,1,1,1,1 -2,1,0,0,0,0 -2,1,0,1,0,0 -2,1,0,0,0,1 -2,1,0,1,0,1 -2,1,0,0,1,0 -2,1,0,1,1,0 -2,1,0,0,1,1 -2,1,0,1,1,1 -2,1,1,0,0,0 -2,1,1,1,0,0 -2,1,1,0,0,1 -2,1,1,1,0,1 -2,1,1,0,1,0 
-2,1,1,1,1,0 -2,1,1,0,1,1 -2,1,1,1,1,1 +escort,shopping,othmaint,othdiscr,eatout,social,tot_tours +0,0,0,0,0,0,0 +0,0,0,1,0,0,1 +0,0,0,0,0,1,1 +0,0,0,1,0,1,2 +0,0,0,0,1,0,1 +0,0,0,1,1,0,2 +0,0,0,0,1,1,2 +0,0,0,1,1,1,3 +0,0,1,0,0,0,1 +0,0,1,1,0,0,2 +0,0,1,0,0,1,2 +0,0,1,1,0,1,3 +0,0,1,0,1,0,2 +0,0,1,1,1,0,3 +0,0,1,0,1,1,3 +0,0,1,1,1,1,4 +0,1,0,0,0,0,1 +0,1,0,1,0,0,2 +0,1,0,0,0,1,2 +0,1,0,1,0,1,3 +0,1,0,0,1,0,2 +0,1,0,1,1,0,3 +0,1,0,0,1,1,3 +0,1,0,1,1,1,4 +0,1,1,0,0,0,2 +0,1,1,1,0,0,3 +0,1,1,0,0,1,3 +0,1,1,1,0,1,4 +0,1,1,0,1,0,3 +0,1,1,1,1,0,4 +0,1,1,0,1,1,4 +0,1,1,1,1,1,5 +1,0,0,0,0,0,1 +1,0,0,1,0,0,2 +1,0,0,0,0,1,2 +1,0,0,1,0,1,3 +1,0,0,0,1,0,2 +1,0,0,1,1,0,3 +1,0,0,0,1,1,3 +1,0,0,1,1,1,4 +1,0,1,0,0,0,2 +1,0,1,1,0,0,3 +1,0,1,0,0,1,3 +1,0,1,1,0,1,4 +1,0,1,0,1,0,3 +1,0,1,1,1,0,4 +1,0,1,0,1,1,4 +1,0,1,1,1,1,5 +1,1,0,0,0,0,2 +1,1,0,1,0,0,3 +1,1,0,0,0,1,3 +1,1,0,1,0,1,4 +1,1,0,0,1,0,3 +1,1,0,1,1,0,4 +1,1,0,0,1,1,4 +1,1,0,1,1,1,5 +1,1,1,0,0,0,3 +1,1,1,1,0,0,4 +1,1,1,0,0,1,4 +1,1,1,1,0,1,5 +1,1,1,0,1,0,4 +1,1,1,1,1,0,5 +1,1,1,0,1,1,5 +1,1,1,1,1,1,6 +2,0,0,0,0,0,2 +2,0,0,1,0,0,3 +2,0,0,0,0,1,3 +2,0,0,1,0,1,4 +2,0,0,0,1,0,3 +2,0,0,1,1,0,4 +2,0,0,0,1,1,4 +2,0,0,1,1,1,5 +2,0,1,0,0,0,3 +2,0,1,1,0,0,4 +2,0,1,0,0,1,4 +2,0,1,1,0,1,5 +2,0,1,0,1,0,4 +2,0,1,1,1,0,5 +2,0,1,0,1,1,5 +2,0,1,1,1,1,6 +2,1,0,0,0,0,3 +2,1,0,1,0,0,4 +2,1,0,0,0,1,4 +2,1,0,1,0,1,5 +2,1,0,0,1,0,4 +2,1,0,1,1,0,5 +2,1,0,0,1,1,5 +2,1,0,1,1,1,6 +2,1,1,0,0,0,4 +2,1,1,1,0,0,5 +2,1,1,0,0,1,5 +2,1,1,1,0,1,6 +2,1,1,0,1,0,5 +2,1,1,1,1,0,6 +2,1,1,0,1,1,6 +2,1,1,1,1,1,7 diff --git a/activitysim/examples/production_semcog/configs/non_mandatory_tour_frequency_alternatives.csv b/activitysim/examples/production_semcog/configs/non_mandatory_tour_frequency_alternatives.csv index b9765aa75..09e89fae3 100644 --- a/activitysim/examples/production_semcog/configs/non_mandatory_tour_frequency_alternatives.csv +++ b/activitysim/examples/production_semcog/configs/non_mandatory_tour_frequency_alternatives.csv @@ -1,97 +1,97 @@ -escort,shopping,othmaint,othdiscr,eatout,social -0,0,0,0,0,0 -0,0,0,1,0,0 -0,0,0,0,0,1 -0,0,0,1,0,1 -0,0,0,0,1,0 -0,0,0,1,1,0 -0,0,0,0,1,1 -0,0,0,1,1,1 -0,0,1,0,0,0 -0,0,1,1,0,0 -0,0,1,0,0,1 -0,0,1,1,0,1 -0,0,1,0,1,0 -0,0,1,1,1,0 -0,0,1,0,1,1 -0,0,1,1,1,1 -0,1,0,0,0,0 -0,1,0,1,0,0 -0,1,0,0,0,1 -0,1,0,1,0,1 -0,1,0,0,1,0 -0,1,0,1,1,0 -0,1,0,0,1,1 -0,1,0,1,1,1 -0,1,1,0,0,0 -0,1,1,1,0,0 -0,1,1,0,0,1 -0,1,1,1,0,1 -0,1,1,0,1,0 -0,1,1,1,1,0 -0,1,1,0,1,1 -0,1,1,1,1,1 -1,0,0,0,0,0 -1,0,0,1,0,0 -1,0,0,0,0,1 -1,0,0,1,0,1 -1,0,0,0,1,0 -1,0,0,1,1,0 -1,0,0,0,1,1 -1,0,0,1,1,1 -1,0,1,0,0,0 -1,0,1,1,0,0 -1,0,1,0,0,1 -1,0,1,1,0,1 -1,0,1,0,1,0 -1,0,1,1,1,0 -1,0,1,0,1,1 -1,0,1,1,1,1 -1,1,0,0,0,0 -1,1,0,1,0,0 -1,1,0,0,0,1 -1,1,0,1,0,1 -1,1,0,0,1,0 -1,1,0,1,1,0 -1,1,0,0,1,1 -1,1,0,1,1,1 -1,1,1,0,0,0 -1,1,1,1,0,0 -1,1,1,0,0,1 -1,1,1,1,0,1 -1,1,1,0,1,0 -1,1,1,1,1,0 -1,1,1,0,1,1 -1,1,1,1,1,1 -2,0,0,0,0,0 -2,0,0,1,0,0 -2,0,0,0,0,1 -2,0,0,1,0,1 -2,0,0,0,1,0 -2,0,0,1,1,0 -2,0,0,0,1,1 -2,0,0,1,1,1 -2,0,1,0,0,0 -2,0,1,1,0,0 -2,0,1,0,0,1 -2,0,1,1,0,1 -2,0,1,0,1,0 -2,0,1,1,1,0 -2,0,1,0,1,1 -2,0,1,1,1,1 -2,1,0,0,0,0 -2,1,0,1,0,0 -2,1,0,0,0,1 -2,1,0,1,0,1 -2,1,0,0,1,0 -2,1,0,1,1,0 -2,1,0,0,1,1 -2,1,0,1,1,1 -2,1,1,0,0,0 -2,1,1,1,0,0 -2,1,1,0,0,1 -2,1,1,1,0,1 -2,1,1,0,1,0 -2,1,1,1,1,0 -2,1,1,0,1,1 -2,1,1,1,1,1 +escort,shopping,othmaint,othdiscr,eatout,social,tot_tours +0,0,0,0,0,0,0 +0,0,0,1,0,0,1 +0,0,0,0,0,1,1 +0,0,0,1,0,1,2 +0,0,0,0,1,0,1 +0,0,0,1,1,0,2 +0,0,0,0,1,1,2 +0,0,0,1,1,1,3 +0,0,1,0,0,0,1 +0,0,1,1,0,0,2 +0,0,1,0,0,1,2 +0,0,1,1,0,1,3 +0,0,1,0,1,0,2 +0,0,1,1,1,0,3 
+0,0,1,0,1,1,3 +0,0,1,1,1,1,4 +0,1,0,0,0,0,1 +0,1,0,1,0,0,2 +0,1,0,0,0,1,2 +0,1,0,1,0,1,3 +0,1,0,0,1,0,2 +0,1,0,1,1,0,3 +0,1,0,0,1,1,3 +0,1,0,1,1,1,4 +0,1,1,0,0,0,2 +0,1,1,1,0,0,3 +0,1,1,0,0,1,3 +0,1,1,1,0,1,4 +0,1,1,0,1,0,3 +0,1,1,1,1,0,4 +0,1,1,0,1,1,4 +0,1,1,1,1,1,5 +1,0,0,0,0,0,1 +1,0,0,1,0,0,2 +1,0,0,0,0,1,2 +1,0,0,1,0,1,3 +1,0,0,0,1,0,2 +1,0,0,1,1,0,3 +1,0,0,0,1,1,3 +1,0,0,1,1,1,4 +1,0,1,0,0,0,2 +1,0,1,1,0,0,3 +1,0,1,0,0,1,3 +1,0,1,1,0,1,4 +1,0,1,0,1,0,3 +1,0,1,1,1,0,4 +1,0,1,0,1,1,4 +1,0,1,1,1,1,5 +1,1,0,0,0,0,2 +1,1,0,1,0,0,3 +1,1,0,0,0,1,3 +1,1,0,1,0,1,4 +1,1,0,0,1,0,3 +1,1,0,1,1,0,4 +1,1,0,0,1,1,4 +1,1,0,1,1,1,5 +1,1,1,0,0,0,3 +1,1,1,1,0,0,4 +1,1,1,0,0,1,4 +1,1,1,1,0,1,5 +1,1,1,0,1,0,4 +1,1,1,1,1,0,5 +1,1,1,0,1,1,5 +1,1,1,1,1,1,6 +2,0,0,0,0,0,2 +2,0,0,1,0,0,3 +2,0,0,0,0,1,3 +2,0,0,1,0,1,4 +2,0,0,0,1,0,3 +2,0,0,1,1,0,4 +2,0,0,0,1,1,4 +2,0,0,1,1,1,5 +2,0,1,0,0,0,3 +2,0,1,1,0,0,4 +2,0,1,0,0,1,4 +2,0,1,1,0,1,5 +2,0,1,0,1,0,4 +2,0,1,1,1,0,5 +2,0,1,0,1,1,5 +2,0,1,1,1,1,6 +2,1,0,0,0,0,3 +2,1,0,1,0,0,4 +2,1,0,0,0,1,4 +2,1,0,1,0,1,5 +2,1,0,0,1,0,4 +2,1,0,1,1,0,5 +2,1,0,0,1,1,5 +2,1,0,1,1,1,6 +2,1,1,0,0,0,4 +2,1,1,1,0,0,5 +2,1,1,0,0,1,5 +2,1,1,1,0,1,6 +2,1,1,0,1,0,5 +2,1,1,1,1,0,6 +2,1,1,0,1,1,6 +2,1,1,1,1,1,7 diff --git a/activitysim/examples/prototype_arc/configs/non_mandatory_tour_frequency_alternatives.csv b/activitysim/examples/prototype_arc/configs/non_mandatory_tour_frequency_alternatives.csv index b9765aa75..09e89fae3 100644 --- a/activitysim/examples/prototype_arc/configs/non_mandatory_tour_frequency_alternatives.csv +++ b/activitysim/examples/prototype_arc/configs/non_mandatory_tour_frequency_alternatives.csv @@ -1,97 +1,97 @@ -escort,shopping,othmaint,othdiscr,eatout,social -0,0,0,0,0,0 -0,0,0,1,0,0 -0,0,0,0,0,1 -0,0,0,1,0,1 -0,0,0,0,1,0 -0,0,0,1,1,0 -0,0,0,0,1,1 -0,0,0,1,1,1 -0,0,1,0,0,0 -0,0,1,1,0,0 -0,0,1,0,0,1 -0,0,1,1,0,1 -0,0,1,0,1,0 -0,0,1,1,1,0 -0,0,1,0,1,1 -0,0,1,1,1,1 -0,1,0,0,0,0 -0,1,0,1,0,0 -0,1,0,0,0,1 -0,1,0,1,0,1 -0,1,0,0,1,0 -0,1,0,1,1,0 -0,1,0,0,1,1 -0,1,0,1,1,1 -0,1,1,0,0,0 -0,1,1,1,0,0 -0,1,1,0,0,1 -0,1,1,1,0,1 -0,1,1,0,1,0 -0,1,1,1,1,0 -0,1,1,0,1,1 -0,1,1,1,1,1 -1,0,0,0,0,0 -1,0,0,1,0,0 -1,0,0,0,0,1 -1,0,0,1,0,1 -1,0,0,0,1,0 -1,0,0,1,1,0 -1,0,0,0,1,1 -1,0,0,1,1,1 -1,0,1,0,0,0 -1,0,1,1,0,0 -1,0,1,0,0,1 -1,0,1,1,0,1 -1,0,1,0,1,0 -1,0,1,1,1,0 -1,0,1,0,1,1 -1,0,1,1,1,1 -1,1,0,0,0,0 -1,1,0,1,0,0 -1,1,0,0,0,1 -1,1,0,1,0,1 -1,1,0,0,1,0 -1,1,0,1,1,0 -1,1,0,0,1,1 -1,1,0,1,1,1 -1,1,1,0,0,0 -1,1,1,1,0,0 -1,1,1,0,0,1 -1,1,1,1,0,1 -1,1,1,0,1,0 -1,1,1,1,1,0 -1,1,1,0,1,1 -1,1,1,1,1,1 -2,0,0,0,0,0 -2,0,0,1,0,0 -2,0,0,0,0,1 -2,0,0,1,0,1 -2,0,0,0,1,0 -2,0,0,1,1,0 -2,0,0,0,1,1 -2,0,0,1,1,1 -2,0,1,0,0,0 -2,0,1,1,0,0 -2,0,1,0,0,1 -2,0,1,1,0,1 -2,0,1,0,1,0 -2,0,1,1,1,0 -2,0,1,0,1,1 -2,0,1,1,1,1 -2,1,0,0,0,0 -2,1,0,1,0,0 -2,1,0,0,0,1 -2,1,0,1,0,1 -2,1,0,0,1,0 -2,1,0,1,1,0 -2,1,0,0,1,1 -2,1,0,1,1,1 -2,1,1,0,0,0 -2,1,1,1,0,0 -2,1,1,0,0,1 -2,1,1,1,0,1 -2,1,1,0,1,0 -2,1,1,1,1,0 -2,1,1,0,1,1 -2,1,1,1,1,1 +escort,shopping,othmaint,othdiscr,eatout,social,tot_tours +0,0,0,0,0,0,0 +0,0,0,1,0,0,1 +0,0,0,0,0,1,1 +0,0,0,1,0,1,2 +0,0,0,0,1,0,1 +0,0,0,1,1,0,2 +0,0,0,0,1,1,2 +0,0,0,1,1,1,3 +0,0,1,0,0,0,1 +0,0,1,1,0,0,2 +0,0,1,0,0,1,2 +0,0,1,1,0,1,3 +0,0,1,0,1,0,2 +0,0,1,1,1,0,3 +0,0,1,0,1,1,3 +0,0,1,1,1,1,4 +0,1,0,0,0,0,1 +0,1,0,1,0,0,2 +0,1,0,0,0,1,2 +0,1,0,1,0,1,3 +0,1,0,0,1,0,2 +0,1,0,1,1,0,3 +0,1,0,0,1,1,3 +0,1,0,1,1,1,4 +0,1,1,0,0,0,2 +0,1,1,1,0,0,3 +0,1,1,0,0,1,3 +0,1,1,1,0,1,4 +0,1,1,0,1,0,3 +0,1,1,1,1,0,4 +0,1,1,0,1,1,4 +0,1,1,1,1,1,5 +1,0,0,0,0,0,1 +1,0,0,1,0,0,2 +1,0,0,0,0,1,2 
+1,0,0,1,0,1,3 +1,0,0,0,1,0,2 +1,0,0,1,1,0,3 +1,0,0,0,1,1,3 +1,0,0,1,1,1,4 +1,0,1,0,0,0,2 +1,0,1,1,0,0,3 +1,0,1,0,0,1,3 +1,0,1,1,0,1,4 +1,0,1,0,1,0,3 +1,0,1,1,1,0,4 +1,0,1,0,1,1,4 +1,0,1,1,1,1,5 +1,1,0,0,0,0,2 +1,1,0,1,0,0,3 +1,1,0,0,0,1,3 +1,1,0,1,0,1,4 +1,1,0,0,1,0,3 +1,1,0,1,1,0,4 +1,1,0,0,1,1,4 +1,1,0,1,1,1,5 +1,1,1,0,0,0,3 +1,1,1,1,0,0,4 +1,1,1,0,0,1,4 +1,1,1,1,0,1,5 +1,1,1,0,1,0,4 +1,1,1,1,1,0,5 +1,1,1,0,1,1,5 +1,1,1,1,1,1,6 +2,0,0,0,0,0,2 +2,0,0,1,0,0,3 +2,0,0,0,0,1,3 +2,0,0,1,0,1,4 +2,0,0,0,1,0,3 +2,0,0,1,1,0,4 +2,0,0,0,1,1,4 +2,0,0,1,1,1,5 +2,0,1,0,0,0,3 +2,0,1,1,0,0,4 +2,0,1,0,0,1,4 +2,0,1,1,0,1,5 +2,0,1,0,1,0,4 +2,0,1,1,1,0,5 +2,0,1,0,1,1,5 +2,0,1,1,1,1,6 +2,1,0,0,0,0,3 +2,1,0,1,0,0,4 +2,1,0,0,0,1,4 +2,1,0,1,0,1,5 +2,1,0,0,1,0,4 +2,1,0,1,1,0,5 +2,1,0,0,1,1,5 +2,1,0,1,1,1,6 +2,1,1,0,0,0,4 +2,1,1,1,0,0,5 +2,1,1,0,0,1,5 +2,1,1,1,0,1,6 +2,1,1,0,1,0,5 +2,1,1,1,1,0,6 +2,1,1,0,1,1,6 +2,1,1,1,1,1,7 diff --git a/activitysim/examples/prototype_mtc/configs/non_mandatory_tour_frequency_alternatives.csv b/activitysim/examples/prototype_mtc/configs/non_mandatory_tour_frequency_alternatives.csv index b9765aa75..09e89fae3 100644 --- a/activitysim/examples/prototype_mtc/configs/non_mandatory_tour_frequency_alternatives.csv +++ b/activitysim/examples/prototype_mtc/configs/non_mandatory_tour_frequency_alternatives.csv @@ -1,97 +1,97 @@ -escort,shopping,othmaint,othdiscr,eatout,social -0,0,0,0,0,0 -0,0,0,1,0,0 -0,0,0,0,0,1 -0,0,0,1,0,1 -0,0,0,0,1,0 -0,0,0,1,1,0 -0,0,0,0,1,1 -0,0,0,1,1,1 -0,0,1,0,0,0 -0,0,1,1,0,0 -0,0,1,0,0,1 -0,0,1,1,0,1 -0,0,1,0,1,0 -0,0,1,1,1,0 -0,0,1,0,1,1 -0,0,1,1,1,1 -0,1,0,0,0,0 -0,1,0,1,0,0 -0,1,0,0,0,1 -0,1,0,1,0,1 -0,1,0,0,1,0 -0,1,0,1,1,0 -0,1,0,0,1,1 -0,1,0,1,1,1 -0,1,1,0,0,0 -0,1,1,1,0,0 -0,1,1,0,0,1 -0,1,1,1,0,1 -0,1,1,0,1,0 -0,1,1,1,1,0 -0,1,1,0,1,1 -0,1,1,1,1,1 -1,0,0,0,0,0 -1,0,0,1,0,0 -1,0,0,0,0,1 -1,0,0,1,0,1 -1,0,0,0,1,0 -1,0,0,1,1,0 -1,0,0,0,1,1 -1,0,0,1,1,1 -1,0,1,0,0,0 -1,0,1,1,0,0 -1,0,1,0,0,1 -1,0,1,1,0,1 -1,0,1,0,1,0 -1,0,1,1,1,0 -1,0,1,0,1,1 -1,0,1,1,1,1 -1,1,0,0,0,0 -1,1,0,1,0,0 -1,1,0,0,0,1 -1,1,0,1,0,1 -1,1,0,0,1,0 -1,1,0,1,1,0 -1,1,0,0,1,1 -1,1,0,1,1,1 -1,1,1,0,0,0 -1,1,1,1,0,0 -1,1,1,0,0,1 -1,1,1,1,0,1 -1,1,1,0,1,0 -1,1,1,1,1,0 -1,1,1,0,1,1 -1,1,1,1,1,1 -2,0,0,0,0,0 -2,0,0,1,0,0 -2,0,0,0,0,1 -2,0,0,1,0,1 -2,0,0,0,1,0 -2,0,0,1,1,0 -2,0,0,0,1,1 -2,0,0,1,1,1 -2,0,1,0,0,0 -2,0,1,1,0,0 -2,0,1,0,0,1 -2,0,1,1,0,1 -2,0,1,0,1,0 -2,0,1,1,1,0 -2,0,1,0,1,1 -2,0,1,1,1,1 -2,1,0,0,0,0 -2,1,0,1,0,0 -2,1,0,0,0,1 -2,1,0,1,0,1 -2,1,0,0,1,0 -2,1,0,1,1,0 -2,1,0,0,1,1 -2,1,0,1,1,1 -2,1,1,0,0,0 -2,1,1,1,0,0 -2,1,1,0,0,1 -2,1,1,1,0,1 -2,1,1,0,1,0 -2,1,1,1,1,0 -2,1,1,0,1,1 -2,1,1,1,1,1 +escort,shopping,othmaint,othdiscr,eatout,social,tot_tours +0,0,0,0,0,0,0 +0,0,0,1,0,0,1 +0,0,0,0,0,1,1 +0,0,0,1,0,1,2 +0,0,0,0,1,0,1 +0,0,0,1,1,0,2 +0,0,0,0,1,1,2 +0,0,0,1,1,1,3 +0,0,1,0,0,0,1 +0,0,1,1,0,0,2 +0,0,1,0,0,1,2 +0,0,1,1,0,1,3 +0,0,1,0,1,0,2 +0,0,1,1,1,0,3 +0,0,1,0,1,1,3 +0,0,1,1,1,1,4 +0,1,0,0,0,0,1 +0,1,0,1,0,0,2 +0,1,0,0,0,1,2 +0,1,0,1,0,1,3 +0,1,0,0,1,0,2 +0,1,0,1,1,0,3 +0,1,0,0,1,1,3 +0,1,0,1,1,1,4 +0,1,1,0,0,0,2 +0,1,1,1,0,0,3 +0,1,1,0,0,1,3 +0,1,1,1,0,1,4 +0,1,1,0,1,0,3 +0,1,1,1,1,0,4 +0,1,1,0,1,1,4 +0,1,1,1,1,1,5 +1,0,0,0,0,0,1 +1,0,0,1,0,0,2 +1,0,0,0,0,1,2 +1,0,0,1,0,1,3 +1,0,0,0,1,0,2 +1,0,0,1,1,0,3 +1,0,0,0,1,1,3 +1,0,0,1,1,1,4 +1,0,1,0,0,0,2 +1,0,1,1,0,0,3 +1,0,1,0,0,1,3 +1,0,1,1,0,1,4 +1,0,1,0,1,0,3 +1,0,1,1,1,0,4 +1,0,1,0,1,1,4 +1,0,1,1,1,1,5 +1,1,0,0,0,0,2 +1,1,0,1,0,0,3 +1,1,0,0,0,1,3 +1,1,0,1,0,1,4 +1,1,0,0,1,0,3 +1,1,0,1,1,0,4 +1,1,0,0,1,1,4 +1,1,0,1,1,1,5 
+1,1,1,0,0,0,3 +1,1,1,1,0,0,4 +1,1,1,0,0,1,4 +1,1,1,1,0,1,5 +1,1,1,0,1,0,4 +1,1,1,1,1,0,5 +1,1,1,0,1,1,5 +1,1,1,1,1,1,6 +2,0,0,0,0,0,2 +2,0,0,1,0,0,3 +2,0,0,0,0,1,3 +2,0,0,1,0,1,4 +2,0,0,0,1,0,3 +2,0,0,1,1,0,4 +2,0,0,0,1,1,4 +2,0,0,1,1,1,5 +2,0,1,0,0,0,3 +2,0,1,1,0,0,4 +2,0,1,0,0,1,4 +2,0,1,1,0,1,5 +2,0,1,0,1,0,4 +2,0,1,1,1,0,5 +2,0,1,0,1,1,5 +2,0,1,1,1,1,6 +2,1,0,0,0,0,3 +2,1,0,1,0,0,4 +2,1,0,0,0,1,4 +2,1,0,1,0,1,5 +2,1,0,0,1,0,4 +2,1,0,1,1,0,5 +2,1,0,0,1,1,5 +2,1,0,1,1,1,6 +2,1,1,0,0,0,4 +2,1,1,1,0,0,5 +2,1,1,0,0,1,5 +2,1,1,1,0,1,6 +2,1,1,0,1,0,5 +2,1,1,1,1,0,6 +2,1,1,0,1,1,6 +2,1,1,1,1,1,7 diff --git a/activitysim/examples/prototype_mtc_extended/configs/non_mandatory_tour_frequency_alternatives.csv b/activitysim/examples/prototype_mtc_extended/configs/non_mandatory_tour_frequency_alternatives.csv index 0bea47c6f..be633e649 100644 --- a/activitysim/examples/prototype_mtc_extended/configs/non_mandatory_tour_frequency_alternatives.csv +++ b/activitysim/examples/prototype_mtc_extended/configs/non_mandatory_tour_frequency_alternatives.csv @@ -1,100 +1,100 @@ -escort,shopping,othmaint,othdiscr,eatout,social -0,0,0,0,0,0 -0,0,0,1,0,0 -0,0,0,0,0,1 -0,0,0,1,0,1 -0,0,0,0,1,0 -0,0,0,1,1,0 -0,0,0,0,1,1 -0,0,0,1,1,1 -0,0,1,0,0,0 -0,0,1,1,0,0 -0,0,1,0,0,1 -0,0,1,1,0,1 -0,0,1,0,1,0 -0,0,1,1,1,0 -0,0,1,0,1,1 -0,0,1,1,1,1 -0,1,0,0,0,0 -0,1,0,1,0,0 -0,1,0,0,0,1 -0,1,0,1,0,1 -0,1,0,0,1,0 -0,1,0,1,1,0 -0,1,0,0,1,1 -0,1,0,1,1,1 -0,1,1,0,0,0 -0,1,1,1,0,0 -0,1,1,0,0,1 -0,1,1,1,0,1 -0,1,1,0,1,0 -0,1,1,1,1,0 -0,1,1,0,1,1 -0,1,1,1,1,1 -1,0,0,0,0,0 -1,0,0,1,0,0 -1,0,0,0,0,1 -1,0,0,1,0,1 -1,0,0,0,1,0 -1,0,0,1,1,0 -1,0,0,0,1,1 -1,0,0,1,1,1 -1,0,1,0,0,0 -1,0,1,1,0,0 -1,0,1,0,0,1 -1,0,1,1,0,1 -1,0,1,0,1,0 -1,0,1,1,1,0 -1,0,1,0,1,1 -1,0,1,1,1,1 -1,1,0,0,0,0 -1,1,0,1,0,0 -1,1,0,0,0,1 -1,1,0,1,0,1 -1,1,0,0,1,0 -1,1,0,1,1,0 -1,1,0,0,1,1 -1,1,0,1,1,1 -1,1,1,0,0,0 -1,1,1,1,0,0 -1,1,1,0,0,1 -1,1,1,1,0,1 -1,1,1,0,1,0 -1,1,1,1,1,0 -1,1,1,0,1,1 -1,1,1,1,1,1 -2,0,0,0,0,0 -2,0,0,1,0,0 -2,0,0,0,0,1 -2,0,0,1,0,1 -2,0,0,0,1,0 -2,0,0,1,1,0 -2,0,0,0,1,1 -2,0,0,1,1,1 -2,0,1,0,0,0 -2,0,1,1,0,0 -2,0,1,0,0,1 -2,0,1,1,0,1 -2,0,1,0,1,0 -2,0,1,1,1,0 -2,0,1,0,1,1 -2,0,1,1,1,1 -2,1,0,0,0,0 -2,1,0,1,0,0 -2,1,0,0,0,1 -2,1,0,1,0,1 -2,1,0,0,1,0 -2,1,0,1,1,0 -2,1,0,0,1,1 -2,1,0,1,1,1 -2,1,1,0,0,0 -2,1,1,1,0,0 -2,1,1,0,0,1 -2,1,1,1,0,1 -2,1,1,0,1,0 -2,1,1,1,1,0 -2,1,1,0,1,1 -2,1,1,1,1,1 -# extension for flexible ids demonstration,,,,, -# should be removed for actual model run,,,,, -0,0,0,2,0,0 +escort,shopping,othmaint,othdiscr,eatout,social,tot_tours +0,0,0,0,0,0,0 +0,0,0,1,0,0,1 +0,0,0,0,0,1,1 +0,0,0,1,0,1,2 +0,0,0,0,1,0,1 +0,0,0,1,1,0,2 +0,0,0,0,1,1,2 +0,0,0,1,1,1,3 +0,0,1,0,0,0,1 +0,0,1,1,0,0,2 +0,0,1,0,0,1,2 +0,0,1,1,0,1,3 +0,0,1,0,1,0,2 +0,0,1,1,1,0,3 +0,0,1,0,1,1,3 +0,0,1,1,1,1,4 +0,1,0,0,0,0,1 +0,1,0,1,0,0,2 +0,1,0,0,0,1,2 +0,1,0,1,0,1,3 +0,1,0,0,1,0,2 +0,1,0,1,1,0,3 +0,1,0,0,1,1,3 +0,1,0,1,1,1,4 +0,1,1,0,0,0,2 +0,1,1,1,0,0,3 +0,1,1,0,0,1,3 +0,1,1,1,0,1,4 +0,1,1,0,1,0,3 +0,1,1,1,1,0,4 +0,1,1,0,1,1,4 +0,1,1,1,1,1,5 +1,0,0,0,0,0,1 +1,0,0,1,0,0,2 +1,0,0,0,0,1,2 +1,0,0,1,0,1,3 +1,0,0,0,1,0,2 +1,0,0,1,1,0,3 +1,0,0,0,1,1,3 +1,0,0,1,1,1,4 +1,0,1,0,0,0,2 +1,0,1,1,0,0,3 +1,0,1,0,0,1,3 +1,0,1,1,0,1,4 +1,0,1,0,1,0,3 +1,0,1,1,1,0,4 +1,0,1,0,1,1,4 +1,0,1,1,1,1,5 +1,1,0,0,0,0,2 +1,1,0,1,0,0,3 +1,1,0,0,0,1,3 +1,1,0,1,0,1,4 +1,1,0,0,1,0,3 +1,1,0,1,1,0,4 +1,1,0,0,1,1,4 +1,1,0,1,1,1,5 +1,1,1,0,0,0,3 +1,1,1,1,0,0,4 +1,1,1,0,0,1,4 +1,1,1,1,0,1,5 +1,1,1,0,1,0,4 +1,1,1,1,1,0,5 +1,1,1,0,1,1,5 +1,1,1,1,1,1,6 +2,0,0,0,0,0,2 +2,0,0,1,0,0,3 +2,0,0,0,0,1,3 
+2,0,0,1,0,1,4 +2,0,0,0,1,0,3 +2,0,0,1,1,0,4 +2,0,0,0,1,1,4 +2,0,0,1,1,1,5 +2,0,1,0,0,0,3 +2,0,1,1,0,0,4 +2,0,1,0,0,1,4 +2,0,1,1,0,1,5 +2,0,1,0,1,0,4 +2,0,1,1,1,0,5 +2,0,1,0,1,1,5 +2,0,1,1,1,1,6 +2,1,0,0,0,0,3 +2,1,0,1,0,0,4 +2,1,0,0,0,1,4 +2,1,0,1,0,1,5 +2,1,0,0,1,0,4 +2,1,0,1,1,0,5 +2,1,0,0,1,1,5 +2,1,0,1,1,1,6 +2,1,1,0,0,0,4 +2,1,1,1,0,0,5 +2,1,1,0,0,1,5 +2,1,1,1,0,1,6 +2,1,1,0,1,0,5 +2,1,1,1,1,0,6 +2,1,1,0,1,1,6 +2,1,1,1,1,1,7 +# extension for flexible ids demonstration,,,,,,0 +# should be removed for actual model run,,,,,,0 +0,0,0,2,0,0,2 diff --git a/activitysim/examples/prototype_mwcog/configs/non_mandatory_tour_frequency_alternatives.csv b/activitysim/examples/prototype_mwcog/configs/non_mandatory_tour_frequency_alternatives.csv index b9765aa75..09e89fae3 100644 --- a/activitysim/examples/prototype_mwcog/configs/non_mandatory_tour_frequency_alternatives.csv +++ b/activitysim/examples/prototype_mwcog/configs/non_mandatory_tour_frequency_alternatives.csv @@ -1,97 +1,97 @@ -escort,shopping,othmaint,othdiscr,eatout,social -0,0,0,0,0,0 -0,0,0,1,0,0 -0,0,0,0,0,1 -0,0,0,1,0,1 -0,0,0,0,1,0 -0,0,0,1,1,0 -0,0,0,0,1,1 -0,0,0,1,1,1 -0,0,1,0,0,0 -0,0,1,1,0,0 -0,0,1,0,0,1 -0,0,1,1,0,1 -0,0,1,0,1,0 -0,0,1,1,1,0 -0,0,1,0,1,1 -0,0,1,1,1,1 -0,1,0,0,0,0 -0,1,0,1,0,0 -0,1,0,0,0,1 -0,1,0,1,0,1 -0,1,0,0,1,0 -0,1,0,1,1,0 -0,1,0,0,1,1 -0,1,0,1,1,1 -0,1,1,0,0,0 -0,1,1,1,0,0 -0,1,1,0,0,1 -0,1,1,1,0,1 -0,1,1,0,1,0 -0,1,1,1,1,0 -0,1,1,0,1,1 -0,1,1,1,1,1 -1,0,0,0,0,0 -1,0,0,1,0,0 -1,0,0,0,0,1 -1,0,0,1,0,1 -1,0,0,0,1,0 -1,0,0,1,1,0 -1,0,0,0,1,1 -1,0,0,1,1,1 -1,0,1,0,0,0 -1,0,1,1,0,0 -1,0,1,0,0,1 -1,0,1,1,0,1 -1,0,1,0,1,0 -1,0,1,1,1,0 -1,0,1,0,1,1 -1,0,1,1,1,1 -1,1,0,0,0,0 -1,1,0,1,0,0 -1,1,0,0,0,1 -1,1,0,1,0,1 -1,1,0,0,1,0 -1,1,0,1,1,0 -1,1,0,0,1,1 -1,1,0,1,1,1 -1,1,1,0,0,0 -1,1,1,1,0,0 -1,1,1,0,0,1 -1,1,1,1,0,1 -1,1,1,0,1,0 -1,1,1,1,1,0 -1,1,1,0,1,1 -1,1,1,1,1,1 -2,0,0,0,0,0 -2,0,0,1,0,0 -2,0,0,0,0,1 -2,0,0,1,0,1 -2,0,0,0,1,0 -2,0,0,1,1,0 -2,0,0,0,1,1 -2,0,0,1,1,1 -2,0,1,0,0,0 -2,0,1,1,0,0 -2,0,1,0,0,1 -2,0,1,1,0,1 -2,0,1,0,1,0 -2,0,1,1,1,0 -2,0,1,0,1,1 -2,0,1,1,1,1 -2,1,0,0,0,0 -2,1,0,1,0,0 -2,1,0,0,0,1 -2,1,0,1,0,1 -2,1,0,0,1,0 -2,1,0,1,1,0 -2,1,0,0,1,1 -2,1,0,1,1,1 -2,1,1,0,0,0 -2,1,1,1,0,0 -2,1,1,0,0,1 -2,1,1,1,0,1 -2,1,1,0,1,0 -2,1,1,1,1,0 -2,1,1,0,1,1 -2,1,1,1,1,1 +escort,shopping,othmaint,othdiscr,eatout,social,tot_tours +0,0,0,0,0,0,0 +0,0,0,1,0,0,1 +0,0,0,0,0,1,1 +0,0,0,1,0,1,2 +0,0,0,0,1,0,1 +0,0,0,1,1,0,2 +0,0,0,0,1,1,2 +0,0,0,1,1,1,3 +0,0,1,0,0,0,1 +0,0,1,1,0,0,2 +0,0,1,0,0,1,2 +0,0,1,1,0,1,3 +0,0,1,0,1,0,2 +0,0,1,1,1,0,3 +0,0,1,0,1,1,3 +0,0,1,1,1,1,4 +0,1,0,0,0,0,1 +0,1,0,1,0,0,2 +0,1,0,0,0,1,2 +0,1,0,1,0,1,3 +0,1,0,0,1,0,2 +0,1,0,1,1,0,3 +0,1,0,0,1,1,3 +0,1,0,1,1,1,4 +0,1,1,0,0,0,2 +0,1,1,1,0,0,3 +0,1,1,0,0,1,3 +0,1,1,1,0,1,4 +0,1,1,0,1,0,3 +0,1,1,1,1,0,4 +0,1,1,0,1,1,4 +0,1,1,1,1,1,5 +1,0,0,0,0,0,1 +1,0,0,1,0,0,2 +1,0,0,0,0,1,2 +1,0,0,1,0,1,3 +1,0,0,0,1,0,2 +1,0,0,1,1,0,3 +1,0,0,0,1,1,3 +1,0,0,1,1,1,4 +1,0,1,0,0,0,2 +1,0,1,1,0,0,3 +1,0,1,0,0,1,3 +1,0,1,1,0,1,4 +1,0,1,0,1,0,3 +1,0,1,1,1,0,4 +1,0,1,0,1,1,4 +1,0,1,1,1,1,5 +1,1,0,0,0,0,2 +1,1,0,1,0,0,3 +1,1,0,0,0,1,3 +1,1,0,1,0,1,4 +1,1,0,0,1,0,3 +1,1,0,1,1,0,4 +1,1,0,0,1,1,4 +1,1,0,1,1,1,5 +1,1,1,0,0,0,3 +1,1,1,1,0,0,4 +1,1,1,0,0,1,4 +1,1,1,1,0,1,5 +1,1,1,0,1,0,4 +1,1,1,1,1,0,5 +1,1,1,0,1,1,5 +1,1,1,1,1,1,6 +2,0,0,0,0,0,2 +2,0,0,1,0,0,3 +2,0,0,0,0,1,3 +2,0,0,1,0,1,4 +2,0,0,0,1,0,3 +2,0,0,1,1,0,4 +2,0,0,0,1,1,4 +2,0,0,1,1,1,5 +2,0,1,0,0,0,3 +2,0,1,1,0,0,4 +2,0,1,0,0,1,4 +2,0,1,1,0,1,5 +2,0,1,0,1,0,4 +2,0,1,1,1,0,5 +2,0,1,0,1,1,5 +2,0,1,1,1,1,6 
+2,1,0,0,0,0,3 +2,1,0,1,0,0,4 +2,1,0,0,0,1,4 +2,1,0,1,0,1,5 +2,1,0,0,1,0,4 +2,1,0,1,1,0,5 +2,1,0,0,1,1,5 +2,1,0,1,1,1,6 +2,1,1,0,0,0,4 +2,1,1,1,0,0,5 +2,1,1,0,0,1,5 +2,1,1,1,0,1,6 +2,1,1,0,1,0,5 +2,1,1,1,1,0,6 +2,1,1,0,1,1,6 +2,1,1,1,1,1,7 diff --git a/conda-environments/activitysim-dev-base.yml b/conda-environments/activitysim-dev-base.yml index 7a94ae4e1..ec1f2e0f2 100644 --- a/conda-environments/activitysim-dev-base.yml +++ b/conda-environments/activitysim-dev-base.yml @@ -49,7 +49,7 @@ dependencies: - psutil = 5.9.* - pyarrow = 11.* - pycodestyle -- pydantic = 1.10.* +- pydantic = 2.6.* - pydata-sphinx-theme - pyinstrument = 4.4 - pypyr = 5.8.* @@ -75,4 +75,4 @@ dependencies: - zstandard - pip: - - autodoc_pydantic >=1.9,<2.0 + - autodoc_pydantic diff --git a/conda-environments/activitysim-dev.yml b/conda-environments/activitysim-dev.yml index dd1437581..f805b6e36 100644 --- a/conda-environments/activitysim-dev.yml +++ b/conda-environments/activitysim-dev.yml @@ -31,6 +31,7 @@ dependencies: - myst-parser # allows markdown in sphinx - nbconvert - nbformat +- nbmake - numba = 0.56.* - numexpr - numpy = 1.23.* @@ -44,7 +45,7 @@ dependencies: - psutil = 5.9.* - pyarrow = 11.* - pycodestyle -- pydantic = 1.10.* +- pydantic = 2.6.* - pydata-sphinx-theme - pyinstrument = 4.4 - pypyr = 5.8.* @@ -71,5 +72,5 @@ dependencies: - zstandard - pip: - - autodoc_pydantic >=1.9,<2.0 + - autodoc_pydantic - -e .. diff --git a/conda-environments/docbuild.yml b/conda-environments/docbuild.yml index 2e289fa9f..c738ab3a1 100644 --- a/conda-environments/docbuild.yml +++ b/conda-environments/docbuild.yml @@ -36,7 +36,7 @@ dependencies: - platformdirs - psutil >= 4.1 - pyarrow >= 2.0 -- pydantic = 1.10.* +- pydantic = 2.6.* - pypyr >= 5.3 - pytables >=3.7 - pytest @@ -56,5 +56,5 @@ dependencies: - zarr - pip: - - autodoc_pydantic >=1.9,<2.0 + - autodoc_pydantic - -e .. diff --git a/conda-environments/github-actions-tests.yml b/conda-environments/github-actions-tests.yml index 3636f4bc0..5edb8fef7 100644 --- a/conda-environments/github-actions-tests.yml +++ b/conda-environments/github-actions-tests.yml @@ -22,7 +22,7 @@ dependencies: - platformdirs = 3.2.* - psutil = 5.9.* - pyarrow = 11.* -- pydantic = 1.10.* +- pydantic = 2.6.* - pypyr = 5.8.* - pytables >= 3.7 - pytest = 7.2.* diff --git a/setup.cfg b/setup.cfg index a33f1b3d8..6051b75af 100644 --- a/setup.cfg +++ b/setup.cfg @@ -33,6 +33,7 @@ install_requires = platformdirs psutil >= 4.1 pyarrow >= 2.0 + pydantic >= 2.6 pypyr >= 5.3 pyyaml >= 5.1 requests >= 2.7
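
The central pattern in this changeset is the Pydantic 1.x to 2.x migration: call sites switch from `parse_obj` to `model_validate`, optional settings fields gain explicit `= None` defaults, and tests use `model_construct` to build unvalidated settings objects. The following is a minimal sketch of that pattern, not ActivitySim code; `ExampleComponentSettings` and its field are hypothetical stand-ins for the settings classes touched above (e.g. `TourLocationComponentSettings`, `PreprocessorSettings`).

```python
# Sketch of the Pydantic v1 -> v2 call-site changes applied throughout this diff.
# Requires pydantic >= 2, as pinned in the updated environment files.
from typing import Optional

from pydantic import BaseModel


class ExampleComponentSettings(BaseModel):
    """Hypothetical settings model illustrating the migrated patterns."""

    # In Pydantic 2, Optional fields are required unless a default is given,
    # hence the explicit "= None" added to fields like PreprocessorSettings.TABLES.
    TABLES: Optional[list[str]] = None


raw = {"TABLES": ["persons", "households"]}

# Pydantic 1.x style (deprecated in 2.x):
#   settings = ExampleComponentSettings.parse_obj(raw)

# Pydantic 2.x style used throughout this diff:
settings = ExampleComponentSettings.model_validate(raw)

# The new vehicle type alternatives test builds a model without validation
# and assigns fields afterwards; model_construct() is the 2.x spelling of that.
unvalidated = ExampleComponentSettings.model_construct()
unvalidated.TABLES = ["vehicles"]
```

The same renames (`parse_obj` to `model_validate`) appear in `logsums.py`, `shadow_pricing.py`, `logit.py`, `simulate.py`, `filesystem.py`, `util.py`, `state.py`, and the core tests, alongside the version-pin updates from `pydantic = 1.10.*` to `pydantic = 2.6.*` in the conda environments and the new `pydantic >= 2.6` requirement in `setup.cfg`.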