diff --git a/CHANGES.rst b/CHANGES.rst
index 6ea05d8c..c306aa82 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -13,6 +13,13 @@ Changelog of threedi-modelchecker
 - Removed the summarize (--sum, --no-sum) option from the command-line interface.
 
+- Complete run-through of the checks.
+
+- Added an initial migration (0200) that adds the tables only when they do not
+  already exist. This way, both empty and existing sqlite files can be initialized.
+
+- Added a migration (0201) that replaces friction_type=4 with 2 (Manning).
+
 0.14 (2021-07-29)
 -----------------
diff --git a/README.rst b/README.rst
index 58571abe..508ad3b0 100644
--- a/README.rst
+++ b/README.rst
@@ -52,11 +52,21 @@ Command-line interface
 Use the modelchecker from the command line as follows::
 
-    threedi-modelchecker -l warning sqlite -s path/to/model.sqlite
+    threedi_modelchecker -s path/to/model.sqlite check -l warning
 
 By default, WARNING and INFO checks are ignored.
 
+Migrations
+----------
+
+Migrate the schematisation file to the latest version as follows::
+
+    threedi_modelchecker -s path/to/model.sqlite migrate
+
+The file will be changed in-place.
+
+
 Development
 -----------
diff --git a/alembic.ini b/alembic.ini
new file mode 100644
index 00000000..51798c1b
--- /dev/null
+++ b/alembic.ini
@@ -0,0 +1,91 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = threedi_modelchecker/migrations
+
+
+version_table = schema_version
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date
+# within the migration file as well as the filename.
+# string value is passed to dateutil.tz.gettz()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the
+# "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; this defaults
+# to alembic/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path
+# version_locations = %(here)s/bar %(here)s/bat alembic/versions
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+sqlalchemy.url = sqlite:///
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts.
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/setup.py b/setup.py index 145c9719..45bfa530 100644 --- a/setup.py +++ b/setup.py @@ -47,7 +47,7 @@ python_requires='>=3.6', entry_points={ "console_scripts": [ - "threedi-modelchecker = threedi_modelchecker.scripts:check_model" + "threedi_modelchecker = threedi_modelchecker.scripts:threedi_modelchecker" ] }, ) diff --git a/tests/test_schema.py b/tests/test_schema.py index 022db267..1a4f6f1a 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -122,7 +122,7 @@ def test_upgrade_with_preexisting_version(in_memory_sqlite): schema = ModelSchema(in_memory_sqlite) with mock.patch.object(schema, "get_version", return_value=199): - schema.upgrade(backup=False) + schema.upgrade(backup=False, revision="0201") assert in_memory_sqlite.get_engine().has_table("v2_connection_nodes") diff --git a/threedi_modelchecker/migrations/env.py b/threedi_modelchecker/migrations/env.py index 1cf5d06c..b2565d7e 100644 --- a/threedi_modelchecker/migrations/env.py +++ b/threedi_modelchecker/migrations/env.py @@ -1,11 +1,24 @@ from alembic import context +from sqlalchemy import create_engine from threedi_modelchecker.threedi_model.models import Base +import os +import threedi_modelchecker.threedi_model.models # NOQA needed for autogenerate + target_metadata = Base.metadata config = context.config +def get_url(): + db_url = os.environ.get("DB_URL") + if not db_url: + raise RuntimeError( + "Database URL must be specified using the environment variable DB_URL" + ) + return db_url + + def run_migrations_online(): """Run migrations in 'online' mode. @@ -13,6 +26,8 @@ def run_migrations_online(): SQLite before running migrations. """ connectable = config.attributes.get("connection") + if connectable is None: + connectable = create_engine(get_url()) with connectable.connect() as connection: context.configure(connection=connection, target_metadata=target_metadata) diff --git a/threedi_modelchecker/migrations/versions/0001_initial.py b/threedi_modelchecker/migrations/versions/0001_initial.py deleted file mode 100644 index 893260e1..00000000 --- a/threedi_modelchecker/migrations/versions/0001_initial.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Initial migration - -Revision ID: 0001 -Revises: -Create Date: 2021-02-15 16:31:00.792077 - -""" -from alembic import op - -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = "0200" -down_revision = None -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - "v2_connection_nodes", sa.Column("id", sa.Integer, primary_key=True) - ) - - -def downgrade(): - op.drop_table("v2_connection_nodes") diff --git a/threedi_modelchecker/migrations/versions/0200_initial.py b/threedi_modelchecker/migrations/versions/0200_initial.py new file mode 100644 index 00000000..750b5cb2 --- /dev/null +++ b/threedi_modelchecker/migrations/versions/0200_initial.py @@ -0,0 +1,801 @@ +"""Create all tables if they do not exist already. + +Revision ID: 0200 +Revises: +Create Date: 2021-02-15 16:31:00.792077 + +""" +from alembic import op +from sqlalchemy.ext.compiler import compiles +from sqlalchemy.schema import CreateTable + +import geoalchemy2 +import re +import sqlalchemy as sa + + +# CREATE TABLE IF NOT EXISTS hack +# See https://github.com/sqlalchemy/alembic/issues/151 + + +@compiles(CreateTable) +def _add_if_not_exists(element, compiler, **kw): + output = compiler.visit_create_table(element, **kw) + if element.element.info.get("ifexists"): + output = re.sub(r"^\s*CREATE TABLE", "CREATE TABLE IF NOT EXISTS", output, re.S) + return output + + +# revision identifiers, used by Alembic. +revision = "0200" +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table( + "v2_2d_boundary_conditions", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("display_name", sa.String(length=255), nullable=True), + sa.Column("timeseries", sa.Text(), nullable=True), + sa.Column("boundary_type", sa.Integer(), nullable=False), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="LINESTRING", + srid=4326, + management=True, + ), + nullable=False, + ), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_2d_lateral", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("type", sa.Integer(), nullable=False), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="POINT", + srid=4326, + management=True, + ), + nullable=False, + ), + sa.Column("timeseries", sa.Text(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_calculation_point", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("content_type_id", sa.Integer(), nullable=True), + sa.Column("user_ref", sa.String(length=80), nullable=False), + sa.Column("calc_type", sa.Integer(), nullable=True), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="POINT", + srid=4326, + management=True, + ), + nullable=False, + ), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_connection_nodes", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("storage_area", sa.Float(), nullable=True), + sa.Column("initial_waterlevel", sa.Float(), nullable=True), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="POINT", + srid=4326, + management=True, + ), + nullable=False, + ), + sa.Column("code", sa.String(length=100), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_control_delta", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("measure_variable", sa.String(length=50), nullable=True), + sa.Column("measure_delta", sa.String(length=50), nullable=True), + sa.Column("measure_dt", sa.Float(), nullable=True), + sa.Column("action_type", sa.String(length=50), nullable=True), + 
sa.Column("action_value", sa.String(length=50), nullable=True), + sa.Column("action_time", sa.Float(), nullable=True), + sa.Column("target_type", sa.String(length=100), nullable=True), + sa.Column("target_id", sa.Integer(), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_control_group", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(length=100), nullable=True), + sa.Column("description", sa.Text(), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_control_measure_group", + sa.Column("id", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_control_memory", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("measure_variable", sa.String(length=50), nullable=True), + sa.Column("upper_threshold", sa.Float(), nullable=True), + sa.Column("lower_threshold", sa.Float(), nullable=True), + sa.Column("action_type", sa.String(length=50), nullable=True), + sa.Column("action_value", sa.String(length=50), nullable=True), + sa.Column("target_type", sa.String(length=100), nullable=True), + sa.Column("target_id", sa.Integer(), nullable=True), + sa.Column("is_active", sa.Boolean(), nullable=False), + sa.Column("is_inverse", sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_control_pid", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("measure_variable", sa.String(length=50), nullable=True), + sa.Column("setpoint", sa.Float(), nullable=True), + sa.Column("kp", sa.Float(), nullable=True), + sa.Column("ki", sa.Float(), nullable=True), + sa.Column("kd", sa.Float(), nullable=True), + sa.Column("action_type", sa.String(length=50), nullable=True), + sa.Column("target_type", sa.String(length=100), nullable=True), + sa.Column("target_upper_limit", sa.String(length=50), nullable=True), + sa.Column("target_lower_limit", sa.String(length=50), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_control_table", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("action_table", sa.Text(), nullable=True), + sa.Column("action_type", sa.String(length=50), nullable=True), + sa.Column("measure_variable", sa.String(length=50), nullable=True), + sa.Column("measure_operator", sa.String(length=2), nullable=True), + sa.Column("target_type", sa.String(length=100), nullable=True), + sa.Column("target_id", sa.Integer(), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_control_timed", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("action_type", sa.String(length=50), nullable=True), + sa.Column("action_table", sa.Text(), nullable=True), + sa.Column("target_type", sa.String(length=100), nullable=True), + sa.Column("target_id", sa.Integer(), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_cross_section_definition", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("width", sa.String(length=255), nullable=True), + sa.Column("height", sa.String(length=255), nullable=True), + sa.Column("shape", sa.Integer(), nullable=True), + sa.Column("code", sa.String(length=100), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_dem_average_area", + sa.Column("id", sa.Integer(), nullable=False), + 
sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="POLYGON", + srid=4326, + management=True, + ), + nullable=False, + ), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_floodfill", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("waterlevel", sa.Float(), nullable=True), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="POINT", + srid=4326, + management=True, + ), + nullable=True, + ), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_grid_refinement", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("display_name", sa.String(length=255), nullable=True), + sa.Column("refinement_level", sa.Integer(), nullable=False), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="LINESTRING", + srid=4326, + management=True, + ), + nullable=False, + ), + sa.Column("code", sa.String(length=100), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_grid_refinement_area", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("display_name", sa.String(length=255), nullable=True), + sa.Column("refinement_level", sa.Integer(), nullable=False), + sa.Column("code", sa.String(length=100), nullable=True), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="POLYGON", + srid=4326, + management=True, + ), + nullable=False, + ), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_groundwater", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("groundwater_impervious_layer_level", sa.Float(), nullable=True), + sa.Column( + "groundwater_impervious_layer_level_file", + sa.String(length=255), + nullable=True, + ), + sa.Column( + "groundwater_impervious_layer_level_type", sa.Integer(), nullable=True + ), + sa.Column("phreatic_storage_capacity", sa.Float(), nullable=True), + sa.Column( + "phreatic_storage_capacity_file", sa.String(length=255), nullable=True + ), + sa.Column("phreatic_storage_capacity_type", sa.Integer(), nullable=True), + sa.Column("equilibrium_infiltration_rate", sa.Float(), nullable=True), + sa.Column( + "equilibrium_infiltration_rate_file", sa.String(length=255), nullable=True + ), + sa.Column("equilibrium_infiltration_rate_type", sa.Integer(), nullable=True), + sa.Column("initial_infiltration_rate", sa.Float(), nullable=True), + sa.Column( + "initial_infiltration_rate_file", sa.String(length=255), nullable=True + ), + sa.Column("initial_infiltration_rate_type", sa.Integer(), nullable=True), + sa.Column("infiltration_decay_period", sa.Float(), nullable=True), + sa.Column( + "infiltration_decay_period_file", sa.String(length=255), nullable=True + ), + sa.Column("infiltration_decay_period_type", sa.Integer(), nullable=True), + sa.Column("groundwater_hydro_connectivity", sa.Float(), nullable=True), + sa.Column( + "groundwater_hydro_connectivity_file", sa.String(length=255), nullable=True + ), + sa.Column("groundwater_hydro_connectivity_type", sa.Integer(), nullable=True), + sa.Column("display_name", sa.String(length=255), nullable=True), + sa.Column("leakage", sa.Float(), nullable=True), + sa.Column("leakage_file", sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_impervious_surface", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("code", sa.String(length=100), nullable=True), + sa.Column("display_name", 
sa.String(length=255), nullable=True), + sa.Column("surface_inclination", sa.String(length=64), nullable=False), + sa.Column("surface_class", sa.String(length=128), nullable=False), + sa.Column("surface_sub_class", sa.String(length=128), nullable=True), + sa.Column("zoom_category", sa.Integer(), nullable=True), + sa.Column("nr_of_inhabitants", sa.Float(), nullable=True), + sa.Column("area", sa.Float(), nullable=True), + sa.Column("dry_weather_flow", sa.Float(), nullable=True), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="POLYGON", + srid=4326, + management=True, + ), + nullable=False, + ), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_interflow", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("interflow_type", sa.Integer(), nullable=False), + sa.Column("porosity", sa.Float(), nullable=True), + sa.Column("porosity_file", sa.String(length=255), nullable=True), + sa.Column("porosity_layer_thickness", sa.Float(), nullable=True), + sa.Column("impervious_layer_elevation", sa.Float(), nullable=True), + sa.Column("hydraulic_conductivity", sa.Float(), nullable=True), + sa.Column("hydraulic_conductivity_file", sa.String(length=255), nullable=True), + sa.Column("display_name", sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_levee", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("code", sa.String(length=100), nullable=True), + sa.Column("crest_level", sa.Float(), nullable=True), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="LINESTRING", + srid=4326, + management=True, + ), + nullable=False, + ), + sa.Column("material", sa.Integer(), nullable=True), + sa.Column("max_breach_depth", sa.Float(), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_numerical_settings", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("cfl_strictness_factor_1d", sa.Float(), nullable=True), + sa.Column("cfl_strictness_factor_2d", sa.Float(), nullable=True), + sa.Column("convergence_cg", sa.Float(), nullable=True), + sa.Column("convergence_eps", sa.Float(), nullable=True), + sa.Column("flow_direction_threshold", sa.Float(), nullable=True), + sa.Column("frict_shallow_water_correction", sa.Integer(), nullable=True), + sa.Column("general_numerical_threshold", sa.Float(), nullable=True), + sa.Column("integration_method", sa.Integer(), nullable=True), + sa.Column("limiter_grad_1d", sa.Integer(), nullable=True), + sa.Column("limiter_grad_2d", sa.Integer(), nullable=True), + sa.Column("limiter_slope_crossectional_area_2d", sa.Integer(), nullable=True), + sa.Column("limiter_slope_friction_2d", sa.Integer(), nullable=True), + sa.Column("max_nonlin_iterations", sa.Integer(), nullable=True), + sa.Column("max_degree", sa.Integer(), nullable=False), + sa.Column("minimum_friction_velocity", sa.Float(), nullable=True), + sa.Column("minimum_surface_area", sa.Float(), nullable=True), + sa.Column("precon_cg", sa.Integer(), nullable=True), + sa.Column("preissmann_slot", sa.Float(), nullable=True), + sa.Column("pump_implicit_ratio", sa.Float(), nullable=True), + sa.Column("thin_water_layer_definition", sa.Float(), nullable=True), + sa.Column("use_of_cg", sa.Integer(), nullable=False), + sa.Column("use_of_nested_newton", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_obstacle", + sa.Column("id", 
sa.Integer(), nullable=False), + sa.Column("code", sa.String(length=100), nullable=True), + sa.Column("crest_level", sa.Float(), nullable=False), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="LINESTRING", + srid=4326, + management=True, + ), + nullable=False, + ), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_simple_infiltration", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("infiltration_rate", sa.Float(), nullable=True), + sa.Column("infiltration_rate_file", sa.String(length=255), nullable=True), + sa.Column("infiltration_surface_option", sa.Integer(), nullable=True), + sa.Column("max_infiltration_capacity_file", sa.Text(), nullable=True), + sa.Column("display_name", sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_surface_parameters", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("outflow_delay", sa.Float(), nullable=False), + sa.Column("surface_layer_thickness", sa.Float(), nullable=False), + sa.Column("infiltration", sa.Boolean(), nullable=False), + sa.Column("max_infiltration_capacity", sa.Float(), nullable=False), + sa.Column("min_infiltration_capacity", sa.Float(), nullable=False), + sa.Column("infiltration_decay_constant", sa.Float(), nullable=False), + sa.Column("infiltration_recovery_constant", sa.Float(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_1d_boundary_conditions", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("boundary_type", sa.Integer(), nullable=False), + sa.Column("timeseries", sa.Text(), nullable=True), + sa.Column("connection_node_id", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("connection_node_id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_1d_lateral", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("connection_node_id", sa.Integer(), nullable=False), + sa.Column("timeseries", sa.Text(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_channel", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("display_name", sa.String(length=255), nullable=True), + sa.Column("code", sa.String(length=100), nullable=True), + sa.Column("calculation_type", sa.Integer(), nullable=False), + sa.Column("dist_calc_points", sa.Float(), nullable=True), + sa.Column("zoom_category", sa.Integer(), nullable=True), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="LINESTRING", + srid=4326, + management=True, + ), + nullable=False, + ), + sa.Column("connection_node_start_id", sa.Integer(), nullable=False), + sa.Column("connection_node_end_id", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_connected_pnt", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("calculation_pnt_id", sa.Integer(), nullable=False), + sa.Column("levee_id", sa.Integer(), nullable=True), + sa.Column("exchange_level", sa.Float(), nullable=True), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="POINT", + srid=4326, + management=True, + ), + nullable=False, + ), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_control", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("control_group_id", sa.Integer(), nullable=True), + sa.Column("measure_group_id", 
sa.Integer(), nullable=True), + sa.Column("control_type", sa.String(length=15), nullable=True), + sa.Column("control_id", sa.Integer(), nullable=True), + sa.Column("start", sa.String(length=50), nullable=True), + sa.Column("end", sa.String(length=50), nullable=True), + sa.Column("measure_frequency", sa.Integer(), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_control_measure_map", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("measure_group_id", sa.Integer(), nullable=True), + sa.Column("object_type", sa.String(length=100), nullable=True), + sa.Column("object_id", sa.Integer(), nullable=True), + sa.Column("weight", sa.Float(), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_culvert", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("display_name", sa.String(length=255), nullable=True), + sa.Column("code", sa.String(length=100), nullable=True), + sa.Column("calculation_type", sa.Integer(), nullable=True), + sa.Column("friction_value", sa.Float(), nullable=False), + sa.Column("friction_type", sa.Integer(), nullable=False), + sa.Column("dist_calc_points", sa.Float(), nullable=True), + sa.Column("zoom_category", sa.Integer(), nullable=True), + sa.Column("discharge_coefficient_positive", sa.Float(), nullable=True), + sa.Column("discharge_coefficient_negative", sa.Float(), nullable=True), + sa.Column("invert_level_start_point", sa.Float(), nullable=False), + sa.Column("invert_level_end_point", sa.Float(), nullable=False), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="LINESTRING", + srid=4326, + management=True, + ), + nullable=True, + ), + sa.Column("connection_node_start_id", sa.Integer(), nullable=False), + sa.Column("connection_node_end_id", sa.Integer(), nullable=False), + sa.Column("cross_section_definition_id", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_global_settings", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("use_2d_flow", sa.Boolean(), nullable=False), + sa.Column("use_1d_flow", sa.Boolean(), nullable=False), + sa.Column("manhole_storage_area", sa.Float(), nullable=True), + sa.Column("name", sa.String(length=128), nullable=True), + sa.Column("sim_time_step", sa.Float(), nullable=False), + sa.Column("output_time_step", sa.Float(), nullable=True), + sa.Column("nr_timesteps", sa.Integer(), nullable=False), + sa.Column("start_time", sa.Text(), nullable=True), + sa.Column("start_date", sa.Text(), nullable=False), + sa.Column("grid_space", sa.Float(), nullable=False), + sa.Column("dist_calc_points", sa.Float(), nullable=False), + sa.Column("kmax", sa.Integer(), nullable=False), + sa.Column("guess_dams", sa.Integer(), nullable=True), + sa.Column("table_step_size", sa.Float(), nullable=False), + sa.Column("flooding_threshold", sa.Float(), nullable=False), + sa.Column("advection_1d", sa.Integer(), nullable=False), + sa.Column("advection_2d", sa.Integer(), nullable=False), + sa.Column("dem_file", sa.String(length=255), nullable=True), + sa.Column("frict_type", sa.Integer(), nullable=True), + sa.Column("frict_coef", sa.Float(), nullable=False), + sa.Column("frict_coef_file", sa.String(length=255), nullable=True), + sa.Column("water_level_ini_type", sa.Integer(), nullable=True), + sa.Column("initial_waterlevel", sa.Float(), nullable=False), + sa.Column("initial_waterlevel_file", sa.String(length=255), nullable=True), + 
sa.Column("interception_global", sa.Float(), nullable=True), + sa.Column("interception_file", sa.String(length=255), nullable=True), + sa.Column("dem_obstacle_detection", sa.Boolean(), nullable=False), + sa.Column("dem_obstacle_height", sa.Float(), nullable=True), + sa.Column("embedded_cutoff_threshold", sa.Float(), nullable=True), + sa.Column("epsg_code", sa.Integer(), nullable=True), + sa.Column("timestep_plus", sa.Boolean(), nullable=False), + sa.Column("max_angle_1d_advection", sa.Float(), nullable=True), + sa.Column("minimum_sim_time_step", sa.Float(), nullable=True), + sa.Column("maximum_sim_time_step", sa.Float(), nullable=True), + sa.Column("frict_avg", sa.Integer(), nullable=True), + sa.Column("wind_shielding_file", sa.String(length=255), nullable=True), + sa.Column("use_0d_inflow", sa.Integer(), nullable=True), + sa.Column("table_step_size_1d", sa.Float(), nullable=True), + sa.Column("table_step_size_volume_2d", sa.Float(), nullable=True), + sa.Column("use_2d_rain", sa.Integer(), nullable=False), + sa.Column("initial_groundwater_level", sa.Float(), nullable=True), + sa.Column( + "initial_groundwater_level_file", sa.String(length=255), nullable=True + ), + sa.Column("initial_groundwater_level_type", sa.Integer(), nullable=True), + sa.Column("numerical_settings_id", sa.Integer(), nullable=False), + sa.Column("interflow_settings_id", sa.Integer(), nullable=True), + sa.Column("control_group_id", sa.Integer(), nullable=True), + sa.Column("simple_infiltration_settings_id", sa.Integer(), nullable=True), + sa.Column("groundwater_settings_id", sa.Integer(), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_impervious_surface_map", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("percentage", sa.Float(), nullable=False), + sa.Column("impervious_surface_id", sa.Integer(), nullable=False), + sa.Column("connection_node_id", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_manhole", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("display_name", sa.String(length=255), nullable=True), + sa.Column("code", sa.String(length=100), nullable=True), + sa.Column("zoom_category", sa.Integer(), nullable=True), + sa.Column("shape", sa.String(length=4), nullable=True), + sa.Column("width", sa.Float(), nullable=True), + sa.Column("length", sa.Float(), nullable=True), + sa.Column("surface_level", sa.Float(), nullable=True), + sa.Column("bottom_level", sa.Float(), nullable=False), + sa.Column("drain_level", sa.Float(), nullable=True), + sa.Column("sediment_level", sa.Float(), nullable=True), + sa.Column("manhole_indicator", sa.Integer(), nullable=True), + sa.Column("calculation_type", sa.Integer(), nullable=True), + sa.Column("connection_node_id", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_orifice", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("code", sa.String(length=100), nullable=True), + sa.Column("display_name", sa.String(length=255), nullable=True), + sa.Column("zoom_category", sa.Integer(), nullable=True), + sa.Column("crest_type", sa.Integer(), nullable=False), + sa.Column("crest_level", sa.Float(), nullable=False), + sa.Column("friction_value", sa.Float(), nullable=True), + sa.Column("friction_type", sa.Integer(), nullable=True), + sa.Column("discharge_coefficient_positive", sa.Float(), nullable=True), + sa.Column("discharge_coefficient_negative", sa.Float(), 
nullable=True), + sa.Column("sewerage", sa.Boolean(), nullable=False), + sa.Column("connection_node_start_id", sa.Integer(), nullable=False), + sa.Column("connection_node_end_id", sa.Integer(), nullable=False), + sa.Column("cross_section_definition_id", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_pipe", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("display_name", sa.String(length=255), nullable=True), + sa.Column("code", sa.String(length=100), nullable=True), + sa.Column("profile_num", sa.Integer(), nullable=True), + sa.Column("sewerage_type", sa.Integer(), nullable=True), + sa.Column("calculation_type", sa.Integer(), nullable=False), + sa.Column("invert_level_start_point", sa.Float(), nullable=False), + sa.Column("invert_level_end_point", sa.Float(), nullable=False), + sa.Column("friction_value", sa.Float(), nullable=False), + sa.Column("friction_type", sa.Integer(), nullable=False), + sa.Column("dist_calc_points", sa.Float(), nullable=True), + sa.Column("material", sa.Integer(), nullable=True), + sa.Column("original_length", sa.Float(), nullable=True), + sa.Column("zoom_category", sa.Integer(), nullable=True), + sa.Column("connection_node_start_id", sa.Integer(), nullable=False), + sa.Column("connection_node_end_id", sa.Integer(), nullable=False), + sa.Column("cross_section_definition_id", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_pumpstation", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("code", sa.String(length=100), nullable=True), + sa.Column("display_name", sa.String(length=255), nullable=True), + sa.Column("zoom_category", sa.Integer(), nullable=True), + sa.Column("classification", sa.Integer(), nullable=True), + sa.Column("sewerage", sa.Boolean(), nullable=True), + sa.Column("type", sa.Integer(), nullable=False), + sa.Column("start_level", sa.Float(), nullable=False), + sa.Column("lower_stop_level", sa.Float(), nullable=False), + sa.Column("upper_stop_level", sa.Float(), nullable=True), + sa.Column("capacity", sa.Float(), nullable=False), + sa.Column("connection_node_start_id", sa.Integer(), nullable=False), + sa.Column("connection_node_end_id", sa.Integer(), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_surface", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("display_name", sa.String(length=255), nullable=True), + sa.Column("code", sa.String(length=100), nullable=True), + sa.Column("zoom_category", sa.Integer(), nullable=True), + sa.Column("nr_of_inhabitants", sa.Float(), nullable=True), + sa.Column("dry_weather_flow", sa.Float(), nullable=True), + sa.Column("function", sa.String(length=64), nullable=True), + sa.Column("area", sa.Float(), nullable=True), + sa.Column("surface_parameters_id", sa.Integer(), nullable=False), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="POLYGON", + srid=4326, + management=True, + ), + nullable=False, + ), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_surface_map", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("surface_type", sa.String(length=40), nullable=False), + sa.Column("surface_id", sa.Integer(), nullable=False), + sa.Column("connection_node_id", sa.Integer(), nullable=False), + sa.Column("percentage", sa.Float(), nullable=True), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + 
op.create_table( + "v2_weir", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("code", sa.String(length=100), nullable=True), + sa.Column("display_name", sa.String(length=255), nullable=True), + sa.Column("crest_level", sa.Float(), nullable=False), + sa.Column("crest_type", sa.Integer(), nullable=False), + sa.Column("friction_value", sa.Float(), nullable=True), + sa.Column("friction_type", sa.Integer(), nullable=True), + sa.Column("discharge_coefficient_positive", sa.Float(), nullable=True), + sa.Column("discharge_coefficient_negative", sa.Float(), nullable=True), + sa.Column("sewerage", sa.Boolean(), nullable=True), + sa.Column("external", sa.Boolean(), nullable=True), + sa.Column("zoom_category", sa.Integer(), nullable=True), + sa.Column("connection_node_start_id", sa.Integer(), nullable=False), + sa.Column("connection_node_end_id", sa.Integer(), nullable=False), + sa.Column("cross_section_definition_id", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_aggregation_settings", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("global_settings_id", sa.Integer(), nullable=True), + sa.Column("var_name", sa.String(length=100), nullable=False), + sa.Column("flow_variable", sa.String(length=100), nullable=False), + sa.Column("aggregation_method", sa.String(length=100), nullable=True), + sa.Column("aggregation_in_space", sa.Boolean(), nullable=False), + sa.Column("timestep", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_cross_section_location", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("code", sa.String(length=100), nullable=True), + sa.Column("reference_level", sa.Float(), nullable=False), + sa.Column("friction_type", sa.Integer(), nullable=False), + sa.Column("friction_value", sa.Float(), nullable=False), + sa.Column("bank_level", sa.Float(), nullable=True), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="POINT", + srid=4326, + management=True, + ), + nullable=False, + ), + sa.Column("channel_id", sa.Integer(), nullable=False), + sa.Column("definition_id", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + op.create_table( + "v2_windshielding", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("north", sa.Float(), nullable=True), + sa.Column("northeast", sa.Float(), nullable=True), + sa.Column("east", sa.Float(), nullable=True), + sa.Column("southeast", sa.Float(), nullable=True), + sa.Column("south", sa.Float(), nullable=True), + sa.Column("southwest", sa.Float(), nullable=True), + sa.Column("west", sa.Float(), nullable=True), + sa.Column("northwest", sa.Float(), nullable=True), + sa.Column( + "the_geom", + geoalchemy2.types.Geometry( + geometry_type="POINT", + srid=4326, + management=True, + ), + nullable=True, + ), + sa.Column("channel_id", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + info={"ifexists": True}, + ) + + +def downgrade(): + pass diff --git a/threedi_modelchecker/migrations/versions/0201_migrate_friction_type_4.py b/threedi_modelchecker/migrations/versions/0201_migrate_friction_type_4.py new file mode 100644 index 00000000..8289b2ee --- /dev/null +++ b/threedi_modelchecker/migrations/versions/0201_migrate_friction_type_4.py @@ -0,0 +1,41 @@ +"""Migration the old friction_type 4 to 2 (MANNING) + +Revision ID: 0201 +Revises: +Create Date: 2021-09-29 13:50:19.544275 + +""" +from alembic import op 
+ +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "0201" +down_revision = "0200" +branch_labels = None +depends_on = None + +TABLES = ("v2_cross_section_location", "v2_pipe", "v2_culvert", "v2_weir", "v2_orifice") +COLUMN_NAME = "friction_type" + + +def upgrade(): + for table_name in TABLES: + upgrade_single_table(table_name) + + +def downgrade(): + pass + + +def upgrade_single_table(table_name): + table = sa.table( + table_name, + sa.column("friction_type", sa.Integer), + ) + op.execute( + table.update() + .where(table.c.friction_type == op.inline_literal(4)) + .values({"friction_type": op.inline_literal(2)}) + ) diff --git a/threedi_modelchecker/schema.py b/threedi_modelchecker/schema.py index 4ba6e8b9..ca26ed63 100644 --- a/threedi_modelchecker/schema.py +++ b/threedi_modelchecker/schema.py @@ -1,7 +1,6 @@ from .errors import MigrationMissingError from .threedi_model import constants from .threedi_model import models -from alembic import command from alembic.config import Config from alembic.environment import EnvironmentContext from alembic.migration import MigrationContext @@ -34,10 +33,23 @@ def get_schema_version(): return int(env.get_head_revision()) -def _upgrade_database(db, version="head"): +def _upgrade_database(db, revision="head"): """Upgrade ThreediDatabase instance""" with db.get_engine().begin() as connection: - command.upgrade(get_alembic_config(connection), version) + config = get_alembic_config(connection) + script = ScriptDirectory.from_config(config) + + def upgrade(rev, context): + return script._upgrade_revs(revision, rev) + + with EnvironmentContext( + config, + script, + fn=upgrade, + destination_rev=revision, + version_table=constants.VERSION_TABLE_NAME, + ): + script.run_env() class ModelSchema: @@ -76,7 +88,7 @@ def get_version(self): else: return self._get_version_old() - def upgrade(self, backup=True): + def upgrade(self, revision="head", backup=True): """Upgrade the database to the latest version. This requires the current version to be at least 174 (the latest @@ -103,9 +115,9 @@ def upgrade(self, backup=True): ) if backup: with self.db.file_transaction() as work_db: - _upgrade_database(work_db) + _upgrade_database(work_db, revision=revision) else: - _upgrade_database(self.db) + _upgrade_database(self.db, revision=revision) def validate_schema(self): """Very basic validation of 3Di schema. 
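The reworked upgrade path in threedi_modelchecker/schema.py can also be driven without the command-line interface. The following is a minimal sketch, not part of the changeset itself; the file name "model.sqlite" is a placeholder and the connection settings follow the spatialite example used in scripts.py::

    from threedi_modelchecker.schema import ModelSchema
    from threedi_modelchecker.threedi_database import ThreediDatabase

    db = ThreediDatabase(
        connection_settings={"db_path": "model.sqlite", "db_file": "model.sqlite"},
        db_type="spatialite",
        echo=False,
    )
    schema = ModelSchema(db)
    print(schema.get_version())  # e.g. 199 for a not-yet-migrated file

    # Upgrade to a specific revision (as tests/test_schema.py does), or leave the
    # argument out to go to "head". With backup=True the upgrade first runs on a
    # copy of the file (via db.file_transaction).
    schema.upgrade(revision="0201", backup=True)

    # Migration 0201 itself boils down to, for each of the five listed tables:
    #   UPDATE <table> SET friction_type = 2 WHERE friction_type = 4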
diff --git a/threedi_modelchecker/scripts.py b/threedi_modelchecker/scripts.py
index 3bfc3f2f..5e32c323 100644
--- a/threedi_modelchecker/scripts.py
+++ b/threedi_modelchecker/scripts.py
@@ -1,12 +1,49 @@
 from threedi_modelchecker import exporters
 from threedi_modelchecker.checks.base import CheckLevel
 from threedi_modelchecker.model_checks import ThreediModelChecker
+from threedi_modelchecker.schema import ModelSchema
 from threedi_modelchecker.threedi_database import ThreediDatabase
 
 import click
 
 
 @click.group()
+@click.option(
+    "-s",
+    "--sqlite",
+    type=click.Path(exists=True, readable=True),
+    help="Path to an sqlite (spatialite) file",
+)
+@click.option("-d", "--database", help="PostGIS database name to connect to")
+@click.option("-h", "--host", help="PostGIS database server host")
+@click.option("-p", "--port", default=5432, help="PostGIS database server port")
+@click.option("-u", "--username", help="PostGIS database username")
+@click.option("--password", help="PostGIS database password")
+@click.pass_context
+def threedi_modelchecker(ctx, sqlite, database, host, port, username, password):
+    """Check or migrate a threedi model schematisation."""
+    ctx.ensure_object(dict)
+
+    if sqlite:
+        sqlite_settings = {"db_path": sqlite, "db_file": sqlite}
+        db = ThreediDatabase(
+            connection_settings=sqlite_settings, db_type="spatialite", echo=False
+        )
+    else:
+        postgis_settings = {
+            "host": host,
+            "port": port,
+            "database": database,
+            "username": username,
+            "password": password,
+        }
+        db = ThreediDatabase(
+            connection_settings=postgis_settings, db_type="postgres", echo=False
+        )
+    ctx.obj["db"] = db
+
+
+@threedi_modelchecker.command()
 @click.option("-f", "--file", help="Write errors to file, instead of stdout")
 @click.option(
     "-l",
@@ -16,8 +53,8 @@
     help="Minimum check level.",
 )
 @click.pass_context
-def check_model(ctx, file, level):
-    """Checks the threedi-model for errors / warnings / info messages"""
+def check(ctx, file, level):
+    """Checks the threedi model schematisation for errors."""
     level = level.upper()
     if level == "ERROR":
         msg = "errors"
@@ -29,58 +66,30 @@
     if file:
         click.echo("Model errors will be written to %s" % file)
 
+    mc = ThreediModelChecker(ctx.obj["db"])
+    model_errors = mc.errors(level=level)
 
-@check_model.command()
-@click.option("-d", "--database", required=True, help="database name to connect to")
-@click.option("-h", "--host", required=True, help="database server host")
-@click.option("-p", "--port", required=True, default=5432, help="database server port")
-@click.option("-u", "--username", required=True, help="database username")
-@click.pass_context
-def postgis(context, database, host, port, username, password):
-    """Parse a postgis model"""
-    postgis_settings = {
-        "host": host,
-        "port": port,
-        "database": database,
-        "username": username,
-        "password": password,
-    }
-    db = ThreediDatabase(
-        connection_settings=postgis_settings, db_type="postgres", echo=False
-    )
-    process(db, context.parent)
+    if file:
+        exporters.export_to_file(model_errors, file)
+    else:
+        exporters.print_errors(model_errors)
+
+    click.echo("Finished processing model")
 
 
-@check_model.command()
+@threedi_modelchecker.command()
 @click.option(
-    "-s",
-    "--sqlite",
-    required=True,
-    type=click.Path(exists=True, readable=True),
-    help="sqlite file",
+    "-r", "--revision", default="head", help="The schema revision to migrate to"
 )
 @click.pass_context
-def sqlite(context, sqlite):
-    """Parse a sqlite model"""
-    sqlite_settings = {"db_path": sqlite, "db_file": sqlite}
-    db = ThreediDatabase(
-        connection_settings=sqlite_settings, db_type="spatialite", echo=False
-    )
-    process(db, context.parent)
-
-
-def process(threedi_db, context):
-    mc = ThreediModelChecker(threedi_db)
-    model_errors = mc.errors(level=context.params.get("level"))
-
-    file_output = context.params.get("file")
-    if file_output:
-        exporters.export_to_file(model_errors, file_output)
-    else:
-        exporters.print_errors(model_errors)
-
-    click.echo("Finished processing model")
+def migrate(ctx, revision):
+    """Migrate the threedi model schematisation to the given revision (default: head)."""
+    schema = ModelSchema(ctx.obj["db"])
+    click.echo("The current schema revision is: %s" % schema.get_version())
+    click.echo("Running alembic upgrade script...")
+    schema.upgrade(revision=revision)
+    click.echo("The migrated schema revision is: %s" % schema.get_version())
 
 
 if __name__ == "__main__":
-    exit(check_model())
+    threedi_modelchecker()
diff --git a/threedi_modelchecker/threedi_model/constants.py b/threedi_modelchecker/threedi_model/constants.py
index 13167822..9a803142 100644
--- a/threedi_modelchecker/threedi_model/constants.py
+++ b/threedi_modelchecker/threedi_model/constants.py
@@ -5,7 +5,7 @@
 # Migration 174 deletes some fields, which were already not present in the
 # models of the threedi-modelchecker. Therefore we can both accept migration
 # 173 and 174.
-MIN_SCHEMA_VERSION = 173
+MIN_SCHEMA_VERSION = 201
 
 VERSION_TABLE_NAME = "schema_version"
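Putting the reworked entry point together, a typical session with the new command group could look like this. The spatialite commands match the README above; the database name, host and credentials in the PostGIS variant are placeholders::

    # migrate a spatialite file in-place, then run the checks
    threedi_modelchecker -s path/to/model.sqlite migrate
    threedi_modelchecker -s path/to/model.sqlite check -l warning

    # the same checks against a PostGIS database
    threedi_modelchecker -d my_database -h db.example.com -p 5432 -u my_user --password my_password check -l warning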
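For development use, the new env.py selects its database through the DB_URL environment variable whenever no connection is passed in programmatically, which appears to be aimed at running the alembic CLI directly (note the "NOQA needed for autogenerate" import). A sketch of such an invocation, with the path and message as placeholders::

    DB_URL=sqlite:///path/to/model.sqlite alembic revision --autogenerate -m "describe the schema change"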