Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions alembic.ini
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,16 @@ script_location = alembic
[test_sqlite]
sqlalchemy.url = sqlite:///modelmeta/data/mddb-v2.sqlite

[test_sqlite_local]
# SQLite on local filesystem
sqlalchemy.url = sqlite:///test.sqlite

[test_pg_local]
# Postgres server in local docker container
sqlalchemy.url = postgresql://postgres@localhost:30011/mm_test

[test_pg_pcic_meta]
# DEPRECATED: Postgres server running locally (not dockerized)
sqlalchemy.url = postgresql://pcic_meta@localhost/modelmeta_test

[prod_pcic_meta]
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
"""Add streamflow order and results tables

Revision ID: c0810e121564
Revises: 12f290b63791
Create Date: 2018-09-05 11:41:58.245456

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'c0810e121564'
down_revision = '12f290b63791'
branch_labels = None
depends_on = None


def upgrade():
    """Create the streamflow_results and streamflow_orders tables.

    streamflow_results is created first because streamflow_orders
    carries a foreign key into it.
    """
    # Lifecycle states of a produced (or pending) streamflow result.
    result_status = sa.Enum(
        'queued', 'processing', 'error', 'cancelled', 'ready', 'removed',
        name='streamflow_result_statuses',
    )
    # How (if at all) the orderer is notified when the result is ready.
    notify_method = sa.Enum('none', 'email', name='notification_methods')
    # Lifecycle states of an order itself.
    order_status = sa.Enum(
        'accepted', 'fulfilled', 'cancelled', 'error',
        name='streamflow_order_statuses',
    )

    op.create_table(
        'streamflow_results',
        sa.Column('streamflow_result_id', sa.Integer(), nullable=False),
        sa.Column('data_file_id', sa.Integer(), nullable=True),
        sa.Column('station_id', sa.Integer(), nullable=True),
        sa.Column('status', result_status, nullable=False),
        sa.ForeignKeyConstraint(['data_file_id'], ['data_files.data_file_id']),
        sa.ForeignKeyConstraint(['station_id'], ['stations.station_id']),
        sa.PrimaryKeyConstraint('streamflow_result_id'),
    )

    op.create_table(
        'streamflow_orders',
        sa.Column('streamflow_order_id', sa.Integer(), nullable=False),
        sa.Column('hydromodel_output_id', sa.Integer(), nullable=False),
        sa.Column('streamflow_result_id', sa.Integer(), nullable=False),
        sa.Column('longitude', sa.Float(), nullable=False),
        sa.Column('latitude', sa.Float(), nullable=False),
        sa.Column('notification_method', notify_method, nullable=False),
        sa.Column('notification_address', sa.String(length=255), nullable=True),
        sa.Column('status', order_status, nullable=False),
        sa.ForeignKeyConstraint(
            ['hydromodel_output_id'], ['data_files.data_file_id']),
        sa.ForeignKeyConstraint(
            ['streamflow_result_id'],
            ['streamflow_results.streamflow_result_id']),
        sa.PrimaryKeyConstraint('streamflow_order_id'),
    )


def downgrade():
    """Remove the streamflow order/result tables created by upgrade().

    The ENUM types are dropped explicitly after the tables: on
    PostgreSQL, op.drop_table does not reliably emit DROP TYPE for the
    enumerated types, so without this a subsequent re-upgrade fails with
    'type "streamflow_result_statuses" already exists'.
    """
    # Drop orders first: it holds a foreign key into streamflow_results.
    op.drop_table('streamflow_orders')
    op.drop_table('streamflow_results')

    # Explicitly drop the ENUM types the upgrade created.
    # checkfirst=True makes this a no-op on backends (e.g. SQLite) that
    # never materialized a separate type for the enum.
    bind = op.get_bind()
    sa.Enum(name='streamflow_order_statuses').drop(bind, checkfirst=True)
    sa.Enum(name='notification_methods').drop(bind, checkfirst=True)
    sa.Enum(name='streamflow_result_statuses').drop(bind, checkfirst=True)
70 changes: 70 additions & 0 deletions modelmeta/v2.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@
VariableAlias
YCellBound
SpatialRefSys
StreamflowOrder
StreamflowResult
'''.split()

from pkg_resources import resource_filename
Expand Down Expand Up @@ -692,3 +694,71 @@ def __repr__(self):

# We don't declare constraints on SpatialRefSys because the Postgis plugin is
# responsible for creating it.


class StreamflowOrder(Base):
    """ORM mapping for table ``streamflow_orders``.

    A request ("order") for streamflow output at a geographic point,
    referencing the hydrological model output file to be processed
    (via data_files) and the StreamflowResult row produced for it.
    """
    __tablename__ = 'streamflow_orders'

    # column definitions
    # Surrogate primary key (column name: streamflow_order_id).
    id = Column('streamflow_order_id', Integer, primary_key=True, nullable=False)
    # Hydrological model output to process; FK into data_files.
    hydromodel_output_id = Column(
        Integer, ForeignKey('data_files.data_file_id'),
        nullable=False)
    # Result row associated with this order; required (one result per order).
    streamflow_result_id = Column(
        Integer, ForeignKey('streamflow_results.streamflow_result_id'),
        nullable=False)
    # Geographic point the streamflow is requested for.
    longitude = Column(Float, nullable=False)
    latitude = Column(Float, nullable=False)
    # How the orderer is notified when the result is ready.
    notification_method = Column(
        Enum(
            'none', 'email',
            name='notification_methods'
        ),
        nullable=False
    )
    # Address for the chosen notification method; optional (e.g. when
    # notification_method is 'none').
    notification_address = Column(String(length=255), nullable=True)
    # Lifecycle state of the order itself.
    status = Column(
        Enum(
            'accepted', 'fulfilled', 'cancelled', 'error',
            name='streamflow_order_statuses'
        ),
        nullable=False
    )

    # relationships
    hydromodel_output = relationship('DataFile')
    result = relationship('StreamflowResult', back_populates='orders')

    def __str__(self):
        # NOTE(review): notification_method is omitted from this field
        # list while notification_address is included — confirm whether
        # that is intentional.
        return obj_repr(
            'id hydromodel_output_id streamflow_result_id longitude latitude '
            'notification_address status', self)


class StreamflowResult(Base):
    """ORM mapping for table ``streamflow_results``.

    A streamflow computation result, optionally linked to the data file
    holding the output and to the station it applies to. One result may
    be shared by several StreamflowOrder rows (see ``orders``).
    """
    __tablename__ = 'streamflow_results'

    # column definitions
    # Surrogate primary key (column name: streamflow_result_id).
    id = Column('streamflow_result_id', Integer, primary_key=True, nullable=False)
    # Output data file; nullable, presumably unset until the result is
    # produced — confirm against the workflow that fills these rows in.
    data_file_id = Column(
        Integer, ForeignKey('data_files.data_file_id'),
        nullable=True)
    # Station the result applies to; optional.
    station_id = Column(
        Integer, ForeignKey('stations.station_id'),
        nullable=True)
    # Lifecycle state of the result.
    status = Column(
        Enum(
            'queued', 'processing', 'error', 'cancelled', 'ready', 'removed',
            name='streamflow_result_statuses',
        ),
        nullable=False
    )

    # relationships
    data_file = relationship('DataFile')
    station = relationship('Station')
    # Reverse side of StreamflowOrder.result.
    orders = relationship('StreamflowOrder', back_populates='result')

    def __str__(self):
        return obj_repr(
            'id data_file_id station_id status', self)
12 changes: 8 additions & 4 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,10 @@ def make_data_file(i, run=None, timeset=None):
def data_file_1():
return make_data_file(1)

@pytest.fixture(scope='function')
def data_file_2():
    """A second distinct DataFile, built by the shared make_data_file factory."""
    return make_data_file(2)


# Grid

Expand Down Expand Up @@ -181,15 +185,15 @@ def make_test_dfv_dsg_time_series(i, file=None, variable_alias=None):


@pytest.fixture(scope='function')
def dfv_dsg_time_series_1(data_file_1, variable_alias_1):
def dfv_dsg_time_series_1(data_file_2, variable_alias_1):
return make_test_dfv_dsg_time_series(
1, file=data_file_1, variable_alias=variable_alias_1)
1, file=data_file_2, variable_alias=variable_alias_1)


@pytest.fixture(scope='function')
def dfv_dsg_time_series_2(data_file_1, variable_alias_2):
def dfv_dsg_time_series_2(data_file_2, variable_alias_2):
return make_test_dfv_dsg_time_series(
2, file=data_file_1, variable_alias=variable_alias_2)
2, file=data_file_2, variable_alias=variable_alias_2)


# Station
Expand Down
Loading