Merge branch 'main' of github.com:iiasa/ixmp4
meksor committed Oct 2, 2024
2 parents 3242e5f + 34021ec commit 353b744
Showing 16 changed files with 132 additions and 58 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/deploy.yaml
@@ -11,7 +11,7 @@ jobs:
timeout-minutes: 15
runs-on: ubuntu-latest
permissions:
id-token: write
deployments: write
steps:
- if: github.event_name == 'release'
uses: chrnorm/deployment-action@v2
30 changes: 18 additions & 12 deletions ixmp4/cli/platforms.py
@@ -167,24 +167,30 @@ def list_():
)
)
def upgrade():
utils.echo(
f"Establishing self-signed admin connection to '{settings.manager_url}'."
)
if settings.managed:
utils.echo(
f"Establishing self-signed admin connection to '{settings.manager_url}'."
)
manager_conf = ManagerConfig(
str(settings.manager_url),
SelfSignedAuth(settings.secret_hs256),
remote=False,
)
platform_list = manager_conf.list_platforms()
else:
platform_list = settings.toml.list_platforms()

manager_conf = ManagerConfig(
str(settings.manager_url), SelfSignedAuth(settings.secret_hs256), remote=False
)
for m in manager_conf.list_platforms():
if m.dsn.startswith("http"):
for p in platform_list:
if p.dsn.startswith("http"):
# This should probably never happen unless the manager registers an
# external rest platform.
utils.echo(f"Skipping '{m.name}' because it is a REST platform.")
utils.echo(f"Skipping '{p.name}' because it is a REST platform.")
else:
utils.echo(f"Upgrading manager platform '{m.name}' with dsn '{m.dsn}'...")
utils.echo(f"Upgrading platform '{p.name}' with dsn '{p.dsn}'...")
try:
alembic.upgrade_database(m.dsn, "head")
alembic.upgrade_database(p.dsn, "head")
except OperationalError as e:
utils.echo(f"Skipping '{m.name}' because of an error: {str(e)}")
utils.echo(f"Skipping '{p.name}' because of an error: {str(e)}")


@app.command(
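A consolidated view of the refactored command is easier to follow than the interleaved hunks; the sketch below is assembled from the diff above and assumes the module's existing imports (settings, ManagerConfig, SelfSignedAuth, alembic, utils, OperationalError) rather than redefining them.

# Sketch of the resulting upgrade() command, assembled from the hunks above.
# All names besides this function come from ixmp4.cli.platforms and are assumed here.
def upgrade():
    if settings.managed:
        # Managed mode: ask the manager service for the platforms to upgrade.
        utils.echo(
            f"Establishing self-signed admin connection to '{settings.manager_url}'."
        )
        manager_conf = ManagerConfig(
            str(settings.manager_url),
            SelfSignedAuth(settings.secret_hs256),
            remote=False,
        )
        platform_list = manager_conf.list_platforms()
    else:
        # Unmanaged mode: fall back to the platforms configured in the local TOML file.
        platform_list = settings.toml.list_platforms()

    for p in platform_list:
        if p.dsn.startswith("http"):
            # REST platforms cannot be upgraded from here; skip them.
            utils.echo(f"Skipping '{p.name}' because it is a REST platform.")
        else:
            utils.echo(f"Upgrading platform '{p.name}' with dsn '{p.dsn}'...")
            try:
                alembic.upgrade_database(p.dsn, "head")
            except OperationalError as e:
                utils.echo(f"Skipping '{p.name}' because of an error: {str(e)}")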
4 changes: 2 additions & 2 deletions ixmp4/conf/logging/server.json
@@ -30,12 +30,12 @@
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "DEBUG",
"level": "INFO",
"formatter": "generic"
}
},
"root": {
"level": "DEBUG",
"level": "INFO",
"handlers": ["console"]
}
}
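For orientation, a dict-style config like this is consumed by the standard-library logging machinery; the snippet below is a minimal sketch of that pattern and assumes the file is applied via logging.config.dictConfig — how ixmp4 actually loads it is not part of this diff.

# Minimal sketch: applying a JSON logging config with the standard library.
# The path and loading code are illustrative assumptions, not ixmp4's own wiring.
import json
import logging
import logging.config

with open("ixmp4/conf/logging/server.json") as f:
    logging.config.dictConfig(json.load(f))

logger = logging.getLogger("ixmp4.server")
logger.debug("dropped now that the root level is INFO")
logger.info("still emitted through the console handler")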
17 changes: 11 additions & 6 deletions ixmp4/core/exceptions.py
@@ -178,12 +178,17 @@ class InvalidCredentials(IxmpError):
http_error_name = "invalid_credentials"


# == Optimization ==


class OptimizationDataValidationError(IxmpError):
http_status_code = 422
http_error_name = "optimization_data_validation_error"


# == Optimization.Table ==


class OptimizationTableDataKeysNotUnique(NotUnique):
_message = (
"The keys of the Table's data are not unique. Please consider using "
"`constrained_to_indexsets` to specify the IndexSet identifiers."
)
http_error_name = "optimization_table_data_keys_not_unique"
class OptimizationTableUsageError(IxmpError):
http_status_code = 422
http_error_name = "optimization_table_usage_error"
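Both new exception classes follow the usual IxmpError pattern: a 422 status code plus a machine-readable error name, so callers can catch them explicitly instead of a bare ValueError. A small usage sketch, mirroring the updated tests/core/test_indexset.py further down; the indexset object is assumed to exist already.

# Sketch: catching the new optimization validation error explicitly.
# `indexset` stands for an existing IndexSet, as created in the tests below.
from ixmp4.core.exceptions import OptimizationDataValidationError

try:
    indexset.add(["baz", "baz"])  # duplicate elements are rejected by the validator
except OptimizationDataValidationError as e:
    # 422 / "optimization_data_validation_error", as defined above
    print(e.http_status_code, e.http_error_name)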
7 changes: 2 additions & 5 deletions ixmp4/data/api/base.py
@@ -167,10 +167,7 @@ def retry(max_retries=max_retries) -> dict | list | None:
**kwargs,
)

if params is None:
params = {}
else:
params = self.sanitize_params(params)
params = self.sanitize_params(params) if params else {}

try:
res = self.backend.client.request(
@@ -181,7 +178,7 @@ def retry(max_retries=max_retries) -> dict | list | None:
**kwargs,
)
except httpx.ReadTimeout:
logger.warn("Read timeout, retrying request...")
logger.warning("Read timeout, retrying request...")
return retry()

return self._handle_response(res, retry)
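The replaced if/else and the new conditional expression cover the same cases; the only subtlety is that an empty dict now skips the sanitizer call entirely. A quick standalone check with a dummy sanitizer — the real sanitize_params is a method on the API repository:

# Sketch: old branch vs. new one-liner, using a stand-in for sanitize_params.
def sanitize_params(params: dict) -> dict:
    return {k: v for k, v in params.items() if v is not None}

def old_style(params):
    if params is None:
        params = {}
    else:
        params = sanitize_params(params)
    return params

def new_style(params):
    # Empty dicts are mapped straight to {} without calling the sanitizer.
    return sanitize_params(params) if params else {}

assert old_style(None) == new_style(None) == {}
assert old_style({"a": 1, "b": None}) == new_style({"a": 1, "b": None}) == {"a": 1}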
1 change: 1 addition & 0 deletions ixmp4/data/backend/api.py
@@ -153,6 +153,7 @@ def make_client(self, rest_url: str, auth: BaseAuth):
self.client = TestClient(
app=app,
base_url=rest_url,
raise_server_exceptions=False,
)

app.dependency_overrides[deps.validate_token] = deps.do_not_validate_token
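raise_server_exceptions=False changes how the Starlette/FastAPI test client reports unhandled errors: instead of re-raising the server-side exception in the calling process, the client returns an ordinary 500 response. A self-contained sketch with a toy app, not ixmp4's actual backend:

# Sketch: effect of raise_server_exceptions=False on a minimal FastAPI app.
from fastapi import FastAPI
from fastapi.testclient import TestClient

toy_app = FastAPI()

@toy_app.get("/boom")
def boom():
    raise RuntimeError("unhandled server-side error")

client = TestClient(toy_app, raise_server_exceptions=False)
response = client.get("/boom")

# The exception becomes an HTTP response instead of propagating into the test.
assert response.status_code == 500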
6 changes: 6 additions & 0 deletions ixmp4/data/db/optimization/base.py
@@ -1,3 +1,6 @@
from typing import ClassVar

from ixmp4.core.exceptions import IxmpError
from ixmp4.data import types

from .. import mixins
@@ -16,6 +19,9 @@


class BaseModel(RootBaseModel, mixins.HasCreationInfo):
# NOTE: only subclasses storing data actually define this!
DataInvalid: ClassVar[type[IxmpError]]

__abstract__ = True
table_prefix = "optimization_"

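The DataInvalid class variable works as a per-model hook: the base class only declares its type, and each data-carrying subclass plugs in a concrete IxmpError that its validators raise via self.DataInvalid(...). A condensed sketch of the pattern, stripped of the SQLAlchemy machinery used by the real models:

# Sketch of the ClassVar hook; the real BaseModel/IndexSet are SQLAlchemy models.
from typing import ClassVar

from ixmp4.core.exceptions import IxmpError, OptimizationDataValidationError

class SketchBaseModel:
    # NOTE: only subclasses storing data actually define this!
    DataInvalid: ClassVar[type[IxmpError]]

class SketchIndexSet(SketchBaseModel):
    DataInvalid: ClassVar = OptimizationDataValidationError

    def validate_elements(self, value: list[float | int | str]) -> list[float | int | str]:
        unique: set = set()
        for element in value:
            if element in unique:
                # Raises OptimizationDataValidationError through the subclass hook.
                raise self.DataInvalid(f"{element} already defined for this IndexSet!")
            unique.add(element)
        return value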
6 changes: 5 additions & 1 deletion ixmp4/data/db/optimization/indexset/model.py
@@ -3,6 +3,7 @@
from sqlalchemy.orm import validates

from ixmp4 import db
from ixmp4.core.exceptions import OptimizationDataValidationError
from ixmp4.data import types
from ixmp4.data.abstract import optimization as abstract

@@ -12,6 +13,7 @@
class IndexSet(base.BaseModel):
NotFound: ClassVar = abstract.IndexSet.NotFound
NotUnique: ClassVar = abstract.IndexSet.NotUnique
DataInvalid: ClassVar = OptimizationDataValidationError
DeletionPrevented: ClassVar = abstract.IndexSet.DeletionPrevented

elements: types.JsonList = db.Column(db.JsonType, nullable=False, default=[])
@@ -21,7 +23,9 @@ def validate_elements(self, key, value: list[float | int | str]):
unique = set()
for element in value:
if element in unique:
raise ValueError(f"{element} already defined for IndexSet {self.name}!")
raise self.DataInvalid(
f"{element} already defined for IndexSet {self.name}!"
)
else:
unique.add(element)
return value
6 changes: 5 additions & 1 deletion ixmp4/data/db/optimization/table/model.py
@@ -4,6 +4,7 @@
from sqlalchemy.orm import validates

from ixmp4 import db
from ixmp4.core.exceptions import OptimizationDataValidationError
from ixmp4.data import types
from ixmp4.data.abstract import optimization as abstract

@@ -14,6 +15,7 @@ class Table(base.BaseModel):
# NOTE: These might be mixin-able, but would require some abstraction
NotFound: ClassVar = abstract.Table.NotFound
NotUnique: ClassVar = abstract.Table.NotUnique
DataInvalid: ClassVar = OptimizationDataValidationError
DeletionPrevented: ClassVar = abstract.Table.DeletionPrevented

# constrained_to_indexsets: ClassVar[list[str] | None] = None
@@ -22,10 +24,12 @@
columns: types.Mapped[list["Column"]] = db.relationship()
data: types.JsonDict = db.Column(db.JsonType, nullable=False, default={})

# TODO: should we pass self to validate_data to raise more specific errors?

@validates("data")
def validate_data(self, key, data: dict[str, Any]):
return utils.validate_data(
key=key,
host=self,
data=data,
columns=self.columns,
)
9 changes: 6 additions & 3 deletions ixmp4/data/db/optimization/table/repository.py
@@ -3,6 +3,7 @@
import pandas as pd

from ixmp4 import db
from ixmp4.core.exceptions import OptimizationTableUsageError
from ixmp4.data.abstract import optimization as abstract
from ixmp4.data.auth.decorators import guard

@@ -19,6 +20,8 @@ class TableRepository(
):
model_class = Table

UsageError = OptimizationTableUsageError

def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.docs = TableDocsRepository(*args, **kwargs)
@@ -108,16 +111,16 @@ def create(
if isinstance(constrained_to_indexsets, str):
constrained_to_indexsets = list(constrained_to_indexsets)
if column_names and len(column_names) != len(constrained_to_indexsets):
raise ValueError(
raise self.UsageError(
"`constrained_to_indexsets` and `column_names` not equal in length! "
"Please provide the same number of entries for both!"
)
# TODO: activate something like this if each column must be indexed by a unique
# indexset
# if len(constrained_to_indexsets) != len(set(constrained_to_indexsets)):
# raise ValueError("Each dimension must be constrained to a unique indexset!") # noqa
# raise self.UsageError("Each dimension must be constrained to a unique indexset!") # noqa
if column_names and len(column_names) != len(set(column_names)):
raise ValueError("The given `column_names` are not unique!")
raise self.UsageError("The given `column_names` are not unique!")

table = super().create(
run_id=run_id,
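Seen from the caller, the change means that bad create() arguments for a Table now surface as OptimizationTableUsageError (HTTP 422) instead of a plain ValueError. A hedged usage sketch — the run object and the facade call run.optimization.tables.create(...) are assumed from the existing test layout, not shown in this diff:

# Sketch: mismatched column_names now raise the dedicated usage error.
# `run` stands for an existing ixmp4 Run whose IndexSets are already created.
import pytest

from ixmp4.core.exceptions import OptimizationTableUsageError

with pytest.raises(OptimizationTableUsageError):
    run.optimization.tables.create(
        "Table",
        constrained_to_indexsets=["Indexset 1", "Indexset 2"],
        column_names=["Column 1"],  # one name for two indexsets -> rejected
    )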
20 changes: 14 additions & 6 deletions ixmp4/data/db/optimization/utils.py
@@ -2,6 +2,8 @@

import pandas as pd

from . import base

if TYPE_CHECKING:
from .column import Column

@@ -17,37 +19,43 @@ def collect_indexsets_to_check(
return collection


def validate_data(key, data: dict[str, Any], columns: list["Column"]):
def validate_data(host: base.BaseModel, data: dict[str, Any], columns: list["Column"]):
data_frame: pd.DataFrame = pd.DataFrame.from_dict(data)
# TODO for all of the following, we might want to create unique exceptions
# Could we make both more specific by specifying missing/extra columns?
if len(data_frame.columns) < len(columns):
raise ValueError(
raise host.DataInvalid(
f"While handling {host.__str__()}: \n"
f"Data is missing for some Columns! \n Data: {data} \n "
f"Columns: {[column.name for column in columns]}"
)
elif len(data_frame.columns) > len(columns):
raise ValueError(
raise host.DataInvalid(
f"While handling {host.__str__()}: \n"
f"Trying to add data to unknown Columns! \n Data: {data} \n "
f"Columns: {[column.name for column in columns]}"
)

# We could make this more specific maybe by pointing to the missing values
if data_frame.isna().any(axis=None):
raise ValueError(
raise host.DataInvalid(
f"While handling {host.__str__()}: \n"
"The data is missing values, please make sure it "
"does not contain None or NaN, either!"
)
# We can make this more specific e.g. highlighting all duplicate rows via
# pd.DataFrame.duplicated(keep="False")
if data_frame.value_counts().max() > 1:
raise ValueError("The data contains duplicate rows!")
raise host.DataInvalid(
f"While handling {host.__str__()}: \n" "The data contains duplicate rows!"
)

# Can we make this more specific? Iterating over columns; if any is False,
# return its name or something?
limited_to_indexsets = collect_indexsets_to_check(columns=columns)
if not data_frame.isin(limited_to_indexsets).all(axis=None):
raise ValueError(
raise host.DataInvalid(
f"While handling {host.__str__()}: \n"
"The data contains values that are not allowed as per the IndexSets "
"and Columns it is constrained to!"
)
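The checks inside validate_data are plain pandas operations; the standalone sketch below reproduces the last two (duplicate rows and membership in the constraining IndexSets) with toy data standing in for real Columns.

# Sketch of the pandas checks used above, with hard-coded stand-ins for Columns/IndexSets.
import pandas as pd

data = {"technology": ["wind", "wind"], "region": ["World", "World"]}
data_frame = pd.DataFrame.from_dict(data)

# Duplicate-row check: value_counts() counts identical rows across all columns.
has_duplicates = data_frame.value_counts().max() > 1  # True for the toy data above

# Membership check: each column's values must come from its constraining IndexSet.
limited_to_indexsets = {"technology": ["wind", "solar"], "region": ["World"]}
all_allowed = bool(data_frame.isin(limited_to_indexsets).all(axis=None))  # True here

print(has_duplicates, all_allowed)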
2 changes: 1 addition & 1 deletion ixmp4/server/__init__.py
@@ -7,7 +7,7 @@
ixmp4 server start [--host 127.0.0.1] [--port 8000]
This will start ixmp4’s asgi server. Check
``http://127.0.0.1:8000/v1/<platform>/docs``.
``http://127.0.0.1:8000/v1/<platform>/docs/``.
"""

5 changes: 3 additions & 2 deletions tests/core/test_indexset.py
@@ -4,6 +4,7 @@

import ixmp4
from ixmp4.core import IndexSet
from ixmp4.core.exceptions import OptimizationDataValidationError

from ..utils import create_indexsets_for_run

@@ -67,10 +68,10 @@ def test_add_elements(self, platform: ixmp4.Platform):

assert indexset_1.elements == indexset_2.elements

with pytest.raises(ValueError):
with pytest.raises(OptimizationDataValidationError):
indexset_1.add(["baz", "foo"])

with pytest.raises(ValueError):
with pytest.raises(OptimizationDataValidationError):
indexset_2.add(["baz", "baz"])

indexset_1.add(1)