Switch from stdlib-logging to Loguru
ehrenfeu committed Jun 20, 2023
1 parent 5037b1b commit ca951e1
Showing 8 changed files with 176 additions and 159 deletions.
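
The change applies one mechanical pattern across the touched modules: the per-module logging.getLogger(__name__) logger with %-style placeholders is replaced by loguru's shared logger object (imported as log) with {}-style placeholders. A minimal before/after sketch of that pattern (the example value is illustrative, not taken from the diff):

# before: stdlib logging, per-module logger, %-style lazy formatting
import logging

LOG = logging.getLogger(__name__)
LOG.debug("New starttime: %s", "2023-06-20 09:00")

# after: loguru's shared logger, {}-style lazy formatting
from loguru import logger as log

log.debug("New starttime: {}", "2023-06-20 09:00")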
1 change: 1 addition & 0 deletions pyproject.toml
@@ -20,6 +20,7 @@ repository = "https://github.com/imcf/pyppms"
[tool.poetry.dependencies]
python = "^3.9"
requests = "^2.31.0"
loguru = "^0.7.0"

[tool.poetry.group.dev.dependencies]
black = "^23.3.0"
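With loguru declared as a dependency, pyppms messages are emitted through loguru's default stderr handler without any setup; an application embedding pyppms can replace that with its own sinks. A sketch under the assumption that a consumer wants INFO-level console output plus a debug log file (file name, levels and rotation are illustrative):

# Sketch: routing pyppms / loguru output in a consuming application.
# The sink path, levels and rotation below are illustrative assumptions.
import sys

from loguru import logger

logger.remove()                       # drop loguru's default stderr handler
logger.add(sys.stderr, level="INFO")  # console: INFO and above
logger.add("pyppms.log", level="DEBUG", rotation="1 week")  # hypothetical file sink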
17 changes: 8 additions & 9 deletions src/pyppms/booking.py
@@ -1,11 +1,10 @@
"""Module representing bookings / reservations in PPMS."""

import logging
from datetime import datetime

from .common import time_rel_to_abs
from loguru import logger as log

LOG = logging.getLogger(__name__)
from .common import time_rel_to_abs


class PpmsBooking:
Expand Down Expand Up @@ -62,10 +61,10 @@ def __init__(self, text, booking_type, system_id):
self.endtime = endtime
self.session = lines[2]
except Exception as err:
LOG.error("Parsing booking response failed (%s), text was:\n%s", err, text)
log.error("Parsing booking response failed ({}), text was:\n{}", err, text)
raise

LOG.debug(str(self))
log.debug(str(self))

@classmethod
def from_runningsheet(cls, entry, system_id, username, date):
Expand Down Expand Up @@ -96,8 +95,8 @@ def from_runningsheet(cls, entry, system_id, username, date):
booking.starttime_fromstr(entry["Start time"], date)
booking.endtime_fromstr(entry["End time"], date)
except Exception as err:
LOG.error(
"Parsing runningsheet entry failed (%s), text was:\n%s", err, entry
log.error(
"Parsing runningsheet entry failed ({}), text was:\n{}", err, entry
)
raise

@@ -123,7 +122,7 @@ def starttime_fromstr(self, time_str, date=None):
microsecond=0,
)
self.starttime = start
LOG.debug("New starttime: %s", self)
log.debug("New starttime: {}", self)

def endtime_fromstr(self, time_str, date=None):
"""Change the ending time and / or day of a booking.
@@ -145,7 +144,7 @@ def endtime_fromstr(self, time_str, date=None):
microsecond=0,
)
self.endtime = end
LOG.debug("New endtime: %s", self)
log.debug("New endtime: {}", self)

def __str__(self):
def fmt_time(time):
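Because every module now logs through loguru's single global logger, a consuming application can silence or re-enable everything this package emits by name. The diff does not show pyppms calling disable() itself, so the snippet below is purely a consumer-side sketch:

# Sketch: muting / re-enabling pyppms log records from an application.
from loguru import logger

logger.disable("pyppms")   # drop all records originating from the pyppms package
# ... calls into pyppms stay quiet here ...
logger.enable("pyppms")    # opt back in, e.g. while debugging an integration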
27 changes: 13 additions & 14 deletions src/pyppms/common.py
@@ -3,11 +3,10 @@
# pylint: disable-msg=fixme

from datetime import datetime, timedelta
import logging
import csv
from io import StringIO

LOG = logging.getLogger(__name__)
from loguru import logger as log


def process_response_values(values):
Expand Down Expand Up @@ -74,28 +73,28 @@ def dict_from_single_response(text, graceful=True):
try:
lines = list(csv.reader(StringIO(text), delimiter=","))
if len(lines) != 2:
LOG.warning("Response expected to have exactly two lines: %s", text)
log.warning("Response expected to have exactly two lines: {}", text)
if not graceful:
raise ValueError("Invalid response format!")
header = lines[0]
data = lines[1]
process_response_values(data)
if len(header) != len(data):
msg = "Parsing CSV failed, mismatch of header vs. data fields count"
LOG.warning("%s (%s vs. %s)", msg, len(header), len(data))
log.warning("{} ({} vs. {})", msg, len(header), len(data))
if not graceful:
raise ValueError(msg)
minimum = min(len(header), len(data))
if minimum < len(header):
LOG.warning("Discarding header-fields: %s", header[minimum:])
log.warning("Discarding header-fields: {}", header[minimum:])
header = header[:minimum]
else:
LOG.warning("Discarding data-fields: %s", data[minimum:])
log.warning("Discarding data-fields: {}", data[minimum:])
data = data[:minimum]

except Exception as err:
msg = f"Unable to parse data returned by PUMAPI: {text} - ERROR: {err}"
LOG.error(msg)
log.error(msg)
raise ValueError(msg) from err

parsed = dict(zip(header, data))
@@ -135,7 +134,7 @@ def parse_multiline_response(text, graceful=True):
try:
lines = text.splitlines()
if len(lines) < 2:
LOG.warning("Response expected to have two or more lines: %s", text)
log.warning("Response expected to have two or more lines: {}", text)
if not graceful:
raise ValueError("Invalid response format!")
return parsed
@@ -152,32 +151,32 @@ def parse_multiline_response(text, graceful=True):
lines_min = min(lines_min, len(data))
if len(header) != len(data):
msg = "Parsing CSV failed, mismatch of header vs. data fields count"
LOG.warning("%s (%s vs. %s)", msg, len(header), len(data))
log.warning("{} ({} vs. {})", msg, len(header), len(data))
if not graceful:
raise ValueError(msg)

minimum = min(len(header), len(data))
if minimum < len(header):
LOG.warning("Discarding header-fields: %s", header[minimum:])
log.warning("Discarding header-fields: {}", header[minimum:])
header = header[:minimum]
else:
LOG.warning("Discarding data-fields: %s", data[minimum:])
log.warning("Discarding data-fields: {}", data[minimum:])
data = data[:minimum]

details = dict(zip(header, data))
# LOG.debug(details)
# log.debug(details)
parsed.append(details)

if lines_min != lines_max:
msg = (
"Inconsistent data detected, not all dicts will have the "
"same number of elements!"
)
LOG.warning(msg)
log.warning(msg)

except Exception as err:
msg = f"Unable to parse data returned by PUMAPI: {text} - ERROR: {err}"
LOG.error(msg)
log.error(msg)
raise ValueError(msg) from err

return parsed
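The warnings that dict_from_single_response() and parse_multiline_response() emit in graceful mode now also travel through loguru, so they can be captured with a callable sink, for example in a test. A sketch using an illustrative, deliberately mismatched response; the exact parsed values depend on process_response_values(), which is not shown in this hunk:

# Sketch: capturing the graceful-mode warnings with a list sink.  The response
# text is made up; the printed dict assumes the values pass through unchanged.
from loguru import logger

from pyppms.common import dict_from_single_response

captured = []
sink_id = logger.add(captured.append, level="WARNING", format="{message}")

# Header has three fields but the data row only two, so graceful mode logs
# warnings about the mismatch and the discarded header field.
result = dict_from_single_response("id,name,email\r\n42,someuser\r\n", graceful=True)

logger.remove(sink_id)
print(result)    # e.g. {'id': '42', 'name': 'someuser'}
print(captured)  # the formatted warning messages, one string per record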
