diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index b7b9254299..0b3e9aae5e 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -46,7 +46,7 @@ workflow: variables: GIT_CLONE_PATH: '$CI_BUILDS_DIR/$CI_COMMIT_SHA/$CI_JOB_ID' YNH_SOURCE: "https://github.com/yunohost" - YNH_DEBIAN: "bookworm" + YNH_DEBIAN: "trixie" YNH_SKIP_DIAGNOSIS_DURING_UPGRADE: "true" include: diff --git a/.gitlab/ci/build.gitlab-ci.yml b/.gitlab/ci/build.gitlab-ci.yml index aedef5ac8c..a9a16ca4aa 100644 --- a/.gitlab/ci/build.gitlab-ci.yml +++ b/.gitlab/ci/build.gitlab-ci.yml @@ -2,7 +2,7 @@ stage: build needs: - job: actionsmap - - job: invalidcode311 + - job: invalidcode312 image: "build-and-lint" variables: YNH_BUILD_DIR: "$GIT_CLONE_PATH/build" diff --git a/.gitlab/ci/lint.gitlab-ci.yml b/.gitlab/ci/lint.gitlab-ci.yml index 6edaf2c98b..2d168cee1a 100644 --- a/.gitlab/ci/lint.gitlab-ci.yml +++ b/.gitlab/ci/lint.gitlab-ci.yml @@ -11,27 +11,27 @@ actionsmap: - python3 -c 'import yaml; yaml.safe_load(open("share/actionsmap.yml"))' - python3 -c 'import yaml; yaml.safe_load(open("share/actionsmap-portal.yml"))' -lint311: +lint312: stage: lint image: "build-and-lint" needs: [] allow_failure: true script: - - tox -e py311-lint + - tox -e py312-lint -invalidcode311: +invalidcode312: stage: lint image: "build-and-lint" needs: [] script: - - tox -e py311-invalidcode + - tox -e py312-invalidcode mypy: stage: lint image: "build-and-lint" needs: [] script: - - tox -e py311-mypy + - tox -e py312-mypy i18n-keys: stage: lint diff --git a/conf/nginx/global.conf b/conf/nginx/global.conf index b3a5f356a8..e69de29bb2 100644 --- a/conf/nginx/global.conf +++ b/conf/nginx/global.conf @@ -1 +0,0 @@ -server_tokens off; diff --git a/debian/control b/debian/control index fe5cb5a20c..cbe7c08fed 100644 --- a/debian/control +++ b/debian/control @@ -16,7 +16,7 @@ Depends: python3-all (>= 3.11), , python3-toml, python3-packaging, python3-publicsuffix2 , python3-ldap, python3-zeroconf (>= 0.47), python3-lexicon, , 
python3-cryptography, python3-jwt, python3-passlib, python3-magic - , python-is-python3, python3-pydantic, python3-email-validator + , python-is-python3, python3-email-validator, python3-pydantic, python3-pydantic-extra-types , nginx, nginx-extras (>=1.22) , apt, apt-transport-https, apt-utils, aptitude, dirmngr , openssh-server, iptables, fail2ban, bind9-dnsutils @@ -39,12 +39,13 @@ Conflicts: iptables-persistent , bind9 , openresolv , systemd-resolved - , nginx-extras (>= 1.23) - , openssl (>= 3.1) - , slapd (>= 2.6) - , dovecot-core (>= 1:2.4) - , fail2ban (>= 1.1) - , iptables (>= 1.8.10) +# FIXME: These versions should be updated to trixie-backports versions +# , nginx-extras (>= 1.23) +# , openssl (>= 3.1) +# , slapd (>= 2.6) +# , dovecot-core (>= 1:2.4) +# , fail2ban (>= 1.1) +# , iptables (>= 1.8.10) Description: manageable and configured self-hosting server YunoHost aims to make self-hosting accessible to everyone. It configures an email, Web and IM server alongside a LDAP base. 
It also provides diff --git a/helpers/helpers.v1.d/mysql b/helpers/helpers.v1.d/mysql index 846e01fe97..bf15403861 100644 --- a/helpers/helpers.v1.d/mysql +++ b/helpers/helpers.v1.d/mysql @@ -120,7 +120,7 @@ ynh_mysql_drop_db() { # # usage: ynh_mysql_dump_db --database=database # | arg: -d, --database= - the database name to dump -# | ret: The mysqldump output +# | ret: The mariadb-dump output # # example: ynh_mysql_dump_db --database=roundcube > ./dump.sql # @@ -133,7 +133,7 @@ ynh_mysql_dump_db() { # Manage arguments with getopts ynh_handle_getopts_args "$@" - mysqldump --single-transaction --skip-dump-date --routines "$database" + mariadb-dump --single-transaction --skip-dump-date --routines "$database" } # Create a user @@ -184,7 +184,7 @@ ynh_mysql_user_exists() { # ynh_mysql_database_exists() { local database=$1 - mysqlshow | grep -qE "^|\s+$database\s+|" + mariadb-show | grep -qE "^|\s+$database\s+|" } # Drop a user diff --git a/helpers/helpers.v2.1.d/mysql b/helpers/helpers.v2.1.d/mysql index bc532a536e..bd23dc16b1 100644 --- a/helpers/helpers.v2.1.d/mysql +++ b/helpers/helpers.v2.1.d/mysql @@ -58,13 +58,13 @@ ynh_mysql_drop_db() { # # usage: ynh_mysql_dump_db database # | arg: database - the database name to dump (by default, $db_name) -# | ret: The mysqldump output +# | ret: The mariadb-dump output # # example: ynh_mysql_dump_db "roundcube" > ./dump.sql # ynh_mysql_dump_db() { local database=${1:-$db_name} - mysqldump --single-transaction --skip-dump-date --routines "$database" + mariadb-dump --single-transaction --skip-dump-date --routines "$database" } # Create a user @@ -101,7 +101,7 @@ ynh_mysql_user_exists() { # ynh_mysql_database_exists() { local database=$1 - mysqlshow | grep -q "^| $database " + mariadb-show | grep -q "^| $database " } # Drop a user diff --git a/hooks/conf_regen/10-apt b/hooks/conf_regen/10-apt index 769ac1b53e..2a896ea334 100755 --- a/hooks/conf_regen/10-apt +++ b/hooks/conf_regen/10-apt @@ -11,7 +11,15 @@ do_pre_regen() { # Add 
sury mkdir -p ${pending_dir}/etc/apt/sources.list.d/ - echo "deb [signed-by=/etc/apt/trusted.gpg.d/extra_php_version.gpg] https://packages.sury.org/php/ $(lsb_release --codename --short) main" > "${pending_dir}/etc/apt/sources.list.d/extra_php_version.list" + + # FIXME: sury doesn't support trixie yet + case "$(lsb_release --codename --short)" in + bullseye) sury_distro=bullseye ;; + bookworm) sury_distro=bookworm ;; + trixie) sury_distro=bookworm ;; + esac + + echo "deb [signed-by=/etc/apt/trusted.gpg.d/extra_php_version.gpg] https://packages.sury.org/php/ $sury_distro main" > "${pending_dir}/etc/apt/sources.list.d/extra_php_version.list" # Ban some packages from sury echo " diff --git a/src/app.py b/src/app.py index 1642f938b8..94be7c7a12 100644 --- a/src/app.py +++ b/src/app.py @@ -1121,7 +1121,7 @@ def app_install( # Retrieve arguments list for install script raw_options = manifest["install"] options, form = ask_questions_and_parse_answers(raw_options, prefilled_answers=args) - args = form.dict(exclude_none=True) + args = form.model_dump(exclude_none=True) # Validate domain / path availability for webapps # (ideally this should be handled by the resource system for manifest v >= 2 @@ -1892,7 +1892,7 @@ def _apply( previous_settings: dict[str, Any], exclude: Union["AbstractSetIntStr", "MappingIntStrAny", None] = None, ) -> None: - env = {key: str(value) for key, value in form.dict().items()} + env = {key: str(value) for key, value in form.model_dump().items()} return_content = self._call_config_script("apply", env=env) # If the script returned validation error @@ -1908,7 +1908,7 @@ def _apply( ) def _run_action(self, form: "FormModel", action_id: str) -> None: - env = {key: str(value) for key, value in form.dict().items()} + env = {key: str(value) for key, value in form.model_dump().items()} self._call_config_script(action_id, env=env) def _call_config_script( diff --git a/src/domain.py b/src/domain.py index 4bc4478c58..bc6cc7eba9 100644 --- a/src/domain.py +++ 
b/src/domain.py @@ -797,7 +797,9 @@ def _apply( exclude: Union["AbstractSetIntStr", "MappingIntStrAny", None] = None, ) -> None: next_settings = { - k: v for k, v in form.dict().items() if previous_settings.get(k) != v + k: v + for k, v in form.model_dump().items() + if previous_settings.get(k) != v } if "default_app" in next_settings: @@ -863,7 +865,7 @@ def _apply( # that can be read by the portal API. # FIXME remove those from the config panel saved values? - portal_values = form.dict(include=set(portal_options)) + portal_values = form.model_dump(include=set(portal_options)) # Remove logo from values else filename will replace b64 content if "portal_logo" in portal_values: portal_values.pop("portal_logo") diff --git a/src/migrations/0027_migrate_to_bookworm.py b/src/migrations/0027_migrate_to_bookworm.py deleted file mode 100644 index b26128e364..0000000000 --- a/src/migrations/0027_migrate_to_bookworm.py +++ /dev/null @@ -1,503 +0,0 @@ -import glob -import os -import subprocess -from time import sleep -from datetime import date - -# Explicitly import packages to prevent an issue that may arise later because of python3.9 being replaced by 3.11 in the middle of the upgrade etc -import _strptime # noqa: F401 -import _ldap # noqa: F401 - -from moulinette import Moulinette, m18n -from moulinette.utils.process import call_async_output -from yunohost.utils.error import YunohostError -from yunohost.tools import _write_migration_state -from moulinette.utils.process import check_output -from moulinette.utils.filesystem import read_file, write_to_file - -from yunohost.tools import ( - Migration, - tools_update, -) -from yunohost.app import unstable_apps -from yunohost.regenconf import manually_modified_files, regen_conf -from yunohost.utils.system import ( - free_space_in_directory, - get_ynh_package_version, - _list_upgradable_apt_packages, - aptitude_with_progress_bar, -) - -# getActionLogger is not there in bookworm, -# we use this try/except to make it agnostic wether 
or not we're on 11.x or 12.x -# otherwise this may trigger stupid issues -try: - from moulinette.utils.log import getActionLogger - - logger = getActionLogger("yunohost.migration") -except ImportError: - import logging - - logger = logging.getLogger("yunohost.migration") - - -N_CURRENT_DEBIAN = 11 -N_CURRENT_YUNOHOST = 11 - -VENV_REQUIREMENTS_SUFFIX = ".requirements_backup_for_bookworm_upgrade.txt" - - -def _get_all_venvs(dir, level=0, maxlevel=3): - """ - Returns the list of all python virtual env directories recursively - - Arguments: - dir - the directory to scan in - maxlevel - the depth of the recursion - level - do not edit this, used as an iterator - """ - if not os.path.exists(dir): - return [] - - result = [] - # Using os functions instead of glob, because glob doesn't support hidden folders, and we need recursion with a fixed depth - for file in os.listdir(dir): - path = os.path.join(dir, file) - if os.path.isdir(path): - activatepath = os.path.join(path, "bin", "activate") - if os.path.isfile(activatepath): - content = read_file(activatepath) - if ("VIRTUAL_ENV" in content) and ("PYTHONHOME" in content): - result.append(path) - continue - if level < maxlevel: - result += _get_all_venvs(path, level=level + 1) - return result - - -def _backup_pip_freeze_for_python_app_venvs(): - """ - Generate a requirements file for all python virtual env located inside /opt/ and /var/www/ - """ - - venvs = _get_all_venvs("/opt/") + _get_all_venvs("/var/www/") - for venv in venvs: - # Generate a requirements file from venv - # Remove pkg resources from the freeze to avoid an error during the python venv https://stackoverflow.com/a/40167445 - os.system( - f"{venv}/bin/pip freeze | grep -E -v 'pkg(-|_)resources==' > {venv}{VENV_REQUIREMENTS_SUFFIX} 2>/dev/null" - ) - - -class MyMigration(Migration): - "Upgrade the system to Debian Bookworm and Yunohost 12.x" - - mode = "manual" - - def run(self): - self.check_assertions() - - logger.info(m18n.n("migration_0027_start")) - - 
# - # Add new apt .deb signing key - # - - new_apt_key = "https://forge.yunohost.org/yunohost_bookworm.asc" - os.system( - f'wget --timeout 900 --quiet "{new_apt_key}" --output-document=- | gpg --dearmor >"/usr/share/keyrings/yunohost-bookworm.gpg"' - ) - - # Add Sury key even if extra_php_version.list was already there, - # because some old system may be using an outdated key not valid for Bookworm - # and that'll block the migration - os.system( - 'wget --timeout 900 --quiet "https://packages.sury.org/php/apt.gpg" --output-document=- | gpg --dearmor >"/etc/apt/trusted.gpg.d/extra_php_version.gpg"' - ) - - # - # Patch sources.list - # - - logger.info(m18n.n("migration_0027_patching_sources_list")) - self.patch_apt_sources_list() - - # - # Get requirements of the different venvs from python apps - # - - _backup_pip_freeze_for_python_app_venvs() - - # - # Run apt update - # - - aptitude_with_progress_bar("update") - - # Tell libc6 it's okay to restart system stuff during the upgrade - os.system( - "echo 'libc6 libraries/restart-without-asking boolean true' | debconf-set-selections" - ) - - # Stupid stuff because resolvconf later wants to edit /etc/resolv.conf and will miserably crash if it's immutable - os.system("chattr -i /etc/resolv.conf") - - # Do not restart nginx during the upgrade of nginx-common and nginx-extras ... - # c.f. https://manpages.debian.org/bullseye/init-system-helpers/deb-systemd-invoke.1p.en.html - # and zcat /usr/share/doc/init-system-helpers/README.policy-rc.d.gz - # and the code inside /usr/bin/deb-systemd-invoke to see how it calls /usr/sbin/policy-rc.d ... - # and also invoke-rc.d ... - write_to_file( - "/usr/sbin/policy-rc.d", - '#!/bin/bash\n[[ "$1" =~ "nginx" ]] && exit 101 || exit 0', - ) - os.system("chmod +x /usr/sbin/policy-rc.d") - - # Don't send an email to root about the postgresql migration. It should be handled automatically after. 
- os.system( - "echo 'postgresql-common postgresql-common/obsolete-major seen true' | debconf-set-selections" - ) - - # - # Patch yunohost conflicts - # - logger.info(m18n.n("migration_0027_patch_yunohost_conflicts")) - - self.patch_yunohost_conflicts() - - # - # Critical fix for RPI otherwise network is down after rebooting - # https://forum.yunohost.org/t/20652 - # - # FIXME : this is from buster->bullseye, do we still needed it ? - # - # if os.system("systemctl | grep -q dhcpcd") == 0: - # logger.info("Applying fix for DHCPCD ...") - # os.system("mkdir -p /etc/systemd/system/dhcpcd.service.d") - # write_to_file( - # "/etc/systemd/system/dhcpcd.service.d/wait.conf", - # "[Service]\nExecStart=\nExecStart=/usr/sbin/dhcpcd -w", - # ) - - # - # Main upgrade - # - logger.info(m18n.n("migration_0027_main_upgrade")) - - # Mark php, mariadb, metronome and rspamd as "auto" so that they may be uninstalled if they ain't explicitly wanted by app or admins - php_packages = self.get_php_packages() - aptitude_with_progress_bar( - f"markauto mariadb-server metronome rspamd {' '.join(php_packages)}" - ) - - # Hold import yunohost packages - apps_packages = self.get_apps_equivs_packages() - aptitude_with_progress_bar( - f"hold yunohost moulinette ssowat yunohost-admin {' '.join(apps_packages)}" - ) - - # Dirty hack to be able to remove rspamd because it's causing too many issues due to libluajit ... - command = "sed -i /var/lib/dpkg/status -e 's@rspamd, @@g'" - logger.debug(f"Running: {command}") - os.system(command) - - aptitude_with_progress_bar( - "full-upgrade cron rspamd- luajit- libluajit-5.1-2- --show-why -o APT::Force-LoopBreak=1 -o Dpkg::Options::='--force-confold'" - ) - - # For some reason aptitude is derping about python3 / python3-venv so try to explicitly tell to install python3.11 to replace 3.9... 
- # Note the '+M' prefix which is here to mark the packages as automatically installed - python_upgrade_list = "python3 python3.11+M python3.9- " - if os.system('dpkg --list | grep -q "^ii python3.9-venv "') == 0: - python_upgrade_list += "python3-venv+M python3.11-venv+M python3.9-venv-" - aptitude_with_progress_bar( - f"full-upgrade {python_upgrade_list} --show-why -o APT::Force-LoopBreak=1 -o Dpkg::Options::='--force-confold'" - ) - - # Full upgrade of "every" packages except the yunohost ones which are held - aptitude_with_progress_bar( - "full-upgrade --show-why -o Dpkg::Options::='--force-confold'" - ) - - # Force regenconf of nsswitch because for some reason - # /etc/nsswitch.conf is reset despite the --force-confold? It's a - # disaster because then admins cannot "sudo" >_> ... - regen_conf(names=["nsswitch"], force=True) - - if self.debian_major_version() == N_CURRENT_DEBIAN: - raise YunohostError("migration_0027_still_on_bullseye_after_main_upgrade") - - # Clean the mess - logger.info(m18n.n("migration_0027_cleaning_up")) - os.system( - "LC_ALL=C DEBIAN_FRONTEND=noninteractive APT_LISTCHANGES_FRONTEND=none apt autoremove --assume-yes" - ) - os.system("apt clean --assume-yes") - - # - # Stupid hack for stupid dnsmasq not picking up its new init.d script then breaking everything ... - # https://forum.yunohost.org/t/20676 - # - # FIXME : this is from buster->bullseye, do we still needed it ? 
- # - # if os.path.exists("/etc/init.d/dnsmasq.dpkg-dist"): - # logger.info("Copying new version for /etc/init.d/dnsmasq ...") - # os.system("cp /etc/init.d/dnsmasq.dpkg-dist /etc/init.d/dnsmasq") - - # - # Yunohost upgrade - # - logger.info(m18n.n("migration_0027_yunohost_upgrade")) - aptitude_with_progress_bar("unhold yunohost moulinette ssowat yunohost-admin") - - full_upgrade_cmd = ( - "full-upgrade --show-why -o Dpkg::Options::='--force-confold' " - ) - full_upgrade_cmd += "yunohost yunohost-admin yunohost-portal moulinette ssowat " - # This one is needed to solve aptitude derping with nginx dependencies - full_upgrade_cmd += "libluajit2-5.1-2 " - - try: - aptitude_with_progress_bar(full_upgrade_cmd) - except Exception: - # Retry after unholding the app packages, maybe it can unlock the situation idk - if apps_packages: - aptitude_with_progress_bar(f"unhold {' '.join(apps_packages)}") - aptitude_with_progress_bar(full_upgrade_cmd) - else: - # If the upgrade was sucessful, we want to unhold the apps packages - if apps_packages: - aptitude_with_progress_bar(f"unhold {' '.join(apps_packages)}") - - # Mark this migration as completed before triggering the "new" migrations - _write_migration_state(self.id, "done") - - callbacks = ( - lambda l: logger.debug("+ " + l.rstrip() + "\r"), - lambda l: logger.warning(l.rstrip()), - ) - try: - call_async_output(["yunohost", "tools", "migrations", "run"], callbacks) - except Exception as e: - logger.error(e) - - # If running from the webadmin, restart the API after a delay - if Moulinette.interface.type == "api": - logger.warning(m18n.n("migration_0027_delayed_api_restart")) - sleep(5) - # Restart the API after 10 sec (at now doesn't support sub-minute times...) - # We do this so that the API / webadmin still gets the proper HTTP response - cmd = 'at -M now >/dev/null 2>&1 <<< "sleep 10; systemctl restart nginx yunohost-api"' - # For some reason subprocess doesn't like the redirections so we have to use bash -c explicity... 
- subprocess.check_call(["bash", "-c", cmd]) - - if self.yunohost_major_version() != N_CURRENT_YUNOHOST + 1: - raise YunohostError( - "Still on YunoHost 11.x at the end of the migration, eh? Sounds like the migration didn't really complete!?", - raw_msg=True, - ) - - def debian_major_version(self): - # The python module "platform" and lsb_release are not reliable because - # on some setup, they may still return Release=9 even after upgrading to - # buster ... (Apparently this is related to OVH overriding some stuff - # with /etc/lsb-release for instance -_-) - # Instead, we rely on /etc/os-release which should be the raw info from - # the distribution... - return int( - check_output( - "grep VERSION_ID /etc/os-release | head -n 1 | tr '\"' ' ' | cut -d ' ' -f2" - ) - ) - - def yunohost_major_version(self): - return int(get_ynh_package_version("yunohost")["version"].split(".")[0]) - - def check_assertions(self): - # Be on bullseye (11.x) and yunohost 11.x - # NB : we do both check to cover situations where the upgrade crashed - # in the middle and debian version could be > 12.x but yunohost package - # would still be in 11.x... - if ( - not self.debian_major_version() == N_CURRENT_DEBIAN - and not self.yunohost_major_version() == N_CURRENT_YUNOHOST - ): - try: - # Here we try to find the previous migration log, which should be somewhat recent and be at least 10k (we keep the biggest one) - maybe_previous_migration_log_id = check_output( - "cd /var/log/yunohost/categories/operation && find -name '*migrate*.log' -size +10k -mtime -100 -exec ls -s {} \\; | sort -n | tr './' ' ' | awk '{print $2}' | tail -n 1" - ) - if maybe_previous_migration_log_id: - logger.info( - f"NB: the previous migration log id seems to be {maybe_previous_migration_log_id}. You can share it with the support team with : sudo yunohost log share {maybe_previous_migration_log_id}" - ) - except Exception: - # Yeah it's not that important ... it's to simplify support ... 
- pass - - raise YunohostError("migration_0027_not_bullseye") - - # Have > 1 Go free space on /var/ ? - if free_space_in_directory("/var/") / (1024**3) < 1.0: - raise YunohostError("migration_0027_not_enough_free_space") - - # Have > 70 MB free space on /var/ ? - if free_space_in_directory("/boot/") / (1024**2) < 70.0: - raise YunohostError( - "/boot/ has less than 70MB available. This will probably trigger a crash during the upgrade because a new kernel needs to be installed. Please look for advice on the forum on how to remove old, unused kernels to free up some space in /boot/.", - raw_msg=True, - ) - - # Check system is up to date - # (but we don't if 'bullseye' is already in the sources.list ... - # which means maybe a previous upgrade crashed and we're re-running it) - if os.path.exists("/etc/apt/sources.list") and " bookworm " not in read_file( - "/etc/apt/sources.list" - ): - tools_update(target="system") - upgradable_system_packages = list(_list_upgradable_apt_packages()) - upgradable_system_packages = [ - package["name"] for package in upgradable_system_packages - ] - upgradable_system_packages = set(upgradable_system_packages) - # Lime2 have hold packages to avoid ethernet instability - # See https://github.com/YunoHost/arm-images/commit/b4ef8c99554fd1a122a306db7abacc4e2f2942df - lime2_hold_packages = set( - [ - "armbian-firmware", - "armbian-bsp-cli-lime2", - "linux-dtb-current-sunxi", - "linux-image-current-sunxi", - "linux-u-boot-lime2-current", - "linux-image-next-sunxi", - ] - ) - if upgradable_system_packages - lime2_hold_packages: - raise YunohostError("migration_0027_system_not_fully_up_to_date") - - @property - def disclaimer(self): - # Avoid having a super long disclaimer + uncessary check if we ain't - # on bullseye / yunohost 11.x - # NB : we do both check to cover situations where the upgrade crashed - # in the middle and debian version could be 12.x but yunohost package - # would still be in 11.x... 
- if ( - not self.debian_major_version() == N_CURRENT_DEBIAN - and not self.yunohost_major_version() == N_CURRENT_YUNOHOST - ): - return None - - # Get list of problematic apps ? I.e. not official or community+working - problematic_apps = unstable_apps() - problematic_apps = "".join(["\n - " + app for app in problematic_apps]) - - # Manually modified files ? (c.f. yunohost service regen-conf) - modified_files = manually_modified_files() - modified_files = "".join(["\n - " + f for f in modified_files]) - - message = m18n.n("migration_0027_general_warning") - - message = ( - ( - "N.B.: This migration has been tested by the community over the last few months but has only been declared stable recently. If your server hosts critical services and if you are not too confident with debugging possible issues, we recommend you to wait a little bit more while we gather more feedback and polish things up. If on the other hand you are relatively confident with debugging small issues that may arise, you are encouraged to run this migration 😉!" - if date.today() < date(2025, 3, 30) - else "" - ) - + "\n\n" - + "You can read the full release note, remaining known issues and feedback from the community here: . In particular, we encourage you to pay attention to the fact that:\n" - + "- Packages `metronome` (xmpp server) and `rspamd` (mail antispam) are now independent applications available in the catalog. Make sure to explicitly install these applications after the migration if you care about those!\n" - + "- The user portal / SSO system was totally reworked. You may lose custom theming if you have any. 
However, the new system also has plenty of customization capabilities (more details in the release note).\n" - + "\n" - + message - ) - - if problematic_apps: - message += "\n\n" + m18n.n( - "migration_0027_problematic_apps_warning", - problematic_apps=problematic_apps, - ) - - if modified_files: - message += "\n\n" + m18n.n( - "migration_0027_modified_files", manually_modified_files=modified_files - ) - - return message - - def patch_apt_sources_list(self): - sources_list = glob.glob("/etc/apt/sources.list.d/*.list") - if os.path.exists("/etc/apt/sources.list"): - sources_list.append("/etc/apt/sources.list") - - # This : - # - replace single 'bullseye' occurence by 'bookworm' - # - comments lines containing "backports" - # - replace 'bullseye/updates' by 'bookworm/updates' (or same with -) - # - make sure the yunohost line has the "signed-by" thingy - # - replace "non-free" with "non-free non-free-firmware" - # Special note about the security suite: - # https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html#security-archive - for f in sources_list: - command = ( - f"sed -i {f} " - "-e 's@ bullseye @ bookworm @g' " - "-e '/backports/ s@^#*@#@' " - "-e 's@ bullseye/updates @ bookworm-security @g' " - "-e 's@ bullseye-@ bookworm-@g' " - "-e '/non-free-firmware/!s@ non-free@ non-free non-free-firmware@g' " - "-e 's@deb.*http://forge.yunohost.org@deb [signed-by=/usr/share/keyrings/yunohost-bookworm.gpg] http://forge.yunohost.org@g' " - ) - os.system(command) - - # Stupid OVH has some repo configured which dont work with next debian and break apt ... 
- os.system("rm -f /etc/apt/sources.list.d/ovh-*.list") - - def get_apps_equivs_packages(self): - command = ( - "dpkg --get-selections" - " | grep -v deinstall" - " | awk '{print $1}'" - " | { grep 'ynh-deps$' || true; }" - ) - - output = check_output(command) - - return output.split("\n") if output else [] - - def get_php_packages(self): - command = ( - "dpkg --get-selections" - " | grep -v deinstall" - " | awk '{print $1}'" - " | { grep '^php' || true; }" - ) - - output = check_output(command) - - return output.split("\n") if output else [] - - def patch_yunohost_conflicts(self): - # - # This is a super dirty hack to remove the conflicts from yunohost's debian/control file - # Those conflicts are there to prevent mistakenly upgrading critical packages - # such as dovecot, postfix, nginx, openssl, etc... usually related to mistakenly - # using backports etc. - # - # The hack consists in savagely removing the conflicts directly in /var/lib/dpkg/status - # - - # We only patch the conflict if we're on yunohost 11.x - if self.yunohost_major_version() != N_CURRENT_YUNOHOST: - return - - conflicts = check_output("dpkg-query -s yunohost | grep '^Conflicts:'").strip() - if conflicts: - # We want to keep conflicting with apache/bind9 tho - new_conflicts = "Conflicts: apache2, bind9" - - command = ( - f"sed -i /var/lib/dpkg/status -e 's@{conflicts}@{new_conflicts}@g'" - ) - logger.debug(f"Running: {command}") - os.system(command) diff --git a/src/migrations/0028_delete_legacy_xmpp_permission.py b/src/migrations/0028_delete_legacy_xmpp_permission.py deleted file mode 100644 index de5d2b9832..0000000000 --- a/src/migrations/0028_delete_legacy_xmpp_permission.py +++ /dev/null @@ -1,32 +0,0 @@ -from logging import getLogger - -from yunohost.tools import Migration - -logger = getLogger("yunohost.migration") - -################################################### -# Tools used also for restoration -################################################### - - -class 
MyMigration(Migration): - """ - Delete legacy XMPP permission - """ - - introduced_in_version = "12.0" - dependencies = [] - - ldap_migration_started = False - - @Migration.ldap_migration - def run(self, *args): - from yunohost.permission import user_permission_list, permission_delete - - self.ldap_migration_started = True - - if "xmpp.main" in user_permission_list()["permissions"]: - permission_delete("xmpp.main", force=True) - - def run_after_system_restore(self): - self.run() diff --git a/src/migrations/0029_postgresql_13_to_15.py b/src/migrations/0029_postgresql_13_to_15.py deleted file mode 100644 index f74d33a761..0000000000 --- a/src/migrations/0029_postgresql_13_to_15.py +++ /dev/null @@ -1,91 +0,0 @@ -import subprocess -import time -import os -from logging import getLogger - -from moulinette import m18n -from yunohost.utils.error import YunohostError, YunohostValidationError - -from yunohost.tools import Migration -from yunohost.utils.system import free_space_in_directory, space_used_by_directory - -logger = getLogger("yunohost.migration") - - -class MyMigration(Migration): - "Migrate DBs from Postgresql 13 to 15 after migrating to Bookworm" - - dependencies = ["migrate_to_bookworm"] - - def run(self): - if ( - os.system( - 'grep -A10 "ynh-deps" /var/lib/dpkg/status | grep -E "Package:|Depends:" | grep -B1 postgresql' - ) - != 0 - ): - logger.info("No YunoHost app seem to require postgresql... 
Skipping!") - return - - if not self.package_is_installed("postgresql-13"): - logger.warning(m18n.n("migration_0029_postgresql_13_not_installed")) - return - - if not self.package_is_installed("postgresql-15"): - raise YunohostValidationError("migration_0029_postgresql_15_not_installed") - - # Make sure there's a 13 cluster - try: - self.runcmd("pg_lsclusters | grep -q '^13 '") - except Exception: - logger.warning( - "It looks like there's not active 13 cluster, so probably don't need to run this migration" - ) - return - - if not space_used_by_directory( - "/var/lib/postgresql/13" - ) > free_space_in_directory("/var/lib/postgresql"): - raise YunohostValidationError( - "migration_0029_not_enough_space", path="/var/lib/postgresql/" - ) - - self.runcmd("systemctl stop postgresql") - time.sleep(3) - self.runcmd( - "LC_ALL=C pg_dropcluster --stop 15 main || true" - ) # We do not trigger an exception if the command fails because that probably means cluster 15 doesn't exists, which is fine because it's created during the pg_upgradecluster) - time.sleep(3) - self.runcmd("LC_ALL=C pg_upgradecluster -m upgrade 13 main -v 15") - self.runcmd("LC_ALL=C pg_dropcluster --stop 13 main") - self.runcmd("systemctl start postgresql") - - def package_is_installed(self, package_name): - (returncode, out, err) = self.runcmd( - "dpkg --list | grep '^ii ' | grep -q -w {}".format(package_name), - raise_on_errors=False, - ) - return returncode == 0 - - def runcmd(self, cmd, raise_on_errors=True): - logger.debug("Running command: " + cmd) - - p = subprocess.Popen( - cmd, - shell=True, - executable="/bin/bash", - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - - out, err = p.communicate() - returncode = p.returncode - if raise_on_errors and returncode != 0: - raise YunohostError( - "Failed to run command '{}'.\nreturncode: {}\nstdout:\n{}\nstderr:\n{}\n".format( - cmd, returncode, out, err - ) - ) - - out = out.strip().split(b"\n") - return (returncode, out, err) diff --git 
a/src/migrations/0030_rebuild_python_venv_in_bookworm.py b/src/migrations/0030_rebuild_python_venv_in_bookworm.py deleted file mode 100644 index 4534d58834..0000000000 --- a/src/migrations/0030_rebuild_python_venv_in_bookworm.py +++ /dev/null @@ -1,188 +0,0 @@ -import os -from logging import getLogger - -from moulinette import m18n -from moulinette.utils.process import call_async_output - -from yunohost.tools import Migration, tools_migrations_state -from moulinette.utils.filesystem import rm - - -logger = getLogger("yunohost.migration") - -VENV_REQUIREMENTS_SUFFIX = ".requirements_backup_for_bookworm_upgrade.txt" - - -def extract_app_from_venv_path(venv_path): - venv_path = venv_path.replace("/var/www/", "") - venv_path = venv_path.replace("/opt/yunohost/", "") - venv_path = venv_path.replace("/opt/", "") - return venv_path.split("/")[0] - - -def _get_all_venvs(dir, level=0, maxlevel=3): - """ - Returns the list of all python virtual env directories recursively - - Arguments: - dir - the directory to scan in - maxlevel - the depth of the recursion - level - do not edit this, used as an iterator - """ - if not os.path.exists(dir): - return [] - - # Using os functions instead of glob, because glob doesn't support hidden - # folders, and we need recursion with a fixed depth - result = [] - for file in os.listdir(dir): - path = os.path.join(dir, file) - if os.path.isdir(path): - activatepath = os.path.join(path, "bin", "activate") - if os.path.isfile(activatepath) and os.path.isfile( - path + VENV_REQUIREMENTS_SUFFIX - ): - result.append(path) - continue - if level < maxlevel: - result += _get_all_venvs(path, level=level + 1) - return result - - -class MyMigration(Migration): - """ - After the update, recreate a python virtual env based on the previously - generated requirements file - """ - - ignored_python_apps = [ - "diacamma", # Does an ugly sed in the sites-packages/django_auth_ldap3_ad - "homeassistant", # uses a custom version of Python - "immich", # uses a 
custom version of Python - "kresus", # uses virtualenv instead of venv, with --system-site-packages (?) - "librephotos", # runs a setup.py ? not sure pip freeze / pip install -r requirements.txt is gonna be equivalent .. - "mautrix", # install stuff from a .tar.gz - "microblogpub", # uses poetry ? x_x - "mopidy", # applies a custom patch? - "motioneye", # install stuff from a .tar.gz - "pgadmin", # bunch of manual patches - "searxng", # uses --system-site-packages ? - "synapse", # specific stuff for ARM to prevent local compiling etc - "matrix-synapse", # synapse is actually installed in /opt/yunohost/matrix-synapse because ... yeah ... - "tracim", # pip install -e . - "weblate", # weblate settings are .. inside the venv T_T - ] - - dependencies = ["migrate_to_bookworm"] - state = None - - def is_pending(self): - if not self.state: - self.state = tools_migrations_state()["migrations"].get( - "0030_rebuild_python_venv_in_bookworm", "pending" - ) - return self.state == "pending" - - @property - def mode(self): - if not self.is_pending(): - return "auto" - - if _get_all_venvs("/opt/") + _get_all_venvs("/var/www/"): - return "manual" - else: - return "auto" - - @property - def disclaimer(self): - # Avoid having a super long disclaimer to generate if migrations has - # been done - if not self.is_pending(): - return None - - # Disclaimer should be empty if in auto, otherwise it excepts the --accept-disclaimer option during debian postinst - if self.mode == "auto": - return None - - ignored_apps = [] - rebuild_apps = [] - - venvs = _get_all_venvs("/opt/") + _get_all_venvs("/var/www/") - for venv in venvs: - if not os.path.isfile(venv + VENV_REQUIREMENTS_SUFFIX): - continue - - app_corresponding_to_venv = extract_app_from_venv_path(venv) - - # Search for ignore apps - if any( - app_corresponding_to_venv.startswith(app) - for app in self.ignored_python_apps - ): - ignored_apps.append(app_corresponding_to_venv) - else: - rebuild_apps.append(app_corresponding_to_venv) - - msg 
= m18n.n("migration_0030_rebuild_python_venv_in_bookworm_disclaimer_base") - if rebuild_apps: - msg += "\n\n" + m18n.n( - "migration_0030_rebuild_python_venv_in_bookworm_disclaimer_rebuild", - rebuild_apps="\n - " + "\n - ".join(rebuild_apps), - ) - if ignored_apps: - msg += "\n\n" + m18n.n( - "migration_0030_rebuild_python_venv_in_bookworm_disclaimer_ignored", - ignored_apps="\n - " + "\n - ".join(ignored_apps), - ) - - return msg - - def run(self): - if self.mode == "auto": - return - - venvs = _get_all_venvs("/opt/") + _get_all_venvs("/var/www/") - for venv in venvs: - app_corresponding_to_venv = extract_app_from_venv_path(venv) - - # Search for ignore apps - if any( - app_corresponding_to_venv.startswith(app) - for app in self.ignored_python_apps - ): - rm(venv + VENV_REQUIREMENTS_SUFFIX) - logger.info( - m18n.n( - "migration_0030_rebuild_python_venv_in_bookworm_broken_app", - app=app_corresponding_to_venv, - ) - ) - continue - - logger.info( - m18n.n( - "migration_0030_rebuild_python_venv_in_bookworm_in_progress", - app=app_corresponding_to_venv, - ) - ) - - # Recreate the venv - rm(venv, recursive=True) - callbacks = ( - lambda l: logger.debug("+ " + l.rstrip() + "\r"), - lambda l: logger.warning(l.rstrip()), - ) - call_async_output(["python", "-m", "venv", venv], callbacks) - status = call_async_output( - [f"{venv}/bin/pip", "install", "-r", venv + VENV_REQUIREMENTS_SUFFIX], - callbacks, - ) - if status != 0: - logger.error( - m18n.n( - "migration_0030_rebuild_python_venv_in_bookworm_failed", - app=app_corresponding_to_venv, - ) - ) - else: - rm(venv + VENV_REQUIREMENTS_SUFFIX) diff --git a/src/settings.py b/src/settings.py index aee49c0132..8b93d4d439 100644 --- a/src/settings.py +++ b/src/settings.py @@ -160,7 +160,7 @@ def reset( self.config, self.form = self._get_config_panel(prevalidate=True) # FIXME find a better way to exclude previous settings - previous_settings = self.form.dict() + previous_settings = self.form.model_dump() for option in 
self.config.options: if not option.readonly and ( @@ -250,7 +250,7 @@ def _apply( super()._apply(form, config, previous_settings, exclude=self.virtual_settings) next_settings = { k: v - for k, v in form.dict(exclude=self.virtual_settings).items() + for k, v in form.model_dump(exclude=self.virtual_settings).items() if previous_settings.get(k) != v } diff --git a/src/tests/test_app_resources.py b/src/tests/test_app_resources.py index 38e712a29f..45b63bb820 100644 --- a/src/tests/test_app_resources.py +++ b/src/tests/test_app_resources.py @@ -272,21 +272,21 @@ def test_resource_database(): r = AppResourceClassesByType["database"] conf = {"type": "mysql"} - assert os.system("mysqlshow 'testapp' >/dev/null 2>/dev/null") != 0 + assert os.system("mariadb-show 'testapp' >/dev/null 2>/dev/null") != 0 assert not app_setting("testapp", "db_name") assert not app_setting("testapp", "db_user") assert not app_setting("testapp", "db_pwd") r(conf, "testapp").provision_or_update() - assert os.system("mysqlshow 'testapp' >/dev/null 2>/dev/null") == 0 + assert os.system("mariadb-show 'testapp' >/dev/null 2>/dev/null") == 0 assert app_setting("testapp", "db_name") assert app_setting("testapp", "db_user") assert app_setting("testapp", "db_pwd") r(conf, "testapp").deprovision() - assert os.system("mysqlshow 'testapp' >/dev/null 2>/dev/null") != 0 + assert os.system("mariadb-show 'testapp' >/dev/null 2>/dev/null") != 0 assert not app_setting("testapp", "db_name") assert not app_setting("testapp", "db_user") assert not app_setting("testapp", "db_pwd") diff --git a/src/tests/test_questions.py b/src/tests/test_questions.py index af8bc4469f..2f09c3d5f9 100644 --- a/src/tests/test_questions.py +++ b/src/tests/test_questions.py @@ -661,8 +661,8 @@ class TestString(BaseTest): scenarios = [ *nones(None, "", output=""), # basic typed values - (False, "False"), - (True, "True"), + (False, FAIL), + (True, FAIL), (0, "0"), (1, "1"), (-1, "-1"), @@ -702,8 +702,8 @@ class TestText(BaseTest): scenarios 
= [ *nones(None, "", output=""), # basic typed values - (False, "False"), - (True, "True"), + (False, FAIL), + (True, FAIL), (0, "0"), (1, "1"), (-1, "-1"), @@ -743,14 +743,11 @@ class TestPassword(BaseTest): } # fmt: off scenarios = [ - *all_fails(False, True, 0, 1, -1, 1337, 13.37, raw_option={"optional": True}), - *all_fails([], ["one"], {}, raw_option={"optional": True}, error=AttributeError), # FIXME those fails with AttributeError + *all_fails(False, True, 0, 1, -1, 1337, 13.37, [], ["one"], {}, raw_option={"optional": True}), *all_fails("none", "_none", "False", "True", "0", "1", "-1", "1337", "13.37", "[]", ",", "['one']", "one,two", r"{}", "value", "value\n", raw_option={"optional": True}), *nones(None, "", output=""), ("s3cr3t!!", YunohostError, {"default": "SUPAs3cr3t!!"}), # default is forbidden - *xpass(scenarios=[ - ("s3cr3t!!", "s3cr3t!!", {"example": "SUPAs3cr3t!!"}), # example is forbidden - ], reason="Should fail; example is forbidden"), + ("s3cr3t!!", YunohostError, {"example": "SUPAs3cr3t!!"}), # example is forbidden *xpass(scenarios=[ (" value \n moarc0mpl1cat3d\n ", "value \n moarc0mpl1cat3d"), (" some_ value", "some_ value"), @@ -970,17 +967,17 @@ class TestTime(BaseTest): # 1337 seconds == 22 minutes *all_as(1337, "1337", output="00:22"), # Negative timestamp fails - *all_fails(-1, "-1", error=OverflowError), # FIXME should handle that as a validation error + *all_fails(-1, "-1"), # *all_fails(False, True, 0, 1, -1, 1337, 13.37, [], ["one"], {}, raw_option={"optional": True}), *all_fails("none", "_none", "False", "True", "[]", ",", "['one']", "one,two", r"{}", "value", "value\n", raw_option={"optional": True}), *nones(None, "", output=""), # custom valid *unchanged("00:00", "08:00", "12:19", "20:59", "23:59"), - ("3:00", "03:00"), - ("23:1", "23:01"), ("22:35:05", "22:35"), ("22:35:03.514", "22:35"), # custom invalid + ("3:00", FAIL), + ("23:1", FAIL), ("24:00", FAIL), ("23:005", FAIL), # readonly @@ -1021,16 +1018,16 @@ class 
TestEmail(BaseTest): "राम@मोहन.ईन्फो", "юзер@екзампл.ком", "θσερ@εχαμπλε.ψομ", - "葉士豪@臺網中心.tw", "jeff@臺網中心.tw", - "葉士豪@臺網中心.台灣", - "jeff葉@臺網中心.tw", "ñoñó@example.tld", "甲斐黒川日本@example.tld", "чебурашкаящик-с-апельсинами.рф@example.tld", "उदाहरण.परीक्ष@domain.with.idn.tld", "ιωάννης@εεττ.gr", ), + ("葉士豪@臺網中心.tw", "葉士豪@臺網中心.tw"), + ("jeff葉@臺網中心.tw", "jeff葉@臺網中心.tw"), + ("葉士豪@臺網中心.台灣", "葉士豪@臺網中心.台灣"), # invalid email (Hiding because our current regex is very permissive) *all_fails( "my@localhost", @@ -1148,22 +1145,17 @@ class TestUrl(BaseTest): *nones(None, "", output=""), ("http://some.org/folder/file.txt", "http://some.org/folder/file.txt"), - (' https://www.example.com \n', 'https://www.example.com'), + (' https://www.example.com \n', 'https://www.example.com/'), # readonly ("https://overwrite.org", "https://example.org", {"readonly": True, "default": "https://example.org"}), # rest is taken from https://github.com/pydantic/pydantic/blob/main/tests/test_networks.py # valid *unchanged( # Those are valid but not sure how they will output with pydantic - 'http://example.org', 'https://example.org/whatever/next/', - 'https://example.org', - 'https://foo_bar.example.com/', - 'http://example.co.jp', 'http://www.example.com/a%C2%B1b', 'http://www.example.com/~username/', - 'http://info.example.com?fred', 'http://info.example.com/?fred', 'http://xn--mgbh0fb.xn--kgbechtv/', 'http://example.com/blue/red%3Fand+green', @@ -1171,13 +1163,8 @@ class TestUrl(BaseTest): 'http://xn--rsum-bpad.example.org/', 'http://123.45.67.8/', 'http://123.45.67.8:8329/', - 'http://[2001:db8::ff00:42]:8329', - 'http://[2001::1]:8329', 'http://[2001:db8::1]/', 'http://www.example.com:8000/foo', - 'http://www.cwi.nl:80/%7Eguido/Python.html', - 'https://www.python.org/путь', - 'http://андрей@example.com', 'https://exam_ple.com/', 'http://twitter.com/@handle/', 'http://11.11.11.11.example.com/action', @@ -1188,25 +1175,36 @@ class TestUrl(BaseTest): 'http://example.org/path?query#fragment', 
'https://foo_bar.example.com/', 'https://exam_ple.com/', - 'HTTP://EXAMPLE.ORG', - 'https://example.org', - 'https://example.org?a=1&b=2', - 'https://example.org#a=3;b=3', - 'https://example.xn--p1ai', - 'https://example.xn--vermgensberatung-pwb', - 'https://example.xn--zfr164b', + 'http://localhost/', + 'http://localhost:8000/', + 'http://example/#', + 'http://example/#fragment', + 'http://example/?#', ), - *xfail(scenarios=[ - ('http://test', 'http://test'), - ('http://localhost', 'http://localhost'), - ('http://localhost/', 'http://localhost/'), - ('http://localhost:8000', 'http://localhost:8000'), - ('http://localhost:8000/', 'http://localhost:8000/'), - ('http://example#', 'http://example#'), - ('http://example/#', 'http://example/#'), - ('http://example/#fragment', 'http://example/#fragment'), - ('http://example/?#', 'http://example/?#'), - ], reason="Should this be valid?"), + *[ + (url, url + '/') + for url in [ + 'http://example.org', + 'https://example.org', + 'http://example.co.jp', + 'http://[2001:db8::ff00:42]:8329', + 'http://[2001::1]:8329', + 'https://example.xn--p1ai', + 'https://example.xn--vermgensberatung-pwb', + 'https://example.xn--zfr164b', + 'http://test', + 'http://localhost', + 'http://localhost:8000' + ] + ], + ('http://info.example.com?fred', 'http://info.example.com/?fred'), + ('http://example#', 'http://example/#'), + ('HTTP://EXAMPLE.ORG', 'http://example.org/'), + ('https://example.org?a=1&b=2', 'https://example.org/?a=1&b=2'), + ('https://example.org#a=3;b=3', 'https://example.org/#a=3;b=3'), + ('http://www.cwi.nl:80/%7Eguido/Python.html', 'http://www.cwi.nl/%7Eguido/Python.html'), + ('https://www.python.org/путь', 'https://www.python.org/%D0%BF%D1%83%D1%82%D1%8C'), + ('http://андрей@example.com', 'http://%D0%B0%D0%BD%D0%B4%D1%80%D0%B5%D0%B9@example.com/'), # invalid *all_fails( 'ftp://example.com/', @@ -1409,7 +1407,6 @@ class TestSelect(BaseTest): }, { "raw_options": [ - {"choices": {-1: "verbose -one", 0: "verbose zero", 1: 
"verbose one", 10: "verbose ten"}}, {"choices": {"-1": "verbose -one", "0": "verbose zero", "1": "verbose one", "10": "verbose ten"}}, ], "scenarios": [ @@ -1419,9 +1416,20 @@ class TestSelect(BaseTest): *all_fails("100", 100), ] }, + { + "raw_options": [ + {"choices": {-1: "verbose -one", 0: "verbose zero", 1: "verbose one", 10: "verbose ten"}}, + ], + "scenarios": [ + *nones(None, "", output=""), + *unchanged(-1, 0, 1, 10), + *all_fails("-1", "0", "1", "10"), + *all_fails("100", 100), + ] + }, # [True, False, None] *unchanged(True, False, raw_option={"choices": [True, False, None]}), # FIXME we should probably forbid None in choices - (None, FAIL, {"choices": [True, False, None]}), + (None, "", {"choices": [True, False, None]}), { # mixed types "raw_options": [{"choices": ["one", 2, True]}], @@ -1438,7 +1446,7 @@ class TestSelect(BaseTest): "raw_options": [{"choices": ""}, {"choices": []}], "scenarios": [ # FIXME those should fail at option level (wrong default, dev error) - *all_fails(None, ""), + *all_fails(None, "", error=YunohostError), *xpass(scenarios=[ ("", "", {"optional": True}), (None, "", {"optional": True}), @@ -1891,7 +1899,7 @@ def test_options_query_string(): "time_id": "20:55", "email_id": "coucou@ynh.org", "path_id": "/ynh-dev", - "url_id": "https://yunohost.org", + "url_id": "https://yunohost.org/", "file_id": file_content1, "select_id": "one", "tags_id": "one,two", diff --git a/src/tools.py b/src/tools.py index 2efe2fc195..c9a4d00eb3 100644 --- a/src/tools.py +++ b/src/tools.py @@ -680,15 +680,6 @@ def get_matching_migration(target): raise YunohostValidationError("migrations_no_such_migration", id=target) - # Dirty hack to mark the bullseye->bookworm as done ... - # it may still be marked as 'pending' if for some reason the migration crashed, - # but the admins ran 'apt full-upgrade' to manually finish the migration - # ... 
in which case it won't be magically flagged as 'done' until here - migrate_to_bookworm = get_matching_migration("migrate_to_bookworm") - if migrate_to_bookworm.state == "pending": - migrate_to_bookworm.state = "done" - _write_migration_state(migrate_to_bookworm.id, "done") - # auto, skip and force are exclusive options if auto + skip + force_rerun > 1: raise YunohostValidationError("migrations_exclusive_options") diff --git a/src/utils/configpanel.py b/src/utils/configpanel.py index 37318813ea..2f5bbc4a02 100644 --- a/src/utils/configpanel.py +++ b/src/utils/configpanel.py @@ -23,7 +23,7 @@ from logging import getLogger from typing import TYPE_CHECKING, Any, Iterator, Literal, Sequence, Type, Union, cast -from pydantic import BaseModel, Extra, validator +from pydantic import BaseModel, ConfigDict, field_validator from moulinette import Moulinette, m18n from moulinette.interfaces.cli import colorize @@ -45,8 +45,11 @@ from yunohost.utils.i18n import _value_for_locale if TYPE_CHECKING: - from pydantic.fields import ModelField + from pydantic import GetJsonSchemaHandler + from pydantic.fields import ValidationInfo + from pydantic.json_schema import JsonSchemaValue from pydantic.typing import AbstractSetIntStr, MappingIntStrAny + from pydantic_core.core_schema import CoreSchema from yunohost.utils.form import FormModel, Hooks from yunohost.log import OperationLogger @@ -126,14 +129,6 @@ class SectionModel(ContainerModel, OptionsModel): is_action_section: bool = False bind: str | None = None - class Config: - @staticmethod - def schema_extra(schema: dict[str, Any]) -> None: - del schema["properties"]["id"] - options = schema["properties"].pop("options") - del schema["required"] - schema["additionalProperties"] = options["items"] - # Don't forget to pass arguments to super init def __init__( self, @@ -162,6 +157,18 @@ def __init__( is_action_section=is_action_section, ) + @classmethod + def __get_pydantic_json_schema__( + cls, core_schema: "CoreSchema", handler: 
"GetJsonSchemaHandler" + ) -> "JsonSchemaValue": + schema = handler(core_schema) + del schema["properties"]["id"] + options = schema["properties"].pop("options") + del schema["required"] + schema["additionalProperties"] = options["items"] + + return schema + def is_visible(self, context: dict[str, Any]) -> bool: if isinstance(self.visible, bool): return self.visible @@ -205,15 +212,9 @@ class PanelModel(ContainerModel): bind: str | None = None sections: list[SectionModel] - class Config: - extra = Extra.allow - - @staticmethod - def schema_extra(schema: dict[str, Any]) -> None: - del schema["properties"]["id"] - del schema["properties"]["sections"] - del schema["required"] - schema["additionalProperties"] = {"$ref": "#/definitions/SectionModel"} + model_config = ConfigDict( + extra="allow", + ) # Don't forget to pass arguments to super init def __init__( @@ -230,6 +231,18 @@ def __init__( id=id, name=name, services=services, help=help, bind=bind, sections=sections ) + @classmethod + def __get_pydantic_json_schema__( + cls, core_schema: "CoreSchema", handler: "GetJsonSchemaHandler" + ) -> "JsonSchemaValue": + schema = handler(core_schema) + del schema["properties"]["id"] + del schema["properties"]["sections"] + del schema["required"] + schema["additionalProperties"] = {"$ref": "#/definitions/SectionModel"} + + return schema + def translate(self, i18n_key: str | None = None) -> None: """ Recursivly mutate translatable attributes to their translation @@ -265,29 +278,10 @@ class ConfigPanelModel(BaseModel): i18n: str | None = None panels: list[PanelModel] - class Config: - arbitrary_types_allowed = True - extra = Extra.allow - - @staticmethod - def schema_extra(schema: dict[str, Any]) -> None: - """Update the schema to the expected input - In actual TOML definition, schema is like: - ```toml - [panel_1] - [panel_1.section_1] - [panel_1.section_1.option_1] - ``` - Which is equivalent to `{"panel_1": {"section_1": {"option_1": {}}}}` - so `section_id` (and `option_id`) 
are additional property of `panel_id`, - which is convinient to write but not ideal to iterate. - In ConfigPanelModel we gather additional properties of panels, sections - and options as lists so that structure looks like: - `{"panels`: [{"id": "panel_1", "sections": [{"id": "section_1", "options": [{"id": "option_1"}]}]}] - """ - del schema["properties"]["panels"] - del schema["required"] - schema["additionalProperties"] = {"$ref": "#/definitions/PanelModel"} + model_config = ConfigDict( + arbitrary_types_allowed=True, + extra="allow", + ) # Don't forget to pass arguments to super init def __init__( @@ -299,6 +293,31 @@ def __init__( panels = [data | {"id": name} for name, data in kwargs.items()] super().__init__(version=version, i18n=i18n, panels=panels) + @classmethod + def __get_pydantic_json_schema__( + cls, core_schema: "CoreSchema", handler: "GetJsonSchemaHandler" + ) -> "JsonSchemaValue": + """Update the schema to the expected input + In actual TOML definition, schema is like: + ```toml + [panel_1] + [panel_1.section_1] + [panel_1.section_1.option_1] + ``` + Which is equivalent to `{"panel_1": {"section_1": {"option_1": {}}}}` + so `section_id` (and `option_id`) are additional property of `panel_id`, + which is convinient to write but not ideal to iterate. 
+ In ConfigPanelModel we gather additional properties of panels, sections + and options as lists so that structure looks like: + `{"panels`: [{"id": "panel_1", "sections": [{"id": "section_1", "options": [{"id": "option_1"}]}]}] + """ + schema = handler(core_schema) + del schema["properties"]["panels"] + del schema["required"] + schema["additionalProperties"] = {"$ref": "#/definitions/PanelModel"} + + return schema + @property def sections(self) -> Iterator[SectionModel]: """Convinient prop to iter on all sections""" @@ -369,8 +388,9 @@ def translate(self) -> None: for panel in self.panels: panel.translate(self.i18n) - @validator("version", always=True) - def check_version(cls, value: float, field: "ModelField") -> float: + @field_validator("version") + @classmethod + def check_version(cls, value: float, info: "ValidationInfo") -> float: if value < CONFIG_PANEL_VERSION_SUPPORTED: raise ValueError( f"Config panels version '{value}' are no longer supported." @@ -497,7 +517,7 @@ def get( logger.debug(f"Formating result in '{mode}' mode") if mode == "full": - result = self.config.dict(exclude_none=True) + result = self.config.model_dump(exclude_none=True) for panel in result["panels"]: for section in panel["sections"]: @@ -569,7 +589,7 @@ def set( self.config, self.form = self._get_config_panel() # FIXME find a better way to exclude previous settings - previous_settings = self.form.dict() + previous_settings = self.form.model_dump() # FIXME Not sure if this is need (redact call to operation logger does it on all the instances) # BaseOption.operation_logger = operation_logger @@ -721,7 +741,7 @@ def filter_keys( ) -> "RawConfig": # filter in keys defined in model, filter out panels/sections/options that aren't `key` return OrderedDict( - {k: v for k, v in data.items() if k in model.__fields__ or k == key} + {k: v for k, v in data.items() if k in model.model_fields or k == key} ) raw_config = self._get_raw_config() @@ -807,7 +827,7 @@ def _get_config_panel( settings = ( 
Settings(**raw_settings) if prevalidate - else Settings.construct(**raw_settings) + else Settings.model_construct(**raw_settings) ) try: @@ -839,7 +859,7 @@ def _ask( for panel in config.panels: if interactive and verbose: Moulinette.display( - colorize(f"\n{'='*40}\n>>>> {panel.name}\n{'='*40}", "purple") + colorize(f"\n{'=' * 40}\n>>>> {panel.name}\n{'=' * 40}", "purple") ) # A section or option may only evaluate its conditions (`visible` @@ -895,9 +915,9 @@ def _apply( exclude_defaults = self.save_mode == "diff" # get settings keys filtered by filter_key - partial_settings_keys = form.__fields__.keys() + partial_settings_keys = form.model_fields.keys() # get filtered settings - partial_settings = form.dict(exclude_defaults=exclude_defaults, exclude=exclude) # type: ignore + partial_settings = form.model_dump(exclude_defaults=exclude_defaults, exclude=exclude) # type: ignore # get previous settings that we will updated with new settings current_settings = self.raw_settings.copy() diff --git a/src/utils/form.py b/src/utils/form.py index b6a6cf9666..cdec6f278b 100644 --- a/src/utils/form.py +++ b/src/utils/form.py @@ -42,16 +42,16 @@ from pydantic import ( BaseModel, - Extra, + ConfigDict, ValidationError, create_model, - validator, - root_validator, + field_validator, + model_validator, ) -from pydantic.color import Color from pydantic.fields import Field -from pydantic.networks import EmailStr, HttpUrl +from pydantic.networks import EmailStr, HttpUrl, Url from pydantic.types import constr +from pydantic_extra_types.color import Color from moulinette import Moulinette, m18n from moulinette.interfaces.cli import colorize @@ -61,7 +61,10 @@ from yunohost.utils.i18n import _value_for_locale if TYPE_CHECKING: - from pydantic.fields import ModelField, FieldInfo + from pydantic import GetJsonSchemaHandler + from pydantic.fields import ValidationInfo, FieldInfo + from pydantic.json_schema import JsonSchemaValue + from pydantic_core.core_schema import CoreSchema logger = 
getLogger("yunohost.form") @@ -375,49 +378,58 @@ class BaseOption(BaseModel): mode: Mode = ( "bash" # TODO use "python" as default mode with AppConfigPanel setuping it to "bash" ) - ask: Translation | None + ask: Translation | None = None readonly: bool = False visible: JSExpression | bool = True bind: str | None = None name: str | None = None # LEGACY (replaced by `id`) - class Config: - arbitrary_types_allowed = True - use_enum_values = True - validate_assignment = True - extra = Extra.forbid - - @staticmethod - def schema_extra(schema: dict[str, Any]) -> None: - del schema["properties"]["id"] - del schema["properties"]["name"] - schema["required"] = [ - required for required in schema.get("required", []) if required != "id" - ] - if not schema["required"]: - del schema["required"] + model_config = ConfigDict( + arbitrary_types_allowed=True, + use_enum_values=True, + validate_assignment=True, + extra="forbid", + ) + + @classmethod + def __get_pydantic_json_schema__( + cls, core_schema: "CoreSchema", handler: "GetJsonSchemaHandler" + ) -> "JsonSchemaValue": + schema = handler(core_schema) + del schema["properties"]["id"] + del schema["properties"]["name"] + schema["required"] = [ + required for required in schema.get("required", []) if required != "id" + ] + if not schema["required"]: + del schema["required"] + + return schema - @validator("id", pre=True) + @field_validator("id", mode="before") + @classmethod def check_id_is_not_forbidden(cls, value: str) -> str: if value in FORBIDDEN_KEYWORDS: raise ValueError(m18n.n("config_forbidden_keyword", keyword=value)) return value # FIXME Legacy, is `name` still needed? 
- @validator("name") - def apply_legacy_name(cls, value: str | None, values: Values) -> str: + @field_validator("name") + @classmethod + def apply_legacy_name(cls, value: str | None, info: "ValidationInfo") -> str: if value is None: - return values["id"] + return info.data["id"] return value - @validator("readonly", pre=True) - def can_be_readonly(cls, value: bool, values: Values) -> bool: - if value is True and values["type"] in FORBIDDEN_READONLY_TYPES: + @field_validator("readonly", mode="before") + @classmethod + def can_be_readonly(cls, value: bool, info: "ValidationInfo") -> bool: + if value is True and info.data["type"] in FORBIDDEN_READONLY_TYPES: raise ValueError( m18n.n( "config_forbidden_readonly_type", - type=values["type"], - id=values["id"], + type=info.data["type"], + id=info.data["id"], ) ) return value @@ -617,8 +629,9 @@ class BaseInputOption(BaseOption): _annotation: Any = Any _none_as_empty_str: ClassVar[bool] = True - @validator("default", pre=True) - def check_empty_default(value: Any) -> Any: + @field_validator("default", mode="before") + @classmethod + def check_empty_default(cls, value: Any) -> Any: if value == "": return None return value @@ -653,26 +666,27 @@ def _validators(self) -> dict[str, Callable]: def _get_field_attrs(self) -> dict[str, Any]: """ Returns attributes to build a `pydantic.Field`. - This may contains non `Field` attrs that will end up in `Field.extra`. - Those extra can be used as constraints in custom validators and ends up + Extra can be used as constraints in custom validators and ends up in the JSON Schema. 
""" # TODO # - help # - placeholder - attrs: dict[str, Any] = { + attrs: dict[str, Any] = {} + attrs["json_schema_extra"] = { "redact": self.redact, # extra "none_as_empty_str": self._none_as_empty_str, } if self.readonly: - attrs["allow_mutation"] = False + attrs["frozen"] = True if self.example: attrs["examples"] = [self.example] if self.default is not None: attrs["default"] = self.default + attrs["validate_default"] = True else: attrs["default"] = ... if not self.optional else None @@ -702,16 +716,16 @@ def _get_prompt_message(self, value: Any) -> str: return message - @classmethod - def _value_pre_validator(cls, value: Any, field: "ModelField") -> Any: + @staticmethod + def _value_pre_validator(cls, value: Any, info: "ValidationInfo") -> Any: if value == "": return None return value - @classmethod - def _value_post_validator(cls, value: Any, field: "ModelField") -> Any: - extras = field.field_info.extra + @staticmethod + def _value_post_validator(cls, value: Any, info: "ValidationInfo") -> Any: + extras = cls.model_fields[info.field_name].json_schema_extra if value is None and extras["none_as_empty_str"]: value = "" @@ -749,7 +763,7 @@ class BaseStringOption(BaseInputOption): @property def _dynamic_annotation(self) -> Type[str]: if self.pattern: - return constr(regex=self.pattern.regexp) + return constr(pattern=self.pattern.regexp) return self._annotation @@ -757,7 +771,7 @@ def _get_field_attrs(self) -> dict[str, Any]: attrs = super()._get_field_attrs() if self.pattern: - attrs["regex_error"] = self.pattern.error # extra + attrs["json_schema_extra"]["regex_error"] = self.pattern.error # extra return attrs @@ -834,16 +848,23 @@ class PasswordOption(BaseInputOption): def _get_field_attrs(self) -> dict[str, Any]: attrs = super()._get_field_attrs() - attrs["forbidden_chars"] = self._forbidden_chars # extra + attrs["json_schema_extra"]["forbidden_chars"] = self._forbidden_chars # extra return attrs - @classmethod - def _value_pre_validator(cls, value: str | None, 
field: "ModelField") -> str | None: - value = super()._value_pre_validator(value, field) + @staticmethod + def _value_pre_validator( + cls, value: str | None, info: "ValidationInfo" + ) -> str | None: + value = super(PasswordOption, PasswordOption)._value_pre_validator( + cls, value, info + ) if value is not None and value != "": - forbidden_chars: str = field.field_info.extra["forbidden_chars"] + value = str(value) + forbidden_chars: str = cls.model_fields[info.field_name].json_schema_extra[ + "forbidden_chars" + ] if any(char in value for char in forbidden_chars): raise YunohostValidationError( "pattern_password_app", forbidden_chars=forbidden_chars @@ -891,14 +912,14 @@ def normalize(value: Color | str | None, option={}) -> str: return super(ColorOption, ColorOption).normalize(value, option) - @classmethod + @staticmethod def _value_post_validator( - cls, value: Color | None, field: "ModelField" + cls, value: Color | None, info: "ValidationInfo" ) -> str | None: if isinstance(value, Color): return value.as_hex() - return super()._value_post_validator(value, field) + return super(ColorOption, ColorOption)._value_post_validator(cls, value, info) # ─ NUMERIC ─────────────────────────────────────────────── @@ -948,7 +969,7 @@ def normalize(value, option={}) -> int | None: if value in [None, ""]: return None - option = option.dict() if isinstance(option, BaseOption) else option + option = option.model_dump() if isinstance(option, BaseOption) else option raise YunohostValidationError( "app_argument_invalid", name=option.get("id"), @@ -959,13 +980,15 @@ def _get_field_attrs(self) -> dict[str, Any]: attrs = super()._get_field_attrs() attrs["ge"] = self.min attrs["le"] = self.max - attrs["step"] = self.step # extra + attrs["json_schema_extra"]["step"] = self.step # extra return attrs - @classmethod - def _value_pre_validator(cls, value: int | None, field: "ModelField") -> int | None: - value = super()._value_pre_validator(value, field) + @staticmethod + def 
_value_pre_validator( + cls, value: int | None, info: "ValidationInfo" + ) -> int | None: + value = super(NumberOption, NumberOption)._value_pre_validator(cls, value, info) if value is None: return None @@ -1009,7 +1032,7 @@ class BooleanOption(BaseInputOption): @staticmethod def humanize(value, option={}) -> str: - option = option.dict() if isinstance(option, BaseOption) else option + option = option.model_dump() if isinstance(option, BaseOption) else option yes = option.get("yes", 1) no = option.get("no", 0) @@ -1032,7 +1055,7 @@ def humanize(value, option={}) -> str: @staticmethod def normalize(value, option={}) -> Any: - option = option.dict() if isinstance(option, BaseOption) else option + option = option.model_dump() if isinstance(option, BaseOption) else option if isinstance(value, str): value = value.strip() @@ -1075,7 +1098,7 @@ def get(self, key, default=None): def _get_field_attrs(self) -> dict[str, Any]: attrs = super()._get_field_attrs() - attrs["parse"] = { # extra + attrs["json_schema_extra"]["parse"] = { # extra True: self.yes, False: self.no, } @@ -1089,12 +1112,14 @@ def _get_prompt_message(self, value: bool | None) -> str: return message - @classmethod - def _value_post_validator(cls, value: bool | None, field: "ModelField") -> Any: + @staticmethod + def _value_post_validator(cls, value: bool | None, info: "ValidationInfo") -> Any: if isinstance(value, bool): - return field.field_info.extra["parse"][value] + return cls.model_fields[info.field_name].json_schema_extra["parse"][value] - return super()._value_post_validator(value, field) + return super(BooleanOption, BooleanOption)._value_post_validator( + cls, value, info + ) # ─ TIME ────────────────────────────────────────────────── @@ -1122,14 +1147,27 @@ class DateOption(BaseInputOption): default: str | None = None _annotation = datetime.date - @classmethod + @staticmethod + def _value_pre_validator( + cls, v: datetime.date | str | None, info: "ValidationInfo" + ) -> datetime.date | str | None: 
+ v = super(DateOption, DateOption)._value_pre_validator(cls, v, info) + if isinstance(v, int | float) or ( + isinstance(v, str) and v.replace(".", "").replace("-", "", 1).isdigit() + ): + # FIXME use datetime.timezone.utc? or use local timezone + return datetime.date.fromtimestamp(float(v)) + + return v + + @staticmethod def _value_post_validator( - cls, value: datetime.date | None, field: "ModelField" + cls, value: datetime.date | None, info: "ValidationInfo" ) -> str | None: if isinstance(value, datetime.date): return value.isoformat() - return super()._value_post_validator(value, field) + return super(DateOption, DateOption)._value_post_validator(cls, value, info) class TimeOption(BaseInputOption): @@ -1152,15 +1190,29 @@ class TimeOption(BaseInputOption): default: str | int | None = None _annotation = datetime.time - @classmethod + @staticmethod + def _value_pre_validator( + cls, v: Any, info: "ValidationInfo" + ) -> datetime.time | datetime.datetime | None: + v = super(TimeOption, TimeOption)._value_pre_validator(cls, v, info) + if isinstance(v, int | float) or ( + isinstance(v, str) and v.replace(".", "").replace("-", "", 1).isdigit() + ): + value = float(v) + if value >= 0: + return datetime.datetime.fromtimestamp(float(v)).time() + + return v + + @staticmethod def _value_post_validator( - cls, value: datetime.date | None, field: "ModelField" + cls, value: datetime.date | None, info: "ValidationInfo" ) -> str | None: if isinstance(value, datetime.time): # FIXME could use `value.isoformat()` to get `%H:%M:%S` return value.strftime("%H:%M") - return super()._value_post_validator(value, field) + return super(TimeOption, TimeOption)._value_post_validator(cls, value, info) # ─ LOCATIONS ───────────────────────────────────────────── @@ -1206,7 +1258,7 @@ class WebPathOption(BaseStringOption): @staticmethod def normalize(value, option={}) -> str: - option = option.dict() if isinstance(option, BaseOption) else option + option = option.model_dump() if 
isinstance(option, BaseOption) else option if value is None: value = "" @@ -1253,19 +1305,49 @@ class URLOption(BaseStringOption): type: Literal[OptionType.url] = OptionType.url _annotation = HttpUrl - @classmethod + @staticmethod def _value_post_validator( - cls, value: HttpUrl | None, field: "ModelField" + cls, value: HttpUrl | None, info: "ValidationInfo" ) -> str | None: - if isinstance(value, HttpUrl): + if isinstance(value, Url): return str(value) - return super()._value_post_validator(value, field) + return super(URLOption, URLOption)._value_post_validator(cls, value, info) # ─ FILE ────────────────────────────────────────────────── +def _base_value_post_validator( + cls, value: Any, info: "ValidationInfo" +) -> tuple[bytes, str | None]: + import mimetypes + from pathlib import Path + from magic import Magic + from base64 import b64decode + + if Moulinette.interface.type != "api": + path = Path(value) + if not (path.exists() and path.is_absolute() and path.is_file()): + raise YunohostValidationError("File doesn't exists", raw_msg=True) + content = path.read_bytes() + else: + content = b64decode(value) + + accept_list = cls.model_fields[info.field_name].json_schema_extra.get("accept") + mimetype = Magic(mime=True).from_buffer(content) + + if accept_list and mimetype not in accept_list: + raise YunohostValidationError( + f"Unsupported file type '{mimetype}', expected a type among '{', '.join(accept_list)}'.", + raw_msg=True, + ) + + ext = mimetypes.guess_extension(mimetype) + + return content, ext + + class FileOption(BaseInputOption): r""" Ask for file. 
@@ -1309,9 +1391,9 @@ def _get_field_attrs(self) -> dict[str, Any]: attrs = super()._get_field_attrs() if self.accept: - attrs["accept"] = self.accept # extra + attrs["json_schema_extra"]["accept"] = self.accept # extra - attrs["bind"] = self.bind + attrs["json_schema_extra"]["bind"] = self.bind return attrs @@ -1322,57 +1404,29 @@ def clean_upload_dirs(cls) -> None: if os.path.exists(upload_dir): shutil.rmtree(upload_dir) - @classmethod - def _base_value_post_validator( - cls, value: Any, field: "ModelField" - ) -> tuple[bytes, str | None]: - import mimetypes - from pathlib import Path - from magic import Magic - from base64 import b64decode - - if Moulinette.interface.type != "api": - path = Path(value) - if not (path.exists() and path.is_absolute() and path.is_file()): - raise YunohostValidationError("File doesn't exists", raw_msg=True) - content = path.read_bytes() - else: - content = b64decode(value) - - accept_list = field.field_info.extra.get("accept") - mimetype = Magic(mime=True).from_buffer(content) - - if accept_list and mimetype not in accept_list: - raise YunohostValidationError( - f"Unsupported file type '{mimetype}', expected a type among '{', '.join(accept_list)}'.", - raw_msg=True, - ) - - ext = mimetypes.guess_extension(mimetype) - - return content, ext - - @classmethod - def _bash_value_post_validator(cls, value: Any, field: "ModelField") -> str: + @staticmethod + def _bash_value_post_validator(cls, value: Any, info: "ValidationInfo") -> str: """File handling for "bash" config panels (app)""" if not value: return "" - content, _ = cls._base_value_post_validator(value, field) + content, _ = _base_value_post_validator(cls, value, info) upload_dir = tempfile.mkdtemp(prefix="ynh_filequestion_") _, file_path = tempfile.mkstemp(dir=upload_dir) FileOption._upload_dirs.add(upload_dir) - logger.debug(f"Saving file {field.name} for file question into {file_path}") + logger.debug( + f"Saving file {info.field_name} for file question into {file_path}" + ) 
write_to_file(file_path, content, file_mode="wb") return file_path - @classmethod - def _python_value_post_validator(cls, value: Any, field: "ModelField") -> str: + @staticmethod + def _python_value_post_validator(cls, value: Any, info: "ValidationInfo") -> str: """File handling for "python" config panels""" from pathlib import Path @@ -1381,7 +1435,7 @@ def _python_value_post_validator(cls, value: Any, field: "ModelField") -> str: if not value: return "" - bind = field.field_info.extra["bind"] + bind = cls.model_fields[info.field_name].json_schema_extra["bind"] # to avoid "filename too long" with b64 content if len(value.encode("utf-8")) < 255: @@ -1392,7 +1446,7 @@ def _python_value_post_validator(cls, value: Any, field: "ModelField") -> str: ): return value - content, ext = cls._base_value_post_validator(value, field) + content, ext = _base_value_post_validator(cls, value, info) m = hashlib.sha256() m.update(content) @@ -1412,8 +1466,9 @@ class BaseChoicesOption(BaseInputOption): # We do not declare `choices` here to be able to declare other fields before `choices` and acces their values in `choices` validators # choices: dict[str, Any] | list[Any] | None - @validator("choices", pre=True, check_fields=False) - def parse_comalist_choices(value: Any) -> dict[str, Any] | list[Any] | None: + @field_validator("choices", mode="before", check_fields=False) + @classmethod + def parse_comalist_choices(cls, value: Any) -> dict[str, Any] | list[Any] | None: if isinstance(value, str): values = [value.strip() for value in value.split(",")] return [value for value in values if value] @@ -1421,7 +1476,7 @@ def parse_comalist_choices(value: Any) -> dict[str, Any] | list[Any] | None: @property def _dynamic_annotation(self) -> object | Type[str]: - if self.choices is not None: + if self.choices: choices = ( self.choices if isinstance(self.choices, list) else self.choices.keys() ) @@ -1482,7 +1537,7 @@ class SelectOption(BaseChoicesOption): type: Literal[OptionType.select] = 
OptionType.select filter: Literal[None] = None - choices: list[Any] | dict[str, Any] | None + choices: list[Any] | dict[Any, Any] | None = None default: str | None = None _annotation = str @@ -1549,7 +1604,7 @@ def _dynamic_annotation(self) -> Type[str]: # Repeat pattern stuff since we can't call the bare class `_dynamic_annotation` prop without instantiating it if self.pattern: - return constr(regex=self.pattern.regexp) + return constr(pattern=self.pattern.regexp) return self._annotation @@ -1557,13 +1612,13 @@ def _get_field_attrs(self) -> dict[str, Any]: attrs = super()._get_field_attrs() if self.choices: - attrs["choices"] = self.choices # extra + attrs["json_schema_extra"]["choices"] = self.choices # extra return attrs - @classmethod + @staticmethod def _value_pre_validator( - cls, value: list | str | None, field: "ModelField" + cls, value: list | str | None, info: "ValidationInfo" ) -> str | None: if value is None or value == "": return None @@ -1571,7 +1626,7 @@ def _value_pre_validator( if not isinstance(value, (list, str, type(None))): raise YunohostValidationError( "app_argument_invalid", - name=field.name, + name=info.field_name, error=f"'{str(value)}' is not a list", ) @@ -1580,12 +1635,12 @@ def _value_pre_validator( value = [v for v in value if v] if isinstance(value, list): - choices = field.field_info.extra.get("choices") + choices = cls.model_fields[info.field_name].json_schema_extra.get("choices") if choices: if not all(v in choices for v in value): raise YunohostValidationError( "app_argument_choice_invalid", - name=field.name, + name=info.field_name, value=value, choices=", ".join(str(choice) for choice in choices), ) @@ -1614,27 +1669,30 @@ class DomainOption(BaseChoicesOption): type: Literal[OptionType.domain] = OptionType.domain filter: Literal[None] = None - choices: dict[str, str] | None + choices: dict[str, str] | None = None - @validator("choices", pre=True, always=True) - def inject_domains_choices( - cls, value: dict[str, str] | None, 
values: Values - ) -> dict[str, str]: + @model_validator(mode="before") + @classmethod + def inject_domains_choices(cls, values: Values) -> Values: # TODO remove calls to resources in validators (pydantic V2 should adress this) from yunohost.domain import domain_list data = domain_list() - return { + values["choices"] = { domain: domain + " ★" if domain == data["main"] else domain for domain in data["domains"] } - @validator("default", pre=True, always=True) - def inject_default(cls, value: str | None, values: Values) -> str | None: + return values + + @model_validator(mode="before") + @classmethod + def inject_default(cls, values: Values) -> Values: # TODO remove calls to resources in validators (pydantic V2 should adress this) from yunohost.domain import _get_maindomain - return _get_maindomain() + values["default"] = _get_maindomain() + return values @staticmethod def normalize(value, option={}) -> str: @@ -1668,12 +1726,11 @@ class AppOption(BaseChoicesOption): type: Literal[OptionType.app] = OptionType.app filter: JSExpression | None = None - choices: dict[str, str] | None + choices: dict[str, str] | None = None - @validator("choices", pre=True, always=True) - def inject_apps_choices( - cls, value: dict[str, str] | None, values: Values - ) -> dict[str, str]: + @model_validator(mode="before") + @classmethod + def inject_apps_choices(cls, values: Values) -> Values: # TODO remove calls to resources in validators (pydantic V2 should adress this) from yunohost.app import app_list @@ -1694,7 +1751,9 @@ def inject_apps_choices( } ) - return value + values["choices"] = value + + return values class UserOption(BaseChoicesOption): @@ -1714,9 +1773,9 @@ class UserOption(BaseChoicesOption): type: Literal[OptionType.user] = OptionType.user filter: Literal[None] = None - choices: dict[str, str] | None + choices: dict[str, str] | None = None - @root_validator(pre=True) + @model_validator(mode="before") def inject_users_choices_and_default(cls, values: Values) -> Values: # 
TODO remove calls to resources in validators (pydantic V2 should adress this) from yunohost.user import user_list @@ -1764,13 +1823,12 @@ class GroupOption(BaseChoicesOption): type: Literal[OptionType.group] = OptionType.group filter: Literal[None] = None - choices: dict[str, str] | None + choices: dict[str, str] | None = None default: Literal["visitors", "all_users", "admins"] | None = "all_users" - @validator("choices", pre=True, always=True) - def inject_groups_choices( - cls, value: dict[str, str] | None, values: Values - ) -> dict[str, str]: + @model_validator(mode="before") + @classmethod + def inject_groups_choices(cls, values: Values) -> Values: # TODO remove calls to resources in validators (pydantic V2 should adress this) from yunohost.user import user_group_list @@ -1786,14 +1844,19 @@ def _human_readable_group(groupname): else groupname ) - return {groupname: _human_readable_group(groupname) for groupname in groups} + values["choices"] = { + groupname: _human_readable_group(groupname) for groupname in groups + } + + return values - @validator("default", pre=True, always=True) - def inject_default(cls, value: str | None, values: Values) -> str: + @model_validator(mode="before") + @classmethod + def inject_default(cls, values: Values) -> Values: # FIXME do we really want to default to something all the time? - if value is None: - return "all_users" - return value + if values.get("default") in ("", None): + values["default"] = "all_users" + return values OPTIONS = { @@ -1917,9 +1980,11 @@ class FormModel(BaseModel): Base form on which dynamic forms are built upon Options. """ - class Config: - validate_assignment = True - extra = Extra.ignore + model_config = ConfigDict( + validate_assignment=True, + extra="ignore", + coerce_numbers_to_str=True, + ) def __getitem__(self, name: str) -> Any: # FIXME @@ -1950,7 +2015,7 @@ def build_form( """ Returns a dynamic pydantic model class that can be used as a form. 
Parsing/validation occurs at instanciation and assignements. - To avoid validation at instanciation, use `my_form.construct(**values)` + To avoid validation at instanciation, use `my_form.model_construct(**values)` """ options_as_fields: Any = {} validators: dict[str, Any] = {} @@ -1963,8 +2028,8 @@ def build_form( option_validators = option._validators for step in ("pre", "post"): - validators[f"{option.id}_{step}_validator"] = validator( - option.id, allow_reuse=True, pre=step == "pre" + validators[f"{option.id}_{step}_validator"] = field_validator( + option.id, mode="before" if step == "pre" else "after" )(option_validators[step]) return cast( @@ -2124,13 +2189,16 @@ def prompt_or_validate_form( ) if ( isinstance(option, (BaseStringOption, TagsOption)) - and "regex" in err["type"] + and "pattern" in err["type"] and option.pattern is not None ): err_text = option.pattern.error else: - err_text = m18n.n( - f"pydantic.{err['type']}".replace(".", "_"), **ctx + i18n_key = f"pydantic.{err['type']}".replace(".", "_") + err_text = ( + m18n.n(i18n_key, **ctx) + if m18n.key_exists(i18n_key) + else err["msg"] ) else: err_text = str(e) @@ -2202,7 +2270,7 @@ def ask_questions_and_parse_answers( model_options = parse_raw_options(raw_options, serialize=False) # Build the form from those questions and instantiate it without # parsing/validation (construct) since it may contains required questions. - form = build_form(model_options).construct() + form = build_form(model_options).model_construct() form = prompt_or_validate_form( model_options, form, prefilled_answers=answers, context=context, hooks=hooks ) @@ -2212,13 +2280,15 @@ def ask_questions_and_parse_answers( @overload def parse_raw_options( raw_options: dict[str, Any], serialize: Literal[True] -) -> list[dict[str, Any]]: ... +) -> list[dict[str, Any]]: + ... @overload def parse_raw_options( raw_options: dict[str, Any], serialize: Literal[False] = False -) -> list[AnyOption]: ... +) -> list[AnyOption]: + ... 
def parse_raw_options( @@ -2233,6 +2303,6 @@ def parse_raw_options( model.translate_options() if serialize: - return model.dict()["options"] + return model.model_dump()["options"] return model.options diff --git a/src/utils/resources.py b/src/utils/resources.py index b4707e5076..adc2364837 100644 --- a/src/utils/resources.py +++ b/src/utils/resources.py @@ -1490,7 +1490,7 @@ def __init__(self, properties: Dict[str, Any], *args, **kwargs): def db_exists(self, db_name): if self.dbtype == "mysql": - return os.system(f"mysqlshow | grep -q -w '{db_name}' 2>/dev/null") == 0 + return os.system(f"mariadb-show | grep -q -w '{db_name}' 2>/dev/null") == 0 elif self.dbtype == "postgresql": return ( os.system( diff --git a/tox.ini b/tox.ini index 906864b4ff..3237afb67a 100644 --- a/tox.ini +++ b/tox.ini @@ -1,15 +1,15 @@ [tox] -envlist = py311-{lint,invalidcode},py311-black-{run,check} +envlist = py312-{lint,invalidcode},py312-black-{run,check} [testenv] skip_install=True deps = - py311-{lint,invalidcode}: flake8 - py311-black-{run,check}: black - py311-mypy: mypy >= 0.900 + py312-{lint,invalidcode}: flake8 + py312-black-{run,check}: black + py312-mypy: mypy >= 0.900 commands = - py311-lint: flake8 src doc maintenance tests --ignore E402,E501,E203,W503,E741 --exclude src/tests,src/vendor - py311-invalidcode: flake8 src bin maintenance --exclude src/tests,src/vendor --select F,E722,W605 - py311-black-check: black --check --diff bin src doc maintenance tests - py311-black-run: black bin src doc maintenance tests - py311-mypy: mypy --ignore-missing-import --install-types --non-interactive --follow-imports silent src/ --exclude (acme_tiny|migrations|tests) + py312-lint: flake8 src doc maintenance tests --ignore E402,E501,E203,W503,E741 --exclude src/tests,src/vendor + py312-invalidcode: flake8 src bin maintenance --exclude src/tests,src/vendor --select F,E722,W605 + py312-black-check: black --check --diff bin src doc maintenance tests + py312-black-run: black bin src doc 
maintenance tests + py312-mypy: mypy --ignore-missing-import --install-types --non-interactive --follow-imports silent src/ --exclude (acme_tiny|migrations|tests)