Making Diffq optional (facebookresearch#496)
* removing diffq as required dependency

* plop

* release
adefossez authored May 23, 2023
1 parent a8154eb commit 8339657
Showing 5 changed files with 27 additions and 4 deletions.
demucs/pretrained.py (9 changes: 8 additions & 1 deletion)

@@ -14,6 +14,7 @@
 
 from .hdemucs import HDemucs
 from .repo import RemoteRepo, LocalRepo, ModelOnlyRepo, BagOnlyRepo, AnyModelRepo, ModelLoadingError  # noqa
+from .states import _check_diffq
 
 logger = logging.getLogger(__name__)
 ROOT_URL = "https://dl.fbaipublicfiles.com/demucs/"
@@ -71,7 +72,13 @@ def get_model(name: str,
         model_repo = LocalRepo(repo)
         bag_repo = BagOnlyRepo(repo, model_repo)
     any_repo = AnyModelRepo(model_repo, bag_repo)
-    model = any_repo.get_model(name)
+    try:
+        model = any_repo.get_model(name)
+    except ImportError as exc:
+        if 'diffq' in exc.args[0]:
+            _check_diffq()
+        raise
+
     model.eval()
     return model
 
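For context, a minimal sketch of the new error path (not part of the commit; 'mdx_q' is one of the DiffQ-quantized pretrained bags and is used here purely for illustration):

# Without diffq installed, requesting a quantized model no longer surfaces a
# bare ImportError. get_model() catches it, and if the message mentions diffq,
# _check_diffq() prints pip install instructions via dora.log.fatal and exits
# the process. Any other ImportError is re-raised unchanged.
from demucs.pretrained import get_model

model = get_model('mdx_q')  # exits with an install hint when diffq is missing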
demucs/states.py (17 changes: 16 additions & 1 deletion)

@@ -16,19 +16,32 @@
 import warnings
 
 from omegaconf import OmegaConf
-from diffq import DiffQuantizer, UniformQuantizer, restore_quantized_state
+from dora.log import fatal
 import torch
 
 
+def _check_diffq():
+    try:
+        import diffq  # noqa
+    except ImportError:
+        fatal('Trying to use DiffQ, but diffq is not installed.\n'
+              'On Windows run: python.exe -m pip install diffq \n'
+              'On Linux/Mac, run: python3 -m pip install diffq')
+
+
 def get_quantizer(model, args, optimizer=None):
     """Return the quantizer given the XP quantization args."""
     quantizer = None
     if args.diffq:
+        _check_diffq()
+        from diffq import DiffQuantizer
         quantizer = DiffQuantizer(
             model, min_size=args.min_size, group_size=args.group_size)
         if optimizer is not None:
             quantizer.setup_optimizer(optimizer)
     elif args.qat:
+        _check_diffq()
+        from diffq import UniformQuantizer
         quantizer = UniformQuantizer(
             model, bits=args.qat, min_size=args.min_size)
     return quantizer
@@ -86,6 +99,8 @@ def set_state(model, state, quantizer=None):
         if quantizer is not None:
             quantizer.restore_quantized_state(model, state['quantized'])
         else:
+            _check_diffq()
+            from diffq import restore_quantized_state
             restore_quantized_state(model, state)
     else:
         model.load_state_dict(state)
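A hedged usage sketch of get_quantizer after this change. The attribute names match what the function reads; the values and the use of SimpleNamespace are illustrative, since real runs take these settings from the training config:

import torch
from types import SimpleNamespace
from demucs.states import get_quantizer

# args.diffq being truthy selects DiffQuantizer; args.qat selects UniformQuantizer.
# Either path now calls _check_diffq() first, so a missing diffq install fails
# with actionable instructions instead of an ImportError at module import time.
args = SimpleNamespace(diffq=True, qat=0, min_size=1, group_size=8)
model = torch.nn.Linear(16, 16)  # any torch.nn.Module works here
quantizer = get_quantizer(model, args)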
docs/release.md (2 changes: 2 additions & 0 deletions)

@@ -5,6 +5,8 @@
 Various improvements by @CarlGao4. Support for `segment` param inside of HTDemucs
 model.
 
+Made diffq an optional dependency, with an error message if not installed.
+
 ## V4.0.0, 7th of December 2022
 
 Adding hybrid transformer Demucs model.
hubconf.py (2 changes: 1 addition & 1 deletion)

@@ -4,7 +4,7 @@
 # This source code is licensed under the license found in the
 # LICENSE file in the root directory of this source tree.
 
-dependencies = ['dora-search', 'diffq', 'julius', 'lameenc', 'openunmix', 'pyyaml',
+dependencies = ['dora-search', 'julius', 'lameenc', 'openunmix', 'pyyaml',
                 'torch', 'torchaudio', 'tqdm']
 
 from demucs.pretrained import get_model
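Since diffq is gone from the dependencies list that torch.hub checks before loading, a plain hub load now works without diffq installed. A sketch, assuming 'htdemucs' (the default Hybrid Transformer model name) as the target:

import torch

# hubconf.py exposes get_model, so non-quantized models load without diffq.
model = torch.hub.load('facebookresearch/demucs', 'get_model', name='htdemucs')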
requirements_minimal.txt (1 change: 0 additions & 1 deletion)

@@ -1,6 +1,5 @@
 # please make sure you have already a pytorch install that is cuda enabled!
 dora-search
-diffq>=0.2.1
 einops
 julius>=0.2.3
 lameenc>=1.2