Commit final_upload_60

dut-media-lab committed Sep 5, 2020
1 parent f7aa4e2 commit 9618ac5
Showing 18 changed files with 55 additions and 122 deletions.
3 changes: 2 additions & 1 deletion .travis.yml
@@ -17,7 +17,8 @@ script:
- coverage run -p test_meta_repr.py --mode=train --dataset=omniglot --classes=5 --examples_train=1 --examples_test=15 --meta_batch_size=1 --n_meta_iterations=100 --T=2 --meta-lr=0.001 --lr=0.1 --method=MetaRepr --inner_method=Trad --outer_method=Reverse --inner_opt=Adam --outer_opt=Adam --clip_value=10.0 --logdir=../tmp/ --print-interval=100 --truncate_iter=1 --save_interval=100 --Notes=toolkit_test_TRHG
- coverage run -p test_meta_repr.py --mode=test --dataset=omniglot --classes=5 --examples_train=1 --examples_test=15 --meta_batch_size=1 --n_meta_iterations=100 --T=2 --meta-lr=0.001 --lr=0.1 --method=MetaRepr --inner_method=Trad --outer_method=Reverse --inner_opt=Adam --outer_opt=Adam --clip_value=10.0 --logdir=../tmp/ --expdir=../tmp/ --print-interval=100 --truncate_iter=1 --save_interval=100 --Notes=toolkit_test_TRHG
- coverage run -p test_meta_repr.py --mode=train --dataset=omniglot --classes=5 --examples_train=1 --examples_test=15 --meta_batch_size=1 --n_meta_iterations=100 --T=1 --meta-lr=0.001 --lr=0.1 --method=MetaRepr --inner_method=Aggr --learn_alpha_itr=True --learn_st=True --outer_method=Reverse --inner_opt=Momentum --outer_opt=Momentum --logdir=../tmp/ --alpha=0.7 --print-interval=100 --save_interval=100 --Notes=toolkit_test_BDA
- coverage run -p test_meta_repr.py --mode=train --dataset=omniglot --classes=5 --examples_train=1 --examples_test=15 --meta_batch_size=1 --n_meta_iterations=100 --T=1 --meta-lr=0.001 --lr=0.1 --method=MetaRepr --inner_method=Aggr --outer_method=Darts --alpha=0.7 --logdir=../tmp/ --print-interval=100 --save_interval=100 --Notes=toolkit_test_DARTS
- coverage run -p test_meta_repr.py --mode=train --dataset=omniglot --classes=5 --examples_train=1 --examples_test=15 --meta_batch_size=2 --n_meta_iterations=100 --T=1 --meta-lr=0.001 --lr=0.1 --method=MetaRepr --inner_method=Aggr --outer_method=Darts --alpha=0.7 --logdir=../tmp/ --print-interval=100 --save_interval=100 --Notes=toolkit_test_DARTS
- coverage run -p test_meta_repr.py --mode=train --dataset=omniglot --classes=5 --examples_train=1 --examples_test=15 --meta_batch_size=2 --n_meta_iterations=100 --T=1 --meta-lr=0.001 --lr=0.1 --method=MetaRepr --inner_method=Aggr --outer_method=Darts --alpha=0.7 --learn_alpha_itr=True --logdir=../tmp/ --print-interval=100 --save_interval=100 --Notes=toolkit_test_DARTS
- coverage run -p test_meta_repr.py --mode=train --dataset=omniglot --classes=5 --examples_train=1 --examples_test=15 --meta_batch_size=1 --n_meta_iterations=100 --T=1 --meta-lr=0.001 --lr=0.1 --method=MetaRepr --inner_method=Trad --outer_method=Implicit --inner_opt=SGD --outer_opt=Adam --logdir=../tmp/ --print-interval=100 --save_interval=100 --Notes=toolkit_test_Implicit
- coverage combine

72 changes: 30 additions & 42 deletions boml/boml_optimizer/optimizer.py
@@ -32,7 +32,7 @@ def __init__(
:param Method: defines the basic method for the subsequent training process; it should be one of ['MetaInit', 'MetaRepr'].
'MetaInit' covers methods like 'MAML, FOMAML, TNet, WarpGrad'; 'MetaRepr' covers methods like
'BDA, RHG, TRHG, Implicit HG, DARTS';
'BDA, RHG, Truncated, Implicit HG, DARTS';
:param inner_method: method chosen for solving the lower-level problem, one of ['Trad', 'Simple', 'Aggr']. 'MetaRepr'
chooses either 'Trad' for traditional optimization strategies or 'Aggr' for gradient-aggregation optimization; 'MetaInit'
should choose 'Simple', and set specific parameters for detailed method choices like FOMAML or TNet.
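A minimal construction sketch implied by the docstring and the validation logic below; the positional argument order (Method, inner_method, outer_method) is an assumption taken from the docstring, not a verified signature:

import boml

# Sketch only: valid (inner_method, outer_method) pairings enforced below.
# Positional order (Method, inner_method, outer_method) assumed from the docstring.
boml_ho = boml.BOMLOptimizer("MetaRepr", "Trad", "Reverse")      # RHG-style
# boml.BOMLOptimizer("MetaInit", "Simple", "Simple")             # MAML-style
# boml.BOMLOptimizer("MetaRepr", "Aggr", "Darts")                # BDA with DARTS
# boml.BOMLOptimizer("MetaRepr", "Trad", "Implicit")             # implicit hypergradient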
@@ -56,21 +56,25 @@ def __init__(
), "initialize method arguement, should be in list [Aggr, Simple, Trad]"
self.inner_method = inner_method

if self.inner_method in ("Aggr", "Trad") and outer_method == "Reverse":
outer_method = "Reverse"
elif self.inner_method == "Simple" and outer_method == "Simple":
outer_method = "Simple"
elif self.inner_method == "Trad" and outer_method == "Implicit":
outer_method = "Implicit"
elif self.inner_method in ("Aggr", "Trad") and outer_method == "Darts":
outer_method = "Darts"
if self.inner_method == "Simple":
assert (
outer_method == "Simple"
), "Choose simple configuration of lower-level and upper-level " \
"calculation strategy for meta-initialization-based method"
elif self.inner_method == "Aggr":
assert outer_method in (
"Darts",
"Reverse",
), "The bilevel aggregation strategy could choose Reverse Auto Differentiation" \
" or DARTS as upper-level calculation strategy"
else:
print(
"Invalid combination of inner and outer methods, \
please check the initialization for different level of problems or "
"extend the base classes to formulate your own problems definition"
)
raise AssertionError
assert outer_method in (
"Reverse",
"Implicit",
"Darts",
), "Invalid combination of inner and outer strategies, " \
"please check initialization for different level of problems or " \
"extend the base classes to formulate your own problems definition"
self.outer_method = outer_method
self._inner_gradient = getattr(
inner_grads, "%s%s" % ("BOMLInnerGrad", self.inner_method)
@@ -109,6 +113,8 @@ def meta_learner(
name="Hyper_Net",
use_T=False,
use_Warp=False,
model_loss_func=utils.cross_entropy,
outer_loss_func=utils.cross_entropy,
**model_args
):
"""
@@ -119,21 +125,15 @@
'v2' for Residual blocks with fully connected layer.
:param name: name for Meta model modules used for BMLNet initialization
:param use_T: whether to use T layer for C4L neural networks
:param model_loss_func: loss function for the task-specific (lower-level) objective, used e.g. by the 'WarpGrad' method; defaults to cross-entropy
:param outer_loss_func: loss function for the outer (upper-level) objective, used e.g. by the 'WarpGrad' method; defaults to cross-entropy
:return: BMLNet object containing the dict of hyperparameters
"""

self.param_dict["use_T"] = use_T
self.param_dict["use_Warp"] = use_Warp
self.param_dict["output_shape"] = dataset.train.dim_target
if use_Warp:
if "model_loss_func" in model_args.keys():
self.param_dict["model_loss_func"] = model_args["model_loss_func"]
else:
self.param_dict["model_loss_func"] = utils.cross_entropy
if "outer_loss_func" in model_args.keys():
self.param_dict["outer_loss_func"] = model_args["outer_loss_func"]
else:
self.param_dict["outer_loss_func"] = utils.cross_entropy
self.param_dict["model_loss_func"] = model_loss_func
self.param_dict["outer_loss_func"] = outer_loss_func
self.data_set = dataset
assert meta_model.startswith(
"V"
@@ -369,13 +369,12 @@ def ul_problem(
outer_objective,
meta_learning_rate,
inner_grad,
mlr_decay=1.e-5,
mlr_decay=1.0e-5,
meta_param=None,
outer_objective_optimizer="Adam",
epsilon=1.0,
momentum=0.5,
tolerance=lambda _k: 0.1 * (0.9 ** _k),
global_step=None,
):
"""
Set the outer optimization problem and the descent procedure for the optimization of the
@@ -408,9 +407,9 @@ def ul_problem(
learning_rate=self._meta_learning_rate, momentum=momentum
)
else:
self.oo_opt = getattr(boml_optimizer, "%s%s" % ("BOMLOpt", outer_objective_optimizer))(
learning_rate=self._learning_rate,
)
self.oo_opt = getattr(
boml_optimizer, "%s%s" % ("BOMLOpt", outer_objective_optimizer)
)(learning_rate=self._learning_rate,)
assert isinstance(
self._outer_gradient, getattr(hyper_grads, "BOMLOuterGrad")
), "Wrong name for inner method,should be in list \n [Reverse, Simple, Forward, Implicit]"
@@ -425,9 +424,7 @@
), "Wrong name for outer method,should be in list [Darts]"
setattr(self.outergradient, "Epsilon", tf.cast(epsilon, tf.float32))
setattr(self.outergradient, "param_dict", self.param_dict)
if self.outer_method == "Implicit" and (
not hasattr(self.outergradient, "tolerance")
):
if self.outer_method == "Implicit":
self.outergradient.set_tolerance(tolerance=tolerance)
meta_param = self.outergradient.compute_gradients(
outer_objective,
Expand All @@ -437,8 +434,6 @@ def ul_problem(
)
self._o_optim_dict[self.oo_opt].update(meta_param)

if global_step is not None:
self._global_step = global_step
return self

def minimize(
@@ -512,8 +507,6 @@ def minimize(
epsilon=epsilon,
)

return

def aggregate_all(self, aggregation_fn=None, gradient_clip=None):
"""
To be called when no more dynamics or problems will be added, computes the updates
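For orientation, the intended call order around aggregate_all, reconstructed from test_meta_init.py later in this diff; every name and signature here is an assumption rather than a verified API:

# Sketch of the pipeline this commit exercises (names reconstructed from
# the test scripts below; treat signatures as assumptions).
ex = boml.BOMLExperiment(dataset)  # hypothetical experiment wrapper
loss_inner = utils.cross_entropy(pred=ex.model.out, label=ex.y, method="MetaRepr")
optim_dict = boml_ho.ll_problem(
    inner_objective=loss_inner, learning_rate=0.1, T=5, experiment=ex
)
boml_ho.ul_problem(
    outer_objective=ex.errors["validation"],  # as in test_meta_init.py below
    meta_learning_rate=0.001,
    inner_grad=optim_dict,
)
boml_ho.aggregate_all()  # call once, after all problems are added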
@@ -550,11 +543,6 @@ def maybe_first_arg(_v):
if self._global_step:
with tf.control_dependencies([self._fin_hts]):
self._fin_hts = self._global_step.assign_add(1).op
else:
raise ValueError(
"BOMLOptimizer.Aggregate_all has already been called on "
+ "this object, further calls have no effect"
)
return self.run

def run(
16 changes: 4 additions & 12 deletions boml/load_data/experiment.py
@@ -20,18 +20,10 @@ def __init__(self, datasets, dtype=tf.float32):

# noinspection PyBroadException
def _compute_input_shape(self):
try:
sh = self.datasets.train.dim_data
return (None, sh) if isinstance(sh, int) else (None,) + sh
except:
print("Could not determine input dimension")
return None
sh = self.datasets.train.dim_data
return (None, sh) if isinstance(sh, int) else (None,) + sh

# noinspection PyBroadException
def _compute_output_shape(self):
try:
sh = self.datasets.train.dim_target
return (None, sh) if isinstance(sh, int) else (None,) + sh
except:
print("Could not determine output dimension")
return None
sh = self.datasets.train.dim_target
return (None, sh) if isinstance(sh, int) else (None,) + sh
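The simplified helpers now assume dim_data/dim_target are always available; their shape logic, illustrated with hypothetical dimensions:

# Illustration of the shape computation above (hypothetical dimensions):
for sh in (784, (28, 28, 1)):
    shape = (None, sh) if isinstance(sh, int) else (None,) + sh
    print(shape)  # (None, 784), then (None, 28, 28, 1)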
18 changes: 0 additions & 18 deletions boml/lower_iter/aggr.py
@@ -124,24 +124,6 @@ def apply_updates(self):
"""
return self._updates_op

@property
def iteration(self):
"""
Performs a descent step (as return by `tf.train.Optimizer.apply_gradients`) and computes the values of
the variables after it.
:return: A list of operation that, after performing one iteration, return the value of the state variables
being optimized (possibly including auxiliary variables)
"""
if self._iteration is None:
with tf.control_dependencies([self._updates_op]):
self._iteration = (
self._state_read()
) # performs an iteration and returns the
# value of all variables in the state (ordered according to dyn)

return self._iteration

@property
def initialization(self):
"""
18 changes: 0 additions & 18 deletions boml/lower_iter/simple.py
@@ -170,24 +170,6 @@ def model_param_tensor(self):
), "temporary weights dictionary must be initialized before being called"
return self._model_param_tensor

@property
def iteration(self):
"""
Performs a descent step (as return by `tf.train.Optimizer.apply_gradients`) and computes the values of
the variables after it.
:return: A list of operation that, after performing one iteration, return the value of the state variables
being optimized (possibly including auxiliary variables)
"""
if self._iteration is None:
with tf.control_dependencies([self._updates_op]): # ?
self._iteration = (
self._state_read()
) # performs an iteration and returns the
# value of all variables in the state (ordered according to dyn)

return self._iteration

@property
def apply_updates(self):
"""
10 changes: 0 additions & 10 deletions boml/setup_model/feedforward.py
@@ -46,16 +46,6 @@ def _forward(self):
self.task_parameter["fc_bias"],
)

if self.use_T:
conv_z = tf.get_variable(
initializer=tf.eye(self.dims[-1]),
dtype=tf.float32,
collections=self.var_collections,
trainable=False,
name="conv_z",
)
self + tf.matmul(self.out, conv_z)

def create_initial_parameter(self):
self.task_parameter = OrderedDict()
self.task_parameter["fc_weight"] = tf.get_variable(
6 changes: 3 additions & 3 deletions boml/setup_model/meta_init_v1.py
@@ -16,7 +16,7 @@ def __init__(
self,
_input,
dim_output,
name="BMLNetC4LHO",
name="BOMLNetMetaInitV1",
outer_param_dict=OrderedDict(),
model_param_dict=None,
task_parameter=None,
@@ -231,7 +231,7 @@ def BOMLNetOmniglotMetaInitV1(
outer_param_dict=OrderedDict(),
model_param_dict=OrderedDict(),
batch_norm=layers.batch_norm,
name="BMLNetC4LOmniglot",
name="BOMLNetOmniglotMetaInitV1",
outer_method="Simple",
use_T=False,
use_Warp=False,
@@ -257,7 +257,7 @@ def BOMLNetMiniMetaInitV1(
outer_param_dict=OrderedDict(),
model_param_dict=OrderedDict(),
batch_norm=layers.batch_norm,
name="BMLNetC4LMini",
name="BOMLNetMetaInitV1",
outer_method="Simple",
use_T=False,
use_Warp=False,
2 changes: 1 addition & 1 deletion boml/setup_model/meta_init_v2.py
@@ -9,7 +9,7 @@ def __init__(
self,
_input,
dim_output,
name="BMLNetResHO",
name="BOMLNetMiniMetaInitV2",
outer_param_dict=OrderedDict(),
model_param_dict=OrderedDict(),
task_parameter=None,
2 changes: 1 addition & 1 deletion boml/setup_model/meta_repr_v1.py
@@ -233,7 +233,7 @@ def BOMLNetMiniMetaReprV1(
model_param_dict=OrderedDict(),
dim_output=-1,
batch_norm=layers.batch_norm,
name="BMLNetC4LMini",
name="BOMLNetC4LMini",
use_T=False,
use_Warp=False,
outer_method="Reverse",
10 changes: 2 additions & 8 deletions boml/setup_model/meta_repr_v2.py
@@ -12,7 +12,7 @@ class BOMLNetMiniMetaReprV2(BOMLNet):
def __init__(
self,
_input,
name="BMLNetMetaReprMini",
name="BOMLNetMiniMetaReprV2",
outer_param_dict=OrderedDict(),
dim_output=-1,
model_param_dict=OrderedDict(),
@@ -100,7 +100,7 @@ class BOMLNetOmniglotMetaReprV2(BOMLNet):
def __init__(
self,
_input,
name="BMLNetMetaReprOmniglot",
name="BOMLNetOmniglotMetaReprV2",
outer_param_dict=OrderedDict(),
dim_output=-1,
model_param_dict=OrderedDict(),
@@ -181,9 +181,3 @@ def re_forward(self, new_input=None):
outer_method=self.outer_method,
)


if __name__ == "__main__":
inp = tf.placeholder(tf.float32, (None, 84, 84, 3))
net = BOMLNetMiniMetaReprV2(inp)
print(net.out)
print(boml.extension.metaparameters())
8 changes: 4 additions & 4 deletions boml/upper_iter/implicit.py
@@ -21,7 +21,6 @@ def __init__(
self,
inner_method="Trad",
linear_system_solver_gen=None,
tolerance=None,
name="BMLOuterGradImplicit",
):
super(BOMLOuterGradImplicit, self).__init__(name)
@@ -36,9 +35,7 @@ def __init__(
)
self.linear_system_solver = linear_system_solver_gen

if tolerance is None:
tolerance = lambda _k: 0.1 * (0.9 ** _k)
self.tolerance = tolerance
self.tolerance = lambda _k: 0.1 * (0.9 ** _k)

self._lin_sys = []
self._qs = []
@@ -136,3 +133,6 @@ def _forward_step(self, ss, _fd):

def _run_batch_initialization(self, ss, fd):
ss.run(self.initialization, feed_dict=fd)

def set_tolerance(self, tolerance=lambda _k: 0.1 * (0.9 ** _k)):
    self.tolerance = tolerance
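The tolerance schedule is now injected through set_tolerance (called from ul_problem when outer_method == "Implicit"); the default decays geometrically, as a quick check shows:

# Default tolerance schedule for the implicit hypergradient solve: 0.1 * 0.9**k.
tolerance = lambda _k: 0.1 * (0.9 ** _k)
print([round(tolerance(k), 4) for k in range(4)])  # [0.1, 0.09, 0.081, 0.0729]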
3 changes: 1 addition & 2 deletions boml/utils.py
@@ -125,14 +125,13 @@ def solve_int_or_generator(int_or_generator):
def cross_entropy(pred, label, method="MetaInit"):

# Note - with tf version <=0.12, this loss has incorrect 2nd derivatives
assert method in ('MetaInit', 'MetaRepr'), 'Wrong value for argument method'
if method == "MetaInit":
return tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=label)
elif method == "MetaRepr":
return tf.reduce_mean(
tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=label)
)
else:
raise AssertionError
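A short sketch of the two branches of the tightened cross_entropy; the placeholders and shapes are illustrative only:

import tensorflow as tf

logits = tf.placeholder(tf.float32, (None, 5))
labels = tf.placeholder(tf.float32, (None, 5))
# method="MetaInit" keeps per-example losses, shape (batch,):
per_example = tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=labels)
# method="MetaRepr" reduces to a scalar mean over the batch:
mean_loss = tf.reduce_mean(per_example)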


def classification_acc(pred, label):
Binary file removed test_script/__pycache__/__init__.cpython-36.pyc
Binary file removed test_script/__pycache__/__init__.cpython-37.pyc
Binary file removed test_script/__pycache__/script_helper.cpython-36.pyc
Binary file removed test_script/__pycache__/script_helper.cpython-37.pyc
1 change: 1 addition & 0 deletions test_script/test_meta_init.py
@@ -40,6 +40,7 @@ def build(metasets, learn_lr, lr0, MBS, T, mlr0,mlr_decay=1.e-5, process_fn=None
boml_ho.ul_problem(outer_objective=ex.errors['validation'], meta_learning_rate=mlr0, inner_grad=optim_dict,
outer_objective_optimizer=args.outer_opt,mlr_decay=mlr_decay,
meta_param=tf.get_collection(boml.extension.GraphKeys.METAPARAMETERS))
meta_learner = boml_ho.meta_model
meta_learning_rate = boml_ho.meta_learning_rate
apply_updates = boml_ho.outergradient.apply_updates
inner_objectives= boml_ho.inner_objectives
8 changes: 6 additions & 2 deletions test_script/test_setup_model.py
@@ -12,13 +12,17 @@ def main():
# def test_meta_init_v2():
_input_1 = tf.placeholder(tf.float32, (None, 28, 28, 1))
_input_2 = tf.placeholder(tf.float32, (None, 84, 84, 3))
boml_meta_repr_v1= boml.BOMLNetMiniMetaReprV1(_input_2,use_T=True,use_Warp=True)
boml_meta_repr_v1_t = boml.BOMLNetMiniMetaReprV1(_input_2, use_T=True,use_Warp=True)
boml_meta_repr_v1_warp = boml.BOMLNetMiniMetaReprV1(_input_2, use_T=True,use_Warp=False,name='warp_test')
boml_meta_init_v1_mini = boml.BOMLNetMiniMetaInitV1(_input_2, dim_output=5)
boml_meta_init_v2_omniglot = boml.BOMLNetOmniglotMetaInitV2(_input_1, dim_output=5)
boml_meta_init_v2 = boml.BOMLNetMiniMetaInitV2(_input_2, dim_output=5)
boml_meta_repr_v2_omniglot = boml.BOMLNetOmniglotMetaReprV2(_input_1)
boml_meta_repr_v2 = boml.BOMLNetMiniMetaReprV2(_input_2)

print(boml_meta_repr_v1.out)
print(boml_meta_init_v1_mini.out)
print(boml_meta_repr_v1_t.out)
print(boml_meta_repr_v1_warp.out)
print(boml_meta_init_v2.out)
print(boml_meta_init_v2.re_forward().out)
print(boml_meta_init_v2_omniglot.out)