WIP: better management of legacy positional arguments
lrzpellegrini committed Feb 1, 2024
1 parent 8af1282 commit 043effa
Showing 28 changed files with 427 additions and 341 deletions.
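
The pattern is the same across every touched strategy: each `__init__` used to accept `*args` plus keyword parameters defaulting to the string sentinel "not_set", and forwarded the captured positionals to the base template as `legacy_positional_args`. The new signatures replace `*args` with a bare `*`, which makes the strategy arguments keyword-only. A minimal sketch of the two styles; the base template's handling of `legacy_positional_args` is not part of this diff, so the "before" shim is an illustrative assumption rather than the actual template code:

    # Before (sketch): sentinels mark "not passed"; positionals are remapped
    # during the deprecation cycle.
    class BeforeStrategy:
        def __init__(self, *args, model="not_set", optimizer="not_set"):
            # The sentinel check short-circuits, so args[i] is read only
            # when the corresponding keyword was not supplied.
            self.model = model if model != "not_set" else args[0]
            self.optimizer = optimizer if optimizer != "not_set" else args[1]

    # After (sketch): a bare `*` makes the parameters keyword-only, so
    # positional calls raise TypeError instead of being silently remapped.
    class AfterStrategy:
        def __init__(self, *, model, optimizer):
            self.model = model
            self.optimizer = optimizer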
3 changes: 1 addition & 2 deletions avalanche/training/supervised/ar1.py
@@ -43,7 +43,7 @@ class AR1(SupervisedTemplate):

     def __init__(
         self,
-        *args,
+        *,
         criterion: CriterionType = None,
         lr: float = 0.001,
         inc_lr: float = 5e-5,
@@ -169,7 +169,6 @@ def __init__(
         self.replay_mb_size = 0

         super().__init__(
-            legacy_positional_args=args,
             model=model,
             optimizer=optimizer,
             criterion=criterion,
9 changes: 4 additions & 5 deletions avalanche/training/supervised/cumulative.py
@@ -20,10 +20,10 @@ class Cumulative(SupervisedTemplate):

     def __init__(
         self,
-        *args,
-        model: Module = "not_set",
-        optimizer: Optimizer = "not_set",
-        criterion: CriterionType = "not_set",
+        *,
+        model: Module,
+        optimizer: Optimizer,
+        criterion: CriterionType,
         train_mb_size: int = 1,
         train_epochs: int = 1,
         eval_mb_size: Optional[int] = None,
@@ -55,7 +55,6 @@ def __init__(
         """

         super().__init__(
-            legacy_positional_args=args,
             model=model,
             optimizer=optimizer,
             criterion=criterion,
22 changes: 6 additions & 16 deletions avalanche/training/supervised/deep_slda.py
@@ -3,6 +3,7 @@

 import os
 import torch
+from torch.nn import Module

 from avalanche.training.plugins import SupervisedPlugin
 from avalanche.training.templates import SupervisedTemplate
@@ -29,11 +30,11 @@ class StreamingLDA(SupervisedTemplate):

     def __init__(
         self,
-        *args,
-        slda_model="not_set",
-        criterion: CriterionType = "not_set",
-        input_size="not_set",
-        num_classes: int = "not_set",
+        *,
+        slda_model: Module,
+        criterion: CriterionType,
+        input_size: int,
+        num_classes: int,
         output_layer_name: Optional[str] = None,
         shrinkage_param=1e-4,
         streaming_update_sigma=True,
@@ -74,24 +75,13 @@ def __init__(
         if plugins is None:
             plugins = []

-        adapt_legacy_args = slda_model == "not_set"
-        slda_model = slda_model if slda_model != "not_set" else args[0]
         slda_model = slda_model.eval()
         if output_layer_name is not None:
             slda_model = FeatureExtractorBackbone(
                 slda_model.to(device), output_layer_name
             ).eval()

-        # Legacy positional arguments support (deprecation cycle)
-        if adapt_legacy_args:
-            args = list(args)
-            args[0] = slda_model
-
-        input_size = input_size if input_size != "not_set" else args[2]
-        num_classes = num_classes if num_classes != "not_set" else args[3]
-
         super(StreamingLDA, self).__init__(
-            legacy_positional_args=args,
             model=slda_model,
             optimizer=None,  # type: ignore
             criterion=criterion,
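
With the sentinel-remapping logic removed, StreamingLDA must now be constructed with keyword arguments. A hedged usage sketch (toy backbone and values; the import path is an assumption based on the file layout above):

    import torch.nn as nn
    from avalanche.training.supervised import StreamingLDA  # assumed export

    backbone = nn.Sequential(nn.Flatten(), nn.Linear(28 * 28, 512))  # toy extractor

    strategy = StreamingLDA(
        slda_model=backbone,
        criterion=nn.CrossEntropyLoss(),
        input_size=512,  # feature dimensionality fed to the LDA head
        num_classes=10,
    )

    # The old positional form, previously recovered via args[0]/args[2]/args[3],
    # now raises TypeError:
    # StreamingLDA(backbone, nn.CrossEntropyLoss(), 512, 10)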
7 changes: 3 additions & 4 deletions avalanche/training/supervised/der.py
@@ -157,9 +157,9 @@ class DER(SupervisedTemplate):

     def __init__(
         self,
-        *args,
-        model: Module = "not_set",
-        optimizer: Optimizer = "not_set",
+        *,
+        model: Module,
+        optimizer: Optimizer,
         criterion: CriterionType = CrossEntropyLoss(),
         mem_size: int = 200,
         batch_size_mem: Optional[int] = None,
@@ -211,7 +211,6 @@ def __init__(
             `eval_every` experience or iterations (Default='experience').
         """
         super().__init__(
-            legacy_positional_args=args,
             model=model,
             optimizer=optimizer,
             criterion=criterion,
7 changes: 3 additions & 4 deletions avalanche/training/supervised/er_ace.py
@@ -33,9 +33,9 @@ class ER_ACE(SupervisedTemplate):

     def __init__(
         self,
-        *args,
-        model: Module = "not_set",
-        optimizer: Optimizer = "not_set",
+        *,
+        model: Module,
+        optimizer: Optimizer,
         criterion: CriterionType = CrossEntropyLoss(),
         mem_size: int = 200,
         batch_size_mem: int = 10,
@@ -74,7 +74,6 @@ def __init__(
             `eval_every` epochs or iterations (Default='epoch').
         """
         super().__init__(
-            legacy_positional_args=args,
             model=model,
             optimizer=optimizer,
             criterion=criterion,
13 changes: 5 additions & 8 deletions avalanche/training/supervised/er_aml.py
@@ -28,10 +28,10 @@ class ER_AML(SupervisedTemplate):

     def __init__(
         self,
-        *args,
-        model: Module = "not_set",
-        feature_extractor: Module = "not_set",
-        optimizer: Optimizer = "not_set",
+        *,
+        model: Module,
+        feature_extractor: Module,
+        optimizer: Optimizer,
         criterion: CriterionType = CrossEntropyLoss(),
         temp: float = 0.1,
         base_temp: float = 0.07,
@@ -77,7 +77,6 @@ def __init__(
             `eval_every` epochs or iterations (Default='epoch').
         """
         super().__init__(
-            legacy_positional_args=args,
             model=model,
             optimizer=optimizer,
             criterion=criterion,
@@ -98,9 +97,7 @@ def __init__(
         )
         self.replay_loader = None
         self.aml_criterion = AMLCriterion(
-            feature_extractor=(
-                feature_extractor if feature_extractor != "not_set" else args[1]
-            ),
+            feature_extractor=feature_extractor,
             temp=temp,
             base_temp=base_temp,
             same_task_neg=same_task_neg,
7 changes: 3 additions & 4 deletions avalanche/training/supervised/expert_gate.py
@@ -42,9 +42,9 @@ class ExpertGateStrategy(SupervisedTemplate):

     def __init__(
         self,
-        *args,
-        model: Module = "not_set",
-        optimizer: Optimizer = "not_set",
+        *,
+        model: Module,
+        optimizer: Optimizer,
         criterion: CriterionType = CrossEntropyLoss(),
         train_mb_size: int = 1,
         train_epochs: int = 1,
@@ -109,7 +109,6 @@ def __init__(
         )

         super().__init__(
-            legacy_positional_args=args,
             model=model,
             optimizer=optimizer,
             criterion=criterion,
7 changes: 3 additions & 4 deletions avalanche/training/supervised/feature_replay.py
@@ -47,9 +47,9 @@ class FeatureReplay(SupervisedTemplate):

     def __init__(
         self,
-        *args,
-        model: nn.Module = "not_set",
-        optimizer: Optimizer = "not_set",
+        *,
+        model: nn.Module,
+        optimizer: Optimizer,
         criterion: CriterionType = MaskedCrossEntropy(),
         last_layer_name: str = "classifier",
         mem_size: int = 200,
@@ -67,7 +67,6 @@ def __init__(
         **kwargs
     ):
         super().__init__(
-            legacy_positional_args=args,
             model=model,
             optimizer=optimizer,
             criterion=criterion,
25 changes: 12 additions & 13 deletions avalanche/training/supervised/icarl.py
@@ -28,13 +28,13 @@ class ICaRL(SupervisedTemplate):

     def __init__(
         self,
-        *args,
-        feature_extractor: Module = "not_set",
-        classifier: Module = "not_set",
-        optimizer: Optimizer = "not_set",
-        memory_size="not_set",
-        buffer_transform="not_set",
-        fixed_memory="not_set",
+        *,
+        feature_extractor: Module,
+        classifier: Module,
+        optimizer: Optimizer,
+        memory_size: int,
+        buffer_transform,
+        fixed_memory: bool,
         train_mb_size: int = 1,
         train_epochs: int = 1,
         eval_mb_size: Optional[int] = None,
@@ -73,16 +73,16 @@ def __init__(
             learning experience.
         """
         model = TrainEvalModel(
-            feature_extractor if feature_extractor != "not_set" else args[0],
-            train_classifier=classifier if classifier != "not_set" else args[1],
+            feature_extractor,
+            train_classifier=classifier,
             eval_classifier=NCMClassifier(normalize=True),
         )

         criterion = ICaRLLossPlugin()  # iCaRL requires this specific loss (#966)
         icarl = _ICaRLPlugin(
-            memory_size if memory_size != "not_set" else args[3],
-            buffer_transform if buffer_transform != "not_set" else args[4],
-            fixed_memory if fixed_memory != "not_set" else args[5],
+            memory_size,
+            buffer_transform,
+            fixed_memory,
         )

         if plugins is None:
@@ -94,7 +94,6 @@ def __init__(
         plugins += [criterion]

         super().__init__(
-            legacy_positional_args=args,
             model=model,
             optimizer=optimizer,
             criterion=criterion,
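
The caller-facing effect is plain Python semantics: after a bare `*`, the function accepts zero positional arguments, so legacy positional calls now fail fast instead of flowing through the removed `args[n]` fallbacks. A self-contained demonstration (generic Python, not Avalanche code):

    # A bare `*` in the signature rejects all positional arguments at call time.
    def make(*, memory_size: int, fixed_memory: bool = True):
        return memory_size, fixed_memory

    print(make(memory_size=2000))  # (2000, True)

    try:
        make(2000)  # legacy positional call
    except TypeError as err:
        print(err)  # make() takes 0 positional arguments but 1 was given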
9 changes: 4 additions & 5 deletions avalanche/training/supervised/joint_training.py
@@ -58,10 +58,10 @@ class JointTraining(SupervisedTemplate[TDatasetExperience, TMBInput, TMBOutput])

     def __init__(
         self,
-        *args,
-        model: Module = "not_set",
-        optimizer: Optimizer = "not_set",
-        criterion: CriterionType = "not_set",
+        *,
+        model: Module,
+        optimizer: Optimizer,
+        criterion: CriterionType,
         train_mb_size: int = 1,
         train_epochs: int = 1,
         eval_mb_size: int = 1,
@@ -91,7 +91,6 @@ def __init__(
         `eval` is called every `eval_every` epochs and at the end of the
         learning experience."""
         super().__init__(
-            legacy_positional_args=args,
             model=model,
             optimizer=optimizer,
             criterion=criterion,
7 changes: 3 additions & 4 deletions avalanche/training/supervised/l2p.py
@@ -31,8 +31,8 @@ class LearningToPrompt(SupervisedTemplate):

     def __init__(
         self,
-        *args,
-        model_name: str = "not_set",
+        *,
+        model_name: str,
         criterion: nn.Module = nn.CrossEntropyLoss(),
         train_mb_size: int = 1,
         train_epochs: int = 1,
@@ -99,7 +99,7 @@ def __init__(
         self.lr = lr
         self.sim_coefficient = sim_coefficient
         model = create_model(
-            model_name=model_name if model_name != "not_set" else args[0],
+            model_name=model_name,
             prompt_pool=prompt_pool,
             pool_size=pool_size,
             prompt_length=prompt_length,
@@ -131,7 +131,6 @@ def __init__(
         )

         super().__init__(
-            legacy_positional_args=args,
             model=model,
             optimizer=optimizer,
             criterion=criterion,
6 changes: 3 additions & 3 deletions avalanche/training/supervised/lamaml.py
@@ -27,9 +27,9 @@
 class LaMAML(SupervisedMetaLearningTemplate):
     def __init__(
         self,
-        *args,
-        model: Module = "not_set",
-        optimizer: Optimizer = "not_set",
+        *,
+        model: Module,
+        optimizer: Optimizer,
         criterion: CriterionType = CrossEntropyLoss(),
         n_inner_updates: int = 5,
         second_order: bool = True,
6 changes: 3 additions & 3 deletions avalanche/training/supervised/lamaml_v2.py
@@ -24,9 +24,9 @@
 class LaMAML(SupervisedMetaLearningTemplate):
     def __init__(
         self,
-        *args,
-        model: Module = "not_set",
-        optimizer: Optimizer = "not_set",
+        *,
+        model: Module,
+        optimizer: Optimizer,
         criterion: CriterionType = CrossEntropyLoss(),
         n_inner_updates: int = 5,
         second_order: bool = True,
7 changes: 3 additions & 4 deletions avalanche/training/supervised/mer.py
@@ -52,9 +52,9 @@ def get_batch(self, x, y, t):
 class MER(SupervisedMetaLearningTemplate):
     def __init__(
         self,
-        *args,
-        model: Module = "not_set",
-        optimizer: Optimizer = "not_set",
+        *,
+        model: Module,
+        optimizer: Optimizer,
         criterion: CriterionType = CrossEntropyLoss(),
         mem_size=200,
         batch_size_mem=10,
@@ -88,7 +88,6 @@ def __init__(
         """
         super().__init__(
-            legacy_positional_args=args,
             model=model,
             optimizer=optimizer,
             criterion=criterion,
(Diffs for the remaining 14 changed files were not loaded in this view.)
