IO

if "CHAINER" in get_backends():
    from delira.io.chainer import save_checkpoint as chainer_save_checkpoint
    from delira.io.chainer import load_checkpoint as chainer_load_checkpoint

if "SKLEARN" in get_backends():
    from delira.io.sklearn import load_checkpoint as sklearn_load_checkpoint
    from delira.io.sklearn import save_checkpoint as sklearn_save_checkpoint

torch_load_checkpoint

torch_load_checkpoint(file, **kwargs)

Loads a saved model

Parameters
  • file (str) – filepath to a file containing a saved model

  • **kwargs – additional keyword arguments (passed to torch.load); "map_location" is especially important for changing the device the state_dict should be loaded onto

Returns

checkpoint state_dict

Return type

OrderedDict

torch_save_checkpoint

torch_save_checkpoint(file: str, model=None, optimizers=None, epoch=None, **kwargs)

Save checkpoint

Parameters
  • file (str) – filepath the model should be saved to

  • model (AbstractNetwork or None) – the model which should be saved; if None, an empty dict will be saved as the state dict

  • optimizers (dict) – dictionary containing all optimizers

  • epoch (int) – current epoch (will also be pickled)
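
A minimal usage sketch for this pair of functions (assuming the torch backend is installed and that these functions are importable from delira.io under the aliased names used in this section; per the docs, model=None stores an empty dict as the model state, while a real run would pass an AbstractPyTorchNetwork and a dict of torch optimizers):

import torch
from delira.io import torch_save_checkpoint, torch_load_checkpoint

# save a checkpoint; model=None stores an empty dict as the model state
# (in a real run, model would be an AbstractPyTorchNetwork and optimizers
# a dict such as {"default": torch.optim.Adam(model.parameters())})
torch_save_checkpoint("checkpoint.pt", model=None, optimizers=None, epoch=10)

# load the checkpoint back, remapping all tensors to the CPU
state = torch_load_checkpoint("checkpoint.pt", map_location=torch.device("cpu"))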

torchscript_load_checkpoint

torchscript_load_checkpoint(file: str, **kwargs)

Loads a saved checkpoint consisting of two files (see torchscript_save_checkpoint() for details)

Parameters
  • file (str) – filepath to a file containing a saved model

  • **kwargs – additional keyword arguments (passed to torch.load); "map_location" is especially important for changing the device the state_dict should be loaded onto

Returns

checkpoint state_dict

Return type

OrderedDict

torchscript_save_checkpoint

torchscript_save_checkpoint(file: str, model=None, optimizers=None, epoch=None, **kwargs)
Save the current checkpoint to two different files:

1.) file + "_model.ptj": includes the state of the model (including the graph, unlike torch_save_checkpoint())

2.) file + "_trainer_state.pt": includes the states of all optimizers and the current epoch (if given)

Parameters
  • file (str) – filepath the model should be saved to

  • model (AbstractPyTorchJITNetwork or None) – the model which should be saved; if None, an empty dict will be saved as the state dict

  • optimizers (dict) – dictionary containing all optimizers

  • epoch (int) – current epoch (will also be pickled)
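
A schematic sketch of a save/load round trip (the import path from delira.io and the helper name checkpoint_roundtrip are illustrative assumptions; jit_model stands for an AbstractPyTorchJITNetwork instance):

from delira.io import torchscript_save_checkpoint, torchscript_load_checkpoint

def checkpoint_roundtrip(jit_model, optimizers, epoch, prefix="ckpt"):
    # writes prefix + "_model.ptj" (scripted model including the graph)
    # and prefix + "_trainer_state.pt" (optimizer states and the epoch)
    torchscript_save_checkpoint(prefix, model=jit_model,
                                optimizers=optimizers, epoch=epoch)
    # load both parts back, remapping all tensors to the CPU
    return torchscript_load_checkpoint(prefix, map_location="cpu")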

tf_load_checkpoint

tf_load_checkpoint(file: str, model=None)

Loads a saved model

Parameters
  • file (str) – filepath to a file containing a saved model

  • model (TfNetwork) – the model which should be loaded

tf_save_checkpoint

tf_save_checkpoint(file: str, model=None)

Save the model’s parameters contained in its graph

Parameters
  • file (str) – filepath the model should be saved to

  • model (TfNetwork) – the model which should be saved
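
A schematic sketch (the import path from delira.io and the helper name are illustrative assumptions; net stands for an already constructed TfNetwork, whose setup is omitted here):

from delira.io import tf_save_checkpoint, tf_load_checkpoint

def save_and_restore(net, path="./checkpoints/model"):
    # store the parameters contained in the model's graph
    tf_save_checkpoint(path, model=net)
    # restore those parameters into the (identically built) model
    tf_load_checkpoint(path, model=net)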

tf_eager_load_checkpoint

tf_eager_load_checkpoint(file, model: delira.models.backends.tf_eager.abstract_network.AbstractTfEagerNetwork = None, optimizer: Dict[str, tensorflow.train.Optimizer] = None)

tf_eager_save_checkpoint

tf_eager_save_checkpoint(file, model: delira.models.backends.tf_eager.abstract_network.AbstractTfEagerNetwork = None, optimizer: Dict[str, tensorflow.train.Optimizer] = None, epoch=None)
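
The eager-mode pair works analogously, taking an optional dict of tf.train.Optimizer instances; a sketch under the same assumptions as above (helper name and import path are illustrative):

from delira.io import tf_eager_save_checkpoint, tf_eager_load_checkpoint

def save_and_restore_eager(net, optimizer, path="./checkpoints/eager_model", epoch=0):
    # net: an AbstractTfEagerNetwork; optimizer: a single tf.train.Optimizer,
    # wrapped into the expected {name: optimizer} dict below
    tf_eager_save_checkpoint(path, model=net,
                             optimizer={"default": optimizer}, epoch=epoch)
    # restore the saved states back into the same objects
    tf_eager_load_checkpoint(path, model=net, optimizer={"default": optimizer})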

chainer_load_checkpoint

chainer_load_checkpoint(file, old_state: dict = None, model: chainer.link.Link = None, optimizers: dict = None)

Loads a state from a given file

Parameters
  • file (str) – string containing the path to the file containing the saved state

  • old_state (dict) – dictionary containing the modules to load the states to

  • model (chainer.link.Link) – the model the state should be loaded to; overwrites the model key in old_state if not None

  • optimizers (dict) – dictionary containing all optimizers; overwrites the optimizers key in old_state if not None

Returns

the loaded state

Return type

dict

chainer_save_checkpoint

chainer_save_checkpoint(file, model=None, optimizers=None, epoch=None)

Saves the given checkpoint

Parameters
  • file (str) – string containing the path the state should be saved to

  • model (AbstractChainerNetwork) – the model whose state should be saved

  • optimizers (dict) – dictionary containing all optimizers

  • epoch (int) – the current epoch
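
A minimal sketch (assuming the chainer backend is installed; the aliased names match the conditional imports at the top of this section; the plain chainer Link below is only a stand-in for an AbstractChainerNetwork, used to illustrate the call signatures):

import chainer
from delira.io import chainer_save_checkpoint, chainer_load_checkpoint

# stand-in model and optimizer; delira expects an AbstractChainerNetwork here
model = chainer.links.Linear(4, 2)
optim = chainer.optimizers.Adam()
optim.setup(model)

chainer_save_checkpoint("checkpoint.chainer", model=model,
                        optimizers={"default": optim}, epoch=3)

# restore the saved states and get the loaded state back as a dict
state = chainer_load_checkpoint("checkpoint.chainer",
                                model=model, optimizers={"default": optim})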

sklearn_load_checkpoint

sklearn_load_checkpoint(file, **kwargs)

Loads a saved model

Parameters
  • file (str) – filepath to a file containing a saved model

  • **kwargs – additional keyword arguments (passed to torch.load); "map_location" is especially important for changing the device the state_dict should be loaded onto

Returns

checkpoint state_dict

Return type

OrderedDict

sklearn_save_checkpoint

sklearn_save_checkpoint(file: str, model=None, epoch=None, **kwargs)

Save model’s parameters

Parameters
  • file (str) – filepath the model should be saved to

  • model (AbstractNetwork or None) – the model which should be saved; if None, an empty dict will be saved as the state dict

  • epoch (int) – current epoch (will also be pickled)
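
A minimal sketch (aliased names as in the conditional imports at the top of this section; per the docs, model=None stores an empty dict, while a real run would pass a delira-wrapped sklearn estimator):

from delira.io import sklearn_save_checkpoint, sklearn_load_checkpoint

# model=None stores an empty dict as the state dict (see above); in a real
# run, model would be a delira AbstractNetwork wrapping an sklearn estimator
sklearn_save_checkpoint("sk_checkpoint.pkl", model=None, epoch=7)

state = sklearn_load_checkpoint("sk_checkpoint.pkl")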