1 parent d6a45e1 commit 065e4aa
autointent/_dump_tools.py
@@ -67,7 +67,7 @@ def make_subdirectories(path: Path, exists_ok: bool = False) -> None:
         path / Dumper.hf_models,
         path / Dumper.hf_tokenizers,
         path / Dumper.torch_models,
-        path / Dumper.containers
+        path / Dumper.containers,
         path / Dumper.ptuning_models,
     ]
     for subdir in subdirectories:
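The only change here is a trailing comma after the last-but-one list entry, which normalizes the list style without altering behaviour. For context, a minimal runnable sketch of what a helper like this typically does; the Dumper attribute string values and the mkdir call pattern are illustrative assumptions, not taken from this commit:

from pathlib import Path


class Dumper:
    # Stand-in values: the real autointent Dumper defines these
    # subdirectory names, which are not shown in this commit.
    hf_models = "hf_models"
    hf_tokenizers = "hf_tokenizers"
    torch_models = "torch_models"
    containers = "containers"
    ptuning_models = "ptuning_models"


def make_subdirectories(path: Path, exists_ok: bool = False) -> None:
    # Create one subdirectory per artifact type under the dump root.
    subdirectories = [
        path / Dumper.hf_models,
        path / Dumper.hf_tokenizers,
        path / Dumper.torch_models,
        path / Dumper.containers,
        path / Dumper.ptuning_models,
    ]
    for subdir in subdirectories:
        # Assumed semantics: exists_ok mirrors pathlib's exist_ok flag.
        subdir.mkdir(parents=True, exist_ok=exists_ok)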
autointent/modules/scoring/_cnn/textcnn.py
@@ -75,12 +75,12 @@ def load(self, model_path: str) -> None:
 
     def get_config(self) -> dict:
         return {
-            'vocab_size': self.vocab_size.item(),
-            'n_classes': self.n_classes.item(),
-            'embed_dim': self.embed_dim.item(),
-            'kernel_sizes': self.kernel_sizes.tolist(),
-            'num_filters': self.num_filters.item(),
-            'dropout': self.dropout_rate.item(),
-            'padding_idx': self.padding_idx.item(),
-            'pretrained_embs': self.pretrained_embs,
-        }
+            "vocab_size": self.vocab_size.item(),
+            "n_classes": self.n_classes.item(),
+            "embed_dim": self.embed_dim.item(),
+            "kernel_sizes": self.kernel_sizes.tolist(),
+            "num_filters": self.num_filters.item(),
+            "dropout": self.dropout_rate.item(),
+            "padding_idx": self.padding_idx.item(),
+            "pretrained_embs": self.pretrained_embs,
+        }
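Only the quote style changes here (single to double quotes); the returned values are identical. Note that the .item() and .tolist() calls convert tensors to plain Python scalars and lists, so the dict is JSON-serializable. A rough sketch of how such a config might be persisted next to the weights and read back; the file name and helper functions below are illustrative assumptions, not part of this commit:

import json
from pathlib import Path


def save_config(model, directory: Path) -> None:
    # Dump the JSON-serializable config produced by get_config().
    (directory / "config.json").write_text(json.dumps(model.get_config()))


def load_config(directory: Path) -> dict:
    # Read the config back; how it is fed into the model constructor
    # is not shown in this commit.
    return json.loads((directory / "config.json").read_text())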