1 parent f094cd3 commit e48fd13
iotdb-core/ainode/ainode/core/manager/inference_manager.py
@@ -59,7 +59,7 @@ def infer(self, full_data, predict_length=96, **_):
         data = data.byteswap().newbyteorder()
         seqs = torch.tensor(data).unsqueeze(0).float()
         # TODO: unify model inference input
-        output = self.model.generate(seqs, max_new_tokens=predict_length)
+        output = self.model.generate(seqs, max_new_tokens=predict_length, revin=True)
         df = pd.DataFrame(output[0])
         return convert_to_binary(df)
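
The only functional change in this hunk is the extra revin=True argument passed to self.model.generate(). Below is a minimal, self-contained sketch of the inference path the hunk touches, assuming revin toggles reversible instance normalization (RevIN): the input window is standardized per series before generation and the forecast is de-standardized afterwards. The StubModel class, the standalone infer function, and the CSV serialization standing in for convert_to_binary are hypothetical and only illustrate the shape of the call.

# Sketch of the diffed inference path, under the assumptions stated above.
import numpy as np
import pandas as pd
import torch


class StubModel:
    """Hypothetical stand-in for self.model; repeats the last value as a 'forecast'."""

    def generate(self, seqs: torch.Tensor, max_new_tokens: int, revin: bool = False) -> torch.Tensor:
        if revin:
            # Reversible instance normalization: remember per-series statistics ...
            mean = seqs.mean(dim=-1, keepdim=True)
            std = seqs.std(dim=-1, keepdim=True).clamp_min(1e-5)
            seqs = (seqs - mean) / std
        # Naive "generation": tile the last observed (possibly normalized) value.
        out = seqs[..., -1:].repeat(1, max_new_tokens)
        if revin:
            # ... and undo the normalization on the generated values.
            out = out * std + mean
        return out


def infer(model: StubModel, full_data: np.ndarray, predict_length: int = 96) -> bytes:
    # Mirrors the diffed method body: the big-endian payload is converted to native
    # byte order, wrapped in a batch dimension, passed through generate(), and serialized.
    # (byteswap().view(...) is the NumPy>=2 equivalent of the diff's byteswap().newbyteorder().)
    data = full_data.byteswap().view(full_data.dtype.newbyteorder())
    seqs = torch.tensor(data).unsqueeze(0).float()
    output = model.generate(seqs, max_new_tokens=predict_length, revin=True)
    df = pd.DataFrame(output[0].numpy())
    return df.to_csv(index=False).encode()  # stand-in for convert_to_binary(df)


if __name__ == "__main__":
    history = np.arange(32, dtype=">f4")  # big-endian sample window
    payload = infer(StubModel(), history, predict_length=4)
    print(payload.decode())

If revin does work this way, passing it through generate() keeps the normalization statistics inside the model call, so inference_manager.py itself never has to track or invert them.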