Skip to content

Commit 89482bb

Browse files
Commit message: "try fix"
Parent: 650c03f · This commit: 89482bb

File tree

4 files changed

+4
-8
lines changed

4 files changed

+4
-8
lines changed

tests/pipelines/cosmos/test_cosmos.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -156,12 +156,11 @@ def test_inference(self):
156156
self.assertEqual(generated_video.shape, (9, 3, 32, 32))
157157

158158
# fmt: off
159-
expected_slice = torch.tensor([0.4525, 0.452, 0.4485, 0.4534, 0.4524, 0.4529, 0.454, 0.453, 0.5127, 0.5326, 0.5204, 0.5253, 0.5439, 0.5424, 0.5133, 0.5078])
159+
expected_slice = torch.tensor([0.0, 0.9686, 0.8549, 0.8078, 0.0, 0.8431, 1.0, 0.4863, 0.7098, 0.1098, 0.8157, 0.4235, 0.6353, 0.2549, 0.5137, 0.5333])
160160
# fmt: on
161161

162162
generated_slice = generated_video.flatten()
163163
generated_slice = torch.cat([generated_slice[:8], generated_slice[-8:]])
164-
print("txt2video:", [round(x, 4) for x in generated_slice.tolist()])
165164
self.assertTrue(torch.allclose(generated_slice, expected_slice, atol=1e-3))
166165

167166
def test_callback_inputs(self):

tests/pipelines/cosmos/test_cosmos2_text2image.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -143,12 +143,11 @@ def test_inference(self):
143143
self.assertEqual(generated_image.shape, (3, 32, 32))
144144

145145
# fmt: off
146-
expected_slice = torch.tensor([0.4525, 0.452, 0.4485, 0.4534, 0.4524, 0.4529, 0.454, 0.453, 0.5127, 0.5326, 0.5204, 0.5253, 0.5439, 0.5424, 0.5133, 0.5078])
146+
expected_slice = torch.tensor([0.451, 0.451, 0.4471, 0.451, 0.451, 0.451, 0.451, 0.451, 0.4784, 0.4784, 0.4784, 0.4784, 0.4784, 0.4902, 0.4588, 0.5333])
147147
# fmt: on
148148

149149
generated_slice = generated_image.flatten()
150150
generated_slice = torch.cat([generated_slice[:8], generated_slice[-8:]])
151-
print("txt2img:", [round(x, 4) for x in generated_slice.tolist()])
152151
self.assertTrue(torch.allclose(generated_slice, expected_slice, atol=1e-3))
153152

154153
def test_callback_inputs(self):

tests/pipelines/cosmos/test_cosmos2_video2world.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -150,12 +150,11 @@ def test_inference(self):
150150
self.assertEqual(generated_video.shape, (9, 3, 32, 32))
151151

152152
# fmt: off
153-
expected_slice = torch.tensor([0.4525, 0.452, 0.4485, 0.4534, 0.4524, 0.4529, 0.454, 0.453, 0.5127, 0.5326, 0.5204, 0.5253, 0.5439, 0.5424, 0.5133, 0.5078])
153+
expected_slice = torch.tensor([0.451, 0.451, 0.4471, 0.451, 0.451, 0.451, 0.451, 0.451, 0.5098, 0.5137, 0.5176, 0.5098, 0.5255, 0.5412, 0.5098, 0.5059])
154154
# fmt: on
155155

156156
generated_slice = generated_video.flatten()
157157
generated_slice = torch.cat([generated_slice[:8], generated_slice[-8:]])
158-
print("cosmos2video2world:", [round(x, 4) for x in generated_slice.tolist()])
159158
self.assertTrue(torch.allclose(generated_slice, expected_slice, atol=1e-3))
160159

161160
def test_components_function(self):

tests/pipelines/cosmos/test_cosmos_video2world.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -162,12 +162,11 @@ def test_inference(self):
162162
self.assertEqual(generated_video.shape, (9, 3, 32, 32))
163163

164164
# fmt: off
165-
expected_slice = torch.tensor([0.4525, 0.452, 0.4485, 0.4534, 0.4524, 0.4529, 0.454, 0.453, 0.5127, 0.5326, 0.5204, 0.5253, 0.5439, 0.5424, 0.5133, 0.5078])
165+
expected_slice = torch.tensor([0.0, 0.8275, 0.7529, 0.7294, 0.0, 0.6, 1.0, 0.3804, 0.6667, 0.0863, 0.8784, 0.5922, 0.6627, 0.2784, 0.5725, 0.7765])
166166
# fmt: on
167167

168168
generated_slice = generated_video.flatten()
169169
generated_slice = torch.cat([generated_slice[:8], generated_slice[-8:]])
170-
print("vid2world:", [round(x, 4) for x in generated_slice.tolist()])
171170
self.assertTrue(torch.allclose(generated_slice, expected_slice, atol=1e-3))
172171

173172
def test_components_function(self):

0 commit comments

Comments (0)