8
8
from __future__ import division as _ # noqa
9
9
from __future__ import absolute_import as _ # noqa
10
10
from ..data_structures .sframe import SFrame
11
+ from ..data_structures .sarray import SArray
11
12
from turicreate .util import _assert_sframe_equal
12
13
13
14
import tempfile
22
23
# The cache block size is 64MB; the 77MB "big" SFrame deliberately spans
# more than one cache block so cross-block reads get exercised too.
remote_sframe_folders = [
    "small_sframe_dc",
    "medium_sframe_ac",
    "big_sframe_od",
]
remote_sarray_folders = ["tiny_array"]
25
27
26
28
27
29
@pytest .mark .skipif (
@@ -40,6 +42,7 @@ def setup_class(self):
40
42
self .bucket = "tc_qa"
41
43
self .s3_root_prefix = "integration/manual/"
42
44
self .s3_sframe_prefix = os .path .join (self .s3_root_prefix , "sframes/" )
45
+ self .s3_sarray_prefix = os .path .join (self .s3_root_prefix , "sarrays/" )
43
46
44
47
# download all related files once
45
48
self .downloaded_files = dict ()
@@ -105,6 +108,13 @@ def test_s3_sframe_download(self, folder):
105
108
sf_from_s3 = SFrame (s3_url )
106
109
_assert_sframe_equal (sf_from_disk , sf_from_s3 )
107
110
111
+ @pytest .mark .parametrize ("folder" , remote_sarray_folders )
112
+ def test_s3_sarray_download (self , folder ):
113
+ s3_url = os .path .join ("s3://" , self .bucket , self .s3_sarray_prefix , folder )
114
+ array = SArray (s3_url )
115
+ assert len (array ) == 1
116
+ assert array [0 ] == 1
117
+
108
118
@pytest .mark .parametrize ("folder" , remote_sframe_folders )
109
119
def test_s3_sframe_upload (self , folder ):
110
120
# s3 only writes when it receives all parts
0 commit comments