-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtest_storage.py
More file actions
274 lines (226 loc) · 13.7 KB
/
test_storage.py
File metadata and controls
274 lines (226 loc) · 13.7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
""" Testing Storage functions for Sunet Drive including users and buckets
Author: Richard Freitag <freitag@sunet.se>
"""
import unittest
import json
import os
import yaml
import sunetnextcloud
g_testtarget = os.environ.get('NextcloudTestTarget')
drv = sunetnextcloud.TestTarget(g_testtarget)
repobase='sunet-drive-ops/'
expectedResultsFile = 'expected.yaml'
class TestStorage(unittest.TestCase):
    """Storage tests for Sunet Drive: node storage locations, primary/mirror
    S3 buckets, project bucket consistency and the storage report share.

    Bucket listings are obtained by shelling out to ``rclone``; the rclone
    remotes must therefore be configured in the environment running the tests.
    """

    # Loaded once at class-definition time and shared by all test methods
    # via self.expectedResults.
    with open(expectedResultsFile, "r") as stream:
        expectedResults = yaml.safe_load(stream)

    # ------------------------------------------------------------------ #
    # Private helpers                                                    #
    # ------------------------------------------------------------------ #

    def _list_buckets(self, project):
        """Return the parsed ``rclone lsjson`` bucket list for *project*.

        The handle is closed via the context manager (the original code
        leaked it in some tests).
        """
        with os.popen('rclone lsjson ' + project + ':') as proc:
            return json.loads(proc.read())

    def _bucket_exists(self, buckets, name):
        """Return True if an entry named *name* is present in *buckets*."""
        return any(entry['Name'] == name for entry in buckets)

    def _load_global_config(self):
        """Return the parsed global hiera common.yaml of the ops repo."""
        # NOTE(review): repobase already ends with '/' so this yields a
        # double slash; harmless on every platform Python supports, kept
        # for byte-compatibility with the rest of the repo.
        globalconfigfile = repobase + "/global/overlay/etc/hiera/data/common.yaml"
        with open(globalconfigfile, "r") as stream:
            return yaml.safe_load(stream)

    def _assert_node_backup_buckets(self, node, target):
        """Assert that *node*'s primary bucket and its '-mirror' twin exist
        in their respective rclone projects for environment *target*.

        Shared by the fullnode and multinode primary/mirror tests, which
        were previously verbatim duplicates.
        """
        prj = self._load_global_config()["project_mapping"][node][target]
        primary_project = prj['primary_project']
        primary_bucket = prj['primary_bucket']
        mirror_project = prj['mirror_project']
        # Mirror buckets follow the '<primary>-mirror' naming convention.
        mirror_bucket = primary_bucket + '-mirror'

        primary_found = self._bucket_exists(
            self._list_buckets(primary_project), primary_bucket)
        if not primary_found:
            print('Primary project: ', primary_project,
                  '\t Primary bucket found: ', primary_bucket, ' - ', primary_found)

        mirror_found = self._bucket_exists(
            self._list_buckets(mirror_project), mirror_bucket)
        if not mirror_found:
            print('Mirror project: ', mirror_project,
                  '\t Mirror bucket found: ', mirror_bucket, ' - ', mirror_found)
        print('Mirror bucket found: ', mirror_bucket, ' - ', mirror_found)

        self.assertTrue(primary_found)
        self.assertTrue(mirror_found)

    def _assert_assigned_project_buckets(self, node, target):
        """Assert every assigned project bucket of *node* exists together
        with its '-mirror' counterpart.

        Shared by the fullnode and multinode project-bucket-consistency
        tests, which were previously verbatim duplicates.
        """
        assigned = self._load_global_config()["project_mapping"][node][target]["assigned"]
        print(node + " " + str(len(assigned)))
        for buckets in assigned:
            # Only the first assigned bucket of each project is checked,
            # matching the original behaviour.
            primary_project_bucket = buckets["buckets"][0]
            primary_project = buckets["project"]
            mirror_project = buckets["mirror_project"]
            mirror_project_bucket = primary_project_bucket + '-mirror'

            primary_found = self._bucket_exists(
                self._list_buckets(primary_project), primary_project_bucket)
            mirror_found = self._bucket_exists(
                self._list_buckets(mirror_project), mirror_project_bucket)

            print('Project bucket found: ', primary_project_bucket, ' - ', primary_found)
            print('Mirror bucket found: ', mirror_project_bucket, ' - ', mirror_found)
            self.assertTrue(primary_found)
            self.assertTrue(mirror_found)

    # ------------------------------------------------------------------ #
    # Tests                                                              #
    # ------------------------------------------------------------------ #

    def test_existingbuckets(self):
        """Smoke test: list the buckets of every configured rclone remote.

        NOTE(review): this test has no assertions; it only prints the
        listings and fails if rclone itself errors out.
        """
        with os.popen('rclone listremotes') as premotes:
            remotes = premotes.read().splitlines()
        for remote in remotes:
            print('Read: ', remote)
            with os.popen('rclone lsjson ' + remote) as pbuckets:
                print('Buckets: ', pbuckets.read())

    def test_fullnodestoragelocation(self):
        """Each fullnode's configured s3_host must match the expected
        main storage location."""
        drv = sunetnextcloud.TestTarget(g_testtarget)
        for fullnode in drv.fullnodes:
            with self.subTest(nodetotest=fullnode):
                configfile = repobase + fullnode + "-common/overlay/etc/hiera/data/group.yaml"
                with open(configfile, "r") as stream:
                    data = yaml.safe_load(stream)
                self.assertEqual(
                    data[drv.target]["s3_host"],
                    self.expectedResults['storage']['mainStorageLocation'])

    def test_multinodestoragelocation(self):
        """Each multinode's configured s3_host must match the expected
        main storage location."""
        print('Test target: ', g_testtarget)
        drv = sunetnextcloud.TestTarget(g_testtarget)
        with open(repobase + "multinode-common/overlay/etc/hiera/data/group.yaml", "r") as stream:
            data = yaml.safe_load(stream)
        for multinode in drv.multinodes:
            with self.subTest(nodetotest=multinode):
                print(multinode)
                self.assertEqual(
                    data[multinode][drv.target]["s3_host"],
                    self.expectedResults['storage']['mainStorageLocation'])

    # Test if the primary and the mirror bucket exist at the right location
    def test_fullnode_primarybackupmirrorbuckets(self):
        drv = sunetnextcloud.TestTarget(g_testtarget)
        for fullnode in drv.fullnodes:
            with self.subTest(nodetotest=fullnode):
                self._assert_node_backup_buckets(fullnode, drv.target)

    # Test if the primary and the mirror bucket exist at the right location
    def test_multinode_primarybackupmirrorbuckets(self):
        drv = sunetnextcloud.TestTarget(g_testtarget)
        for multinode in drv.multinodes:
            with self.subTest(nodetotest=multinode):
                self._assert_node_backup_buckets(multinode, drv.target)

    # Test if the number of buckets in the mirror project is the same in Sto4 and Sto3
    def test_project_mapping_primary_bucket_number(self):
        drv = sunetnextcloud.TestTarget(g_testtarget)
        for node in drv.nodestotest:
            with self.subTest(nodetotest=node):
                prj = self._load_global_config()["project_mapping"][node][drv.target]
                # TODO: Check primary bucket and primary mirror bucket names
                primary_buckets = self._list_buckets(prj['primary_project'])
                mirror_buckets = self._list_buckets(prj['mirror_project'])
                # -1 because the mirror project also holds the db-backup bucket
                self.assertEqual(len(primary_buckets), len(mirror_buckets) - 1)

    # Test project buckets for consistency: Name, number of buckets, mirror bucket
    def test_fullnode_projectbucketconsistency(self):
        drv = sunetnextcloud.TestTarget(g_testtarget)
        for fullnode in drv.fullnodes:
            with self.subTest(nodetotest=fullnode):
                self._assert_assigned_project_buckets(fullnode, drv.target)

    # Test project buckets for consistency: Name, number of buckets, mirror bucket
    def test_multinode_projectbucketconsistency(self):
        drv = sunetnextcloud.TestTarget(g_testtarget)
        for multinode in drv.multinodes:
            with self.subTest(nodetotest=multinode):
                self._assert_assigned_project_buckets(multinode, drv.target)

    # Test if access to storage report folder works
    def test_storagereport(self):
        """The DriveCustomerShare folder must be visible on the
        sunet-nextcloud remote."""
        with os.popen('rclone lsjson sunet-nextcloud:') as prclonecmd:
            folderlist = json.loads(prclonecmd.read())
        customerShareFound = any(
            folder['Path'] == "DriveCustomerShare" for folder in folderlist)
        self.assertTrue(customerShareFound)
if __name__ == '__main__':
    # Delegate to the project's shared runner (restores the indentation
    # lost in extraction; the runner collects results consistently across
    # the suite's test files).
    drv.run_tests(os.path.basename(__file__))