 /* globals apiClients */
 'use strict'
 
-const expect = require('chai').expect
-const Readable = require('stream').Readable
-const path = require('path')
-const isNode = require('detect-node')
-const fs = require('fs')
-const bs58 = require('bs58')
-
-const testfileBig = fs.readFileSync(path.join(__dirname, '/../15mb.random'))
-const testfile = fs.readFileSync(path.join(__dirname, '/../testfile.txt'))
-
-describe('.add', () => {
-  it('add buffer as tuple', (done) => {
-    if (!isNode) {
-      return done()
-    }
-
-    const file = {
-      path: 'testfile.txt',
-      content: new Buffer(testfile)
-    }
-
-    apiClients.a.files.add([file], (err, res) => {
-      expect(err).to.not.exist
-
-      const added = res[0] != null ? res[0] : res
-      const mh = bs58.encode(added.multihash()).toString()
-      expect(mh).to.equal('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
-      expect(added.links).to.have.length(0)
-      done()
-    })
-  })
-
-  it('add buffer', (done) => {
-    let buf = new Buffer(testfile)
-    apiClients.a.files.add(buf, (err, res) => {
-      expect(err).to.not.exist
-
-      expect(res).to.have.length(1)
-      const mh = bs58.encode(res[0].multihash()).toString()
-      expect(mh).to.equal('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
-      expect(res[0].links).to.have.length(0)
-      done()
-    })
-  })
-
-  it('add BIG buffer', (done) => {
-    if (!isNode) {
-      return done()
-    }
-
-    apiClients.a.files.add(testfileBig, (err, res) => {
-      expect(err).to.not.exist
-
-      expect(res).to.have.length(1)
-      const mh = bs58.encode(res[0].multihash()).toString()
-      expect(mh).to.equal('Qmcx5werSWQPdrGVap7LARHB4QUSPRPJwxhFuHvdoXqQXT')
-      expect(res[0].links).to.have.length(58)
-      done()
-    })
-  })
-
-  it('local fs: add file', (done) => {
-    if (!isNode) {
-      return done()
-    }
-
-    apiClients.a.add(path.join(__dirname, '/../testfile.txt'), (err, res) => {
-      expect(err).to.not.exist
-
-      const added = res[0] != null ? res[0] : res
-      const mh = bs58.encode(added.multihash()).toString()
-      expect(mh).to.equal('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
-      expect(added.links).to.have.length(0)
-      done()
-    })
-  })
-
-  it('local fs: add nested dir (follow symlinks)', (done) => {
-    apiClients.a.util.addFiles(path.join(__dirname, '/../test-folder'), { recursive: true }, (err, res) => {
-      if (isNode) {
-        expect(err).to.not.exist
-
-        const added = res[res.length - 1]
-        const mh = bs58.encode(added.multihash()).toString()
-        expect(mh).to.equal('QmRNjDeKStKGTQXnJ2NFqeQ9oW23WcpbmvCVrpDHgDg3T6')
-        expect(added.links).to.have.length(7)
-
-        done()
-      } else {
-        expect(err.message).to.be.equal('Recursive uploads are not supported in the browser')
-        done()
-      }
-    })
-  })
-
-  it('local fs: add nested dir (don\'t follow symlinks)', (done) => {
-    apiClients.a.util.addFiles(path.join(__dirname, '/../test-folder'), { recursive: true, followSymlinks: false }, (err, res) => {
-      if (isNode) {
-        expect(err).to.not.exist
-
-        const added = res[res.length - 1]
-        // same hash as the result from the cli (ipfs add test/test-folder -r)
-        const mh = bs58.encode(added.multihash()).toString()
-        expect(mh).to.equal('QmRArDYd8Rk7Zb7K2699KqmQM1uUoejn1chtEAcqkvjzGg')
-        expect(added.links).to.have.length(7)
-        done()
-      } else {
-        expect(err.message).to.be.equal('Recursive uploads are not supported in the browser')
-        done()
-      }
-    })
-  })
-
-  it('add a nested dir as array', (done) => {
-    if (!isNode) {
-      return done()
-    }
-    const base = path.join(__dirname, '../test-folder')
-    const content = (name) => ({
-      path: `test-folder/${name}`,
-      content: fs.readFileSync(path.join(base, name))
-    })
-    const dirs = [
-      content('add.js'),
-      content('cat.js'),
-      content('ls.js'),
-      content('ipfs-add.js'),
-      content('version.js'),
-      content('files/hello.txt'),
-      content('files/ipfs.txt'),
-      {
-        path: 'test-folder',
-        dir: true
-      }
-    ]
-
-    apiClients.a.files.add(dirs, { recursive: true }, (err, res) => {
-      expect(err).to.not.exist
-
-      const added = res[res.length - 1]
-      const mh = bs58.encode(added.multihash()).toString()
-      expect(mh).to.equal('QmTDH2RXGn8XyDAo9YyfbZAUXwL1FCr44YJCN9HBZmL9Gj')
-      expect(added.links).to.have.length(6)
-      done()
-    })
-  })
-
-  it('add stream', (done) => {
-    const stream = new Readable()
-    stream.push('Hello world')
-    stream.push(null)
-
-    apiClients.a.files.add(stream, (err, res) => {
-      expect(err).to.not.exist
-
-      const added = res[0] != null ? res[0] : res
-      const mh = bs58.encode(added.multihash()).toString()
-      expect(mh).to.equal('QmNRCQWfgze6AbBCaT1rkrkV5tJ2aP4oTNPb5JZcXYywve')
-      expect(added.links).to.have.length(0)
-      done()
-    })
-  })
-
-  it('add url', (done) => {
-    const url = 'https://raw.githubusercontent.com/ipfs/js-ipfs-api/2a9cc63d7427353f2145af6b1a768a69e67c0588/README.md'
-    apiClients.a.util.addUrl(url, (err, res) => {
-      expect(err).to.not.exist
-
-      const added = res[0] != null ? res[0] : res
-      const mh = bs58.encode(added.multihash()).toString()
-      expect(mh).to.equal('QmRzvSX35JpzQ2Lyn55r3YwWqdVP6PPxYHFpiWpwQTff8A')
-      expect(added.links).to.have.length(0)
-      done()
-    })
-  })
-
-  describe('promise', () => {
-    it('add buffer', () => {
-      let buf = new Buffer(testfile)
-      return apiClients.a.files.add(buf)
-        .then((res) => {
-          const added = res[0] != null ? res[0] : res
-          const mh = bs58.encode(added.multihash()).toString()
-          expect(mh).to.equal('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
-          expect(added.links).to.have.length(0)
-        })
-    })
-  })
-})
+const test = require('interface-ipfs-core')
+
+const common = {
+  setup: function (cb) {
+    cb(null, apiClients.a)
+  },
+  teardown: function (cb) {
+    cb()
+  }
+}
+
+test.files(common)
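
The `common` object hands interface-ipfs-core a ready API client (`apiClients.a`) through its `setup` hook and a no-op `teardown`. The snippet below is only an illustrative sketch of how a suite factory along these lines could consume such an object from Mocha hooks; it assumes the callback shapes shown in this commit and is not the actual interface-ipfs-core source.

// Illustrative only: a hypothetical suite factory consuming a `common`
// object with the setup/teardown callback shape used above.
const expect = require('chai').expect

module.exports = (common) => {
  describe('.files (sketch)', () => {
    let ipfs

    before((done) => {
      // setup(cb) is expected to call back with a ready API client,
      // e.g. apiClients.a in this test harness.
      common.setup((err, client) => {
        expect(err).to.not.exist
        ipfs = client
        done()
      })
    })

    after((done) => {
      // teardown(cb) releases anything setup allocated.
      common.teardown(done)
    })

    it('exposes files.add on the injected client', () => {
      expect(ipfs.files.add).to.be.a('function')
    })
  })
}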