|
35 | 35 | expect(index.documents.count).to eq(documents.count) |
36 | 36 | end |
37 | 37 |
|
# The bang variant should block until the update is processed, never
# returning while the update is still 'enqueued'.
it 'adds documents synchronously (as an array of documents)' do
  response = index.add_documents!(documents)
  expect(response).to be_a(Hash)
  expect(response.keys).to include('updateId', 'status')
  expect(response['status']).not_to eql('enqueued')
  expect(response['status']).to eql('processed')
  expect(index.documents.count).to eq(documents.count)
end
| 47 | + |
38 | 48 | it 'infers order of fields' do |
39 | 49 | response = index.document(1) |
40 | 50 | expect(response.keys).to eq(['objectId', 'title', 'comment']) |
|
99 | 109 | expect(doc2['comment']).to eq(documents.detect { |doc| doc[:objectId] == id2 }[:comment]) |
100 | 110 | end |
101 | 111 |
|
# Synchronous batch update: titles change, untouched fields (comment)
# survive, and the document count stays the same.
it 'updates documents synchronously in index (as an array of documents)' do
  updated_documents = [
    { objectId: 123, title: 'Sense and Sensibility' },
    { objectId: 456, title: 'The Little Prince' }
  ]
  response = index.update_documents!(updated_documents)
  expect(response).to be_a(Hash)
  expect(response.keys).to include('updateId', 'status')
  expect(response['status']).not_to eql('enqueued')
  expect(response['status']).to eql('processed')
  expect(index.documents.count).to eq(documents.count)
  updated_documents.each do |updated|
    uid = updated[:objectId]
    stored = index.document(uid)
    expect(stored['title']).to eq(updated[:title])
    expect(stored['comment']).to eq(documents.detect { |doc| doc[:objectId] == uid }[:comment])
  end
end
| 133 | + |
102 | 134 | it 'updates one document in index (as an hash of one document)' do |
103 | 135 | id = 123 |
104 | 136 | updated_document = { objectId: id, title: 'Emma' } |
|
112 | 144 | expect(new_doc['comment']).to eq(documents.detect { |doc| doc[:objectId] == id }[:comment]) |
113 | 145 | end |
114 | 146 |
|
# Single-document synchronous update: only the given field is replaced;
# the rest of the stored document is untouched.
it 'updates one document synchronously in index (as an hash of one document)' do
  uid = 123
  patch = { objectId: uid, title: 'Emma' }
  response = index.update_documents!(patch)
  expect(response).to be_a(Hash)
  expect(response.keys).to include('updateId', 'status')
  expect(response['status']).not_to eql('enqueued')
  expect(response['status']).to eql('processed')
  expect(index.documents.count).to eq(documents.count)
  stored = index.document(uid)
  expect(stored['title']).to eq(patch[:title])
  expect(stored['comment']).to eq(documents.detect { |doc| doc[:objectId] == uid }[:comment])
end
| 161 | + |
115 | 162 | it 'adds only one document to index (as an hash of one document)' do |
116 | 163 | id = 30 |
117 | 164 | title = 'Hamlet' |
|
126 | 173 | index.wait_for_pending_update(response['updateId']) |
127 | 174 | end |
128 | 175 |
|
# Synchronous single-document add; the new document is removed again at
# the end so later examples keep their expected counts.
it 'adds only one document synchronously to index (as an hash of one document)' do
  new_doc = { objectId: 30, title: 'Hamlet' }
  response = index.add_documents!(new_doc)
  expect(response).to be_a(Hash)
  expect(response.keys).to include('updateId', 'status')
  expect(response['status']).not_to eql('enqueued')
  expect(response['status']).to eql('processed')
  expect(index.documents.count).to eq(documents.count + 1)
  expect(index.document(30)['title']).to eq('Hamlet')
  cleanup = index.delete_document(30)
  index.wait_for_pending_update(cleanup['updateId'])
end
| 191 | + |
129 | 192 | it 'update a document with new fields' do |
130 | 193 | id = 2 |
131 | 194 | doc = { objectId: id, note: '8/10' } |
|
163 | 226 | expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error |
164 | 227 | end |
165 | 228 |
|
# Synchronous single delete: the document is gone as soon as the bang
# method returns (status already 'processed').
it 'deletes one document synchronously from index' do
  uid = 456
  response = index.delete_document!(uid)
  expect(response).to be_a(Hash)
  expect(response.keys).to include('updateId', 'status')
  expect(response['status']).not_to eql('enqueued')
  expect(response['status']).to eql('processed')
  expect(index.documents.size).to eq(documents.count - 1)
  expect { index.document(uid) }.to raise_document_not_found_meilisearch_api_error
end
| 240 | + |
166 | 241 | it 'does nothing when trying to delete a document which does not exist' do |
167 | 242 | id = 111 |
168 | 243 | response = index.delete_document(id) |
|
183 | 258 | expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error |
184 | 259 | end |
185 | 260 |
|
# delete_documents! also accepts a bare uid; count reflects the document
# already removed by the previous example (hence count - 2).
it 'deletes one document synchronously from index (with delete-batch route)' do
  uid = 2
  response = index.delete_documents!(uid)
  expect(response).to be_a(Hash)
  expect(response.keys).to include('updateId', 'status')
  expect(response['status']).not_to eql('enqueued')
  expect(response['status']).to eql('processed')
  expect(index.documents.size).to eq(documents.count - 2)
  expect { index.document(uid) }.to raise_document_not_found_meilisearch_api_error
end
| 272 | + |
186 | 273 | it 'deletes one document from index (with delete-batch route as an array of one uid)' do |
187 | 274 | id = 123 |
188 | 275 | response = index.delete_documents([id]) |
|
193 | 280 | expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error |
194 | 281 | end |
195 | 282 |
|
# Same route, but the uid is wrapped in a one-element array; two
# documents were removed by earlier examples (hence count - 3).
it 'deletes one document synchronously from index (with delete-batch route as an array of one uid)' do
  uid = 123
  response = index.delete_documents!([uid])
  expect(response).to be_a(Hash)
  expect(response.keys).to include('updateId', 'status')
  expect(response['status']).not_to eql('enqueued')
  expect(response['status']).to eql('processed')
  expect(index.documents.size).to eq(documents.count - 3)
  expect { index.document(uid) }.to raise_document_not_found_meilisearch_api_error
end
| 294 | + |
196 | 295 | it 'deletes multiples documents from index' do |
197 | 296 | docs_to_delete = [1, 4] |
198 | 297 | response = index.delete_documents(docs_to_delete) |
|
202 | 301 | expect(index.documents.size).to eq(documents.count - 3 - docs_to_delete.count) |
203 | 302 | end |
204 | 303 |
|
# Synchronous batch delete of several uids at once; three documents were
# already removed by the preceding examples.
it 'deletes multiples documents synchronously from index' do
  ids = [1, 4]
  response = index.delete_documents!(ids)
  expect(response).to be_a(Hash)
  expect(response.keys).to include('updateId', 'status')
  expect(response['status']).not_to eql('enqueued')
  expect(response['status']).to eql('processed')
  expect(index.documents.size).to eq(documents.count - 3 - ids.count)
end
| 314 | + |
205 | 315 | it 'clears all documents from index' do |
206 | 316 | response = index.delete_all_documents |
207 | 317 | index.wait_for_pending_update(response['updateId']) |
|
211 | 321 | expect(index.documents.size).to eq(0) |
212 | 322 | end |
213 | 323 |
|
# Synchronous wipe: delete_all_documents! blocks until the deletion has
# been processed, so the index is verifiably empty on return.
it 'clears all documents synchronously from index' do
  response = index.delete_all_documents!
  expect(response).to be_a(Hash)
  expect(response).to have_key('updateId')
  expect(response).to have_key('status')
  expect(response['status']).not_to eql('enqueued')
  expect(response['status']).to eql('processed')
  # `be_empty` already asserts a zero-sized collection; the former
  # `size == 0` expectation duplicated it and has been dropped.
  expect(index.documents).to be_empty
end
| 334 | + |
214 | 335 | it 'fails to add document with bad primary-key format' do |
215 | 336 | response = index.add_documents(objectId: 'toto et titi', title: 'Unknown') |
216 | 337 | index.wait_for_pending_update(response['updateId']) |
|
0 commit comments