Commit 214256d

Moved csv functions and fix doctest
Signed-off-by: carlosb1 <[email protected]>
1 parent 0f58f51 commit 214256d

1 file changed, 31 additions and 32 deletions

src/indexes.rs
@@ -753,14 +753,14 @@ impl Index {
             .await
     }

-    /// Add a raw csv payload to meilisearch.
+    /// Add a raw csv payload and update them if they already.
     ///
     /// It configures the correct content type for csv data.
     ///
-    /// If you send an already existing document (same id) the **whole existing document** will be overwritten by the new document.
-    /// Fields previously in the document not present in the new document are removed.
+    /// If you send an already existing document (same id) the old document will be only partially updated according to the fields of the new document.
+    /// Thus, any fields not present in the new document are kept and remained unchanged.
     ///
-    /// For a partial update of the document see [`Index::update_documents_csv`].
+    /// To completely overwrite a document, check out the [`Index::add_documents_csv`] documents method.
     ///
     /// # Example
     ///
@@ -774,9 +774,9 @@ impl Index {
     /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
     /// # futures::executor::block_on(async move {
     /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY));
-    /// let movie_index = client.index("add_documents_ndjson");
+    /// let movie_index = client.index("update_documents_csv");
     ///
-    /// let task = movie_index.add_documents_csv(
+    /// let task = movie_index.update_documents_csv(
     ///     "id,body\n1,\"doggo\"\n2,\"catto\"".as_bytes(),
     ///     Some("id"),
     /// ).await.unwrap();
@@ -789,23 +789,23 @@ impl Index {
     /// # });
     /// ```
     #[cfg(not(target_arch = "wasm32"))]
-    pub async fn add_documents_csv<T: futures_io::AsyncRead + Send + Sync + 'static>(
+    pub async fn update_documents_csv<T: futures_io::AsyncRead + Send + Sync + 'static>(
         &self,
         payload: T,
         primary_key: Option<&str>,
     ) -> Result<TaskInfo, Error> {
-        self.add_or_replace_unchecked_payload(payload, "text/csv", primary_key)
+        self.add_or_update_unchecked_payload(payload, "text/csv", primary_key)
             .await
     }

-    /// Add a raw csv payload and update them if they already.
+    /// Add a raw csv payload to meilisearch.
     ///
     /// It configures the correct content type for csv data.
     ///
-    /// If you send an already existing document (same id) the old document will be only partially updated according to the fields of the new document.
-    /// Thus, any fields not present in the new document are kept and remained unchanged.
+    /// If you send an already existing document (same id) the **whole existing document** will be overwritten by the new document.
+    /// Fields previously in the document not present in the new document are removed.
     ///
-    /// To completely overwrite a document, check out the [`Index::add_documents_csv`] documents method.
+    /// For a partial update of the document see [`Index::update_documents_csv`].
     ///
     /// # Example
     ///
@@ -819,9 +819,9 @@ impl Index {
     /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
     /// # futures::executor::block_on(async move {
     /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY));
-    /// let movie_index = client.index("update_documents_ndjson");
+    /// let movie_index = client.index("add_documents_csv");
     ///
-    /// let task = movie_index.update_documents_csv(
+    /// let task = movie_index.add_documents_csv(
     ///     "id,body\n1,\"doggo\"\n2,\"catto\"".as_bytes(),
     ///     Some("id"),
     /// ).await.unwrap();
@@ -834,12 +834,12 @@ impl Index {
     /// # });
     /// ```
     #[cfg(not(target_arch = "wasm32"))]
-    pub async fn update_documents_csv<T: futures_io::AsyncRead + Send + Sync + 'static>(
+    pub async fn add_documents_csv<T: futures_io::AsyncRead + Send + Sync + 'static>(
         &self,
         payload: T,
         primary_key: Option<&str>,
     ) -> Result<TaskInfo, Error> {
-        self.add_or_update_unchecked_payload(payload, "text/csv", primary_key)
+        self.add_or_replace_unchecked_payload(payload, "text/csv", primary_key)
             .await
     }

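For orientation, here is a minimal usage sketch of the two methods as documented after this change. It assumes a locally running Meilisearch instance, the default master key, a hypothetical index name, and the error type path meilisearch_sdk::errors::Error; it is not part of the commit.

// Minimal sketch, not part of the commit. Assumes a local Meilisearch at
// http://localhost:7700 with the master key "masterKey" and a hypothetical index.
use meilisearch_sdk::client::Client;

async fn csv_add_vs_update() -> Result<(), meilisearch_sdk::errors::Error> {
    let client = Client::new("http://localhost:7700", Some("masterKey"));
    let movies = client.index("csv_demo");

    // add_documents_csv: a document with the same id is replaced entirely,
    // so fields missing from the payload are dropped.
    let _add_task = movies
        .add_documents_csv("id,body\n1,\"doggo\"".as_bytes(), Some("id"))
        .await?;

    // update_documents_csv: the payload is merged into the existing document,
    // so fields missing from the payload are kept unchanged.
    let _update_task = movies
        .update_documents_csv("id,mood\n1,\"happy\"".as_bytes(), Some("id"))
        .await?;

    // Once both queued tasks have been processed, document 1 is expected
    // to hold both "body" and "mood".
    Ok(())
}

This mirrors the corrected doc comments above: add_or_replace_unchecked_payload backs add_documents_csv, while add_or_update_unchecked_payload backs update_documents_csv.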
@@ -2062,20 +2062,21 @@ mod tests {
     }

     #[meilisearch_test]
-    async fn test_update_documents_csv(client: Client, index: Index) -> Result<(), Error> {
-        let old_csv = "id,body\n1,\"doggo\"\n2,\"catto\"".as_bytes();
-        let updated_csv = "id,body\n1,\"new_doggo\"\n2,\"new_catto\"".as_bytes();
+    async fn test_update_documents_ndjson(client: Client, index: Index) -> Result<(), Error> {
+        let old_ndjson = r#"{ "id": 1, "body": "doggo" }{ "id": 2, "body": "catto" }"#.as_bytes();
+        let updated_ndjson =
+            r#"{ "id": 1, "second_body": "second_doggo" }{ "id": 2, "second_body": "second_catto" }"#.as_bytes();
         // Add first njdson document
         let task = index
-            .add_documents_csv(old_csv, Some("id"))
+            .add_documents_ndjson(old_ndjson, Some("id"))
             .await?
             .wait_for_completion(&client, None, None)
             .await?;
         let _ = index.get_task(task).await?;

         // Update via njdson document
         let task = index
-            .update_documents_csv(updated_csv, Some("id"))
+            .update_documents_ndjson(updated_ndjson, Some("id"))
             .await?
             .wait_for_completion(&client, None, None)
             .await?;
@@ -2087,8 +2088,8 @@ mod tests {
         assert!(elements.results.len() == 2);

         let expected_result = vec![
-            json!( {"body": "new_doggo", "id": "1"}),
-            json!( {"body": "new_catto", "id": "2"}),
+            json!( {"body": "doggo", "id": 1, "second_body": "second_doggo"}),
+            json!( {"body": "catto", "id": 2, "second_body": "second_catto"}),
         ];

         assert_eq!(elements.results, expected_result);
@@ -2115,21 +2116,20 @@ mod tests {
     }

     #[meilisearch_test]
-    async fn test_update_documents_ndjson(client: Client, index: Index) -> Result<(), Error> {
-        let old_ndjson = r#"{ "id": 1, "body": "doggo" }{ "id": 2, "body": "catto" }"#.as_bytes();
-        let updated_ndjson =
-            r#"{ "id": 1, "second_body": "second_doggo" }{ "id": 2, "second_body": "second_catto" }"#.as_bytes();
+    async fn test_update_documents_csv(client: Client, index: Index) -> Result<(), Error> {
+        let old_csv = "id,body\n1,\"doggo\"\n2,\"catto\"".as_bytes();
+        let updated_csv = "id,body\n1,\"new_doggo\"\n2,\"new_catto\"".as_bytes();
         // Add first njdson document
         let task = index
-            .add_documents_ndjson(old_ndjson, Some("id"))
+            .add_documents_csv(old_csv, Some("id"))
             .await?
             .wait_for_completion(&client, None, None)
             .await?;
         let _ = index.get_task(task).await?;

         // Update via njdson document
         let task = index
-            .update_documents_ndjson(updated_ndjson, Some("id"))
+            .update_documents_csv(updated_csv, Some("id"))
             .await?
             .wait_for_completion(&client, None, None)
             .await?;
@@ -2141,15 +2141,14 @@ mod tests {
         assert!(elements.results.len() == 2);

         let expected_result = vec![
-            json!( {"body": "doggo", "id": 1, "second_body": "second_doggo"}),
-            json!( {"body": "catto", "id": 2, "second_body": "second_catto"}),
+            json!( {"body": "new_doggo", "id": "1"}),
+            json!( {"body": "new_catto", "id": "2"}),
         ];

         assert_eq!(elements.results, expected_result);

         Ok(())
     }
-
     #[meilisearch_test]
     async fn test_get_one_task(client: Client, index: Index) -> Result<(), Error> {
         let task = index
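To check the swapped CSV behaviour end to end, a rough sketch along the lines of test_update_documents_csv above; the URL, key, and index name are assumptions, and the get_documents call follows the SDK's documents API rather than anything shown in this diff.

// Rough sketch mirroring test_update_documents_csv; assumes a local
// Meilisearch at http://localhost:7700 with the master key "masterKey".
use meilisearch_sdk::client::Client;
use serde_json::{json, Value};

async fn check_csv_update() -> Result<(), meilisearch_sdk::errors::Error> {
    let client = Client::new("http://localhost:7700", Some("masterKey"));
    let index = client.index("csv_update_check"); // hypothetical index name

    // Seed two documents from a raw CSV payload and wait for the task.
    index
        .add_documents_csv("id,body\n1,\"doggo\"\n2,\"catto\"".as_bytes(), Some("id"))
        .await?
        .wait_for_completion(&client, None, None)
        .await?;

    // Send new values for the same ids and wait again.
    index
        .update_documents_csv(
            "id,body\n1,\"new_doggo\"\n2,\"new_catto\"".as_bytes(),
            Some("id"),
        )
        .await?
        .wait_for_completion(&client, None, None)
        .await?;

    // CSV values are ingested as strings, hence the string ids below.
    let documents = index.get_documents::<Value>().await?;
    assert_eq!(
        documents.results,
        vec![
            json!({"body": "new_doggo", "id": "1"}),
            json!({"body": "new_catto", "id": "2"}),
        ]
    );
    Ok(())
}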
