Skip to content

Commit 1a3d970

Browse files
committed
MLE-24983 Updating links to Spark docs
Using "latest" instead of "3.5.6"
1 parent 22d1694 commit 1a3d970

31 files changed: +46 −46 lines changed

docs/import/embedder/embedder.md

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -98,15 +98,15 @@ options to be used (the deployment names and endpoints are notional):
9898
--embedder azure \
9999
--embedder-prop api-key=changeme \
100100
--embedder-prop deployment-name=text-test-embedding-ada-002 \
101-
--embedder-prop endpoint=https://gpt-testing.openai.azure.com
101+
--embedder-prop endpoint=https://gpt-testing-custom-data1.openai.azure.com
102102
```
103103
{% endtab %}
104104
{% tab log Windows %}
105105
```
106106
--embedder azure ^
107107
--embedder-prop api-key=changeme ^
108108
--embedder-prop deployment-name=text-test-embedding-ada-002 ^
109-
--embedder-prop endpoint=https://gpt-testing.openai.azure.com
109+
--embedder-prop endpoint=https://gpt-testing-custom-data1.openai.azure.com
110110
```
111111
{% endtab %}
112112
{% endtabs %}
@@ -301,7 +301,7 @@ connection string are notional):
301301
--embedder azure \
302302
@azure-api-key.txt \
303303
--embedder-prop deployment-name=text-test-embedding-ada-002 \
304-
--embedder-prop endpoint=https://gpt-testing.openai.azure.com
304+
--embedder-prop endpoint=https://gpt-testing-custom-data1.openai.azure.com
305305
```
306306
{% endtab %}
307307
{% tab log Windows %}
@@ -318,7 +318,7 @@ bin\flux import-files ^
318318
--embedder azure ^
319319
@azure-api-key.txt ^
320320
--embedder-prop deployment-name=text-test-embedding-ada-002 ^
321-
--embedder-prop endpoint=https://gpt-testing.openai.azure.com
321+
--embedder-prop endpoint=https://gpt-testing-custom-data1.openai.azure.com
322322
```
323323
{% endtab %}
324324
{% endtabs %}

flux-cli/src/main/java/com/marklogic/flux/api/AggregateJsonFilesImporter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88

99
/**
1010
* Read either JSON Lines files or files containing arrays of JSON objects from supported file locations using
11-
* <a href="https://spark.apache.org/docs/3.5.6/sql-data-sources-json.html">Spark's JSON support</a>,
11+
* <a href="https://spark.apache.org/docs/latest/sql-data-sources-json.html">Spark's JSON support</a>,
1212
* and write each object as a JSON document to MarkLogic.
1313
*/
1414
public interface AggregateJsonFilesImporter extends Executor<AggregateJsonFilesImporter> {

flux-cli/src/main/java/com/marklogic/flux/api/AvroFilesExporter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
/**
99
* Read rows via Optic from MarkLogic and write them to Avro files on a local filesystem,
1010
* HDFS, or S3 using
11-
* <a href="https://spark.apache.org/docs/3.5.6/sql-data-sources-avro.html">Spark's Avro support</a>.
11+
* <a href="https://spark.apache.org/docs/latest/sql-data-sources-avro.html">Spark's Avro support</a>.
1212
*/
1313
public interface AvroFilesExporter extends Executor<AvroFilesExporter> {
1414

flux-cli/src/main/java/com/marklogic/flux/api/AvroFilesImporter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88
/**
99
* Read Avro files from supported file locations using
10-
* <a href="https://spark.apache.org/docs/3.5.6/sql-data-sources-avro.html">Spark's Avro support</a>,
10+
* <a href="https://spark.apache.org/docs/latest/sql-data-sources-avro.html">Spark's Avro support</a>,
1111
* and write JSON or XML documents to MarkLogic.
1212
*/
1313
public interface AvroFilesImporter extends Executor<AvroFilesImporter> {

flux-cli/src/main/java/com/marklogic/flux/api/DelimitedFilesExporter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88

99
/**
1010
* Read rows via Optic from MarkLogic and write them to delimited text files on a local filesystem, HDFS, or S3
11-
* using <a href="https://spark.apache.org/docs/3.5.6/sql-data-sources-csv.html">Spark's CSV support</a>.
11+
* using <a href="https://spark.apache.org/docs/latest/sql-data-sources-csv.html">Spark's CSV support</a>.
1212
*/
1313
public interface DelimitedFilesExporter extends Executor<DelimitedFilesExporter> {
1414

flux-cli/src/main/java/com/marklogic/flux/api/DelimitedFilesImporter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88

99
/**
1010
* Read delimited text files from supported file locations using
11-
* <a href="https://spark.apache.org/docs/3.5.6/sql-data-sources-csv.html">Spark's CSV support</a>,
11+
* <a href="https://spark.apache.org/docs/latest/sql-data-sources-csv.html">Spark's CSV support</a>,
1212
* and write JSON or XML documents to MarkLogic.
1313
*/
1414
public interface DelimitedFilesImporter extends Executor<DelimitedFilesImporter> {

flux-cli/src/main/java/com/marklogic/flux/api/JdbcExporter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88
/**
99
* Read rows via Optic from MarkLogic and write them to a table using
10-
* <a href="https://spark.apache.org/docs/3.5.6/sql-data-sources-jdbc.html">Spark's JDBC support</a>.
10+
* <a href="https://spark.apache.org/docs/latest/sql-data-sources-jdbc.html">Spark's JDBC support</a>.
1111
*/
1212
public interface JdbcExporter extends Executor<JdbcExporter> {
1313

flux-cli/src/main/java/com/marklogic/flux/api/JdbcImporter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88
/**
99
* Read rows using
10-
* <a href="https://spark.apache.org/docs/3.5.6/sql-data-sources-jdbc.html">Spark's JDBC support</a>
10+
* <a href="https://spark.apache.org/docs/latest/sql-data-sources-jdbc.html">Spark's JDBC support</a>
1111
* and write JSON or XML documents to MarkLogic.
1212
*/
1313
public interface JdbcImporter extends Executor<JdbcImporter> {

flux-cli/src/main/java/com/marklogic/flux/api/JsonLinesFilesExporter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88

99
/**
1010
* Read rows via Optic from MarkLogic and write them to JSON Lines files on a local filesystem, HDFS, or S3 using
11-
* <a href="https://spark.apache.org/docs/3.5.6/sql-data-sources-json.html">Spark's JSON support</a>.
11+
* <a href="https://spark.apache.org/docs/latest/sql-data-sources-json.html">Spark's JSON support</a>.
1212
*/
1313
public interface JsonLinesFilesExporter extends Executor<JsonLinesFilesExporter> {
1414

flux-cli/src/main/java/com/marklogic/flux/api/OrcFilesExporter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88
/**
99
* Read rows via Optic from MarkLogic and write them to ORC files on a local filesystem, HDFS, or S3 using
10-
* <a href="https://spark.apache.org/docs/3.5.6/sql-data-sources-orc.html">Spark's ORC support</a>.
10+
* <a href="https://spark.apache.org/docs/latest/sql-data-sources-orc.html">Spark's ORC support</a>.
1111
*/
1212
public interface OrcFilesExporter extends Executor<OrcFilesExporter> {
1313

0 commit comments

Comments (0)