Commit afcca39

Merge pull request #272 from peacewong/dev-1.1.1

update 1.1.1 manual

2 parents 2cfc543 + 708a2aa

File tree

6 files changed: +33 -21 lines changed


docs/deployment/quick_deploy.md

Lines changed: 6 additions & 0 deletions
@@ -243,6 +243,12 @@ cp mysql-connector-java-5.1.49.jar {LINKIS_HOME}/lib/linkis-commons/public-modul
 
 ### 5. Linkis quick startup
 
+**Notice**: if you use DSS or another project that depends on a Linkis version < 1.1.1, you also need to modify the linkis.properties file:
+```shell
+echo "wds.linkis.session.ticket.key=bdp-user-ticket-id" >> linkis.properties
+```
+
+
 (1). Start services
 
 Run the following commands on the installation directory to start all services.

docs/development/linkis_config.md

Lines changed: 2 additions & 2 deletions
@@ -106,8 +106,8 @@ Note: When submitting client parameters, only engine-related parameters, tag par
 labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // Specify the engine type and version
 labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE"); // Specify the running user and your APPName
 labels.put(LabelKeyConstant.CODE_TYPE_KEY, "sql"); // Specify the script type to run. Spark supports: sql, scala, py; Hive: hql; shell: sh; python: python; presto: psql
-labels.put(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, "10000"); // Kill is initiated automatically if the job runs longer than this timeout; the unit is ms
-labels.put(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, "10000"); // Kill is initiated automatically if the job queues longer than this timeout; the unit is ms
+labels.put(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, "10000"); // Kill is initiated automatically if the job runs longer than this timeout; the unit is s
+labels.put(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, "10000"); // Kill is initiated automatically if the job queues longer than this timeout; the unit is s
 labels.put(LabelKeyConstant.RETRY_TIMEOUT_KEY, "10000"); // How long the job waits before retrying after failing for resource or similar reasons; the unit is ms. If it fails due to insufficient queue resources, 10 retries are initiated at this interval by default
 labels.put(LabelKeyConstant.TENANT_KEY, "hduser02"); // Tenant label; if the tenant parameter is specified for a task, the task is routed to a separate ECM machine
 labels.put(LabelKeyConstant.EXECUTE_ONCE_KEY, ""); // Execute-once label. Setting this parameter is not recommended: the engine will not be reused and shuts down after the task finishes. Set it only when a task needs specialized parameters
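Applied in context, here is a minimal, self-contained sketch of the label map these lines belong to; the `org.apache.linkis.manager.label.constant` import path and the `hadoop` placeholder user are assumptions for illustration, following the Linkis 1.x client usage shown in this manual:

```java
import java.util.HashMap;
import java.util.Map;

// Assumed Linkis 1.x location of the label-key constants used above
import org.apache.linkis.manager.label.constant.LabelKeyConstant;

public class TimeoutLabelSketch {

    static Map<String, Object> buildLabels(String user) {
        Map<String, Object> labels = new HashMap<>();
        labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3");       // engine type and version
        labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE"); // running user and APPName
        labels.put(LabelKeyConstant.CODE_TYPE_KEY, "sql");                 // script type
        // Per this change, both timeout values are now interpreted in seconds
        labels.put(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, "10000");     // kill once running longer than this
        labels.put(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, "10000");     // kill once queued longer than this
        return labels;
    }

    public static void main(String[] args) {
        System.out.println(buildLabels("hadoop")); // "hadoop" is a placeholder user
    }
}
```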

docs/user_guide/sdk_manual.md

Lines changed: 6 additions & 6 deletions
@@ -117,7 +117,7 @@ public class LinkisClientTest {
 // set label map: EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant
 Map<String, Object> labels = new HashMap<String, Object>();
 labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // required engineType label
-labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE"); // required execute user and creator
+labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-APPName"); // required execute user and creator
 labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // required codeType
 // set start up map: engineConn start params
 Map<String, Object> startupMap = new HashMap<String, Object>(16);
@@ -155,7 +155,7 @@ public class LinkisClientTest {
 
 // 2. build JobExecuteAction (the old 0.X way of using it)
 JobExecuteAction executionAction = JobExecuteAction.builder()
-.setCreator("IDE") // creator: the system name of the client requesting Linkis, used for system-level isolation
+.setCreator("APPName") // creator: the system name of the client requesting Linkis, used for system-level isolation
 .addExecuteCode(code) // execution code
 .setEngineTypeStr("spark") // engineConn type
 .setRunTypeStr("py") // code type
@@ -275,14 +275,14 @@ object LinkisClientTest {
 // set label map: EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant
 val labels: util.Map[String, Any] = new util.HashMap[String, Any]
 labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // required engineType label
-labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE"); // required execute user and creator
+labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-APPName"); // required execute user and creator
 labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // required codeType
 
 val startupMap = new java.util.HashMap[String, Any]()
 // Supports setting engine native parameters, for example parameters of engines such as spark/hive
 startupMap.put("spark.executor.instances", 2);
 // setting linkis params
-startupMap.put("wds.linkis.rm.yarnqueue", "dws");
+startupMap.put("wds.linkis.rm.yarnqueue", "default");
 // 2. build jobSubmitAction
 val jobSubmitAction = JobSubmitAction.builder
 .addExecuteCode(code)
@@ -309,10 +309,10 @@ object LinkisClientTest {
 // Supports setting engine native parameters, for example parameters of engines such as spark/hive
 startupMap.put("spark.executor.instances", 2)
 // setting linkis params
-startupMap.put("wds.linkis.rm.yarnqueue", "dws")
+startupMap.put("wds.linkis.rm.yarnqueue", "default")
 // 2. build JobExecuteAction (the old 0.X way of using it)
 val executionAction = JobExecuteAction.builder()
-.setCreator("IDE") // creator: the system name of the client requesting Linkis, used for system-level isolation
+.setCreator("APPName") // creator: the system name of the client requesting Linkis, used for system-level isolation
 .addExecuteCode(code) // execution code
 .setEngineTypeStr("spark") // engineConn type
 .setRunTypeStr("py") // code type
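As a companion to the renamed creator, here is a minimal sketch of the recommended submit path under the new naming, assuming the SDK classes this manual uses (`org.apache.linkis.ujes.client.request.JobSubmitAction`, `org.apache.linkis.manager.label.constant.LabelKeyConstant`); `MyApp` is a hypothetical application name, and per the zh-CN manual note in this commit, the APPName part must not contain "-":

```java
import java.util.HashMap;
import java.util.Map;

// Assumed Linkis 1.x SDK import paths
import org.apache.linkis.manager.label.constant.LabelKeyConstant;
import org.apache.linkis.ujes.client.request.JobSubmitAction;

public class CreatorLabelSketch {

    static JobSubmitAction buildSubmit(String user, String code) {
        Map<String, Object> labels = new HashMap<>();
        labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // required engineType label
        // Use your own application name instead of the fixed "IDE".
        // "MyApp" is hypothetical; "-" must not appear inside the name,
        // because "-" separates the user from the creator in this label.
        labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-MyApp");
        labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // required codeType

        Map<String, Object> startupMap = new HashMap<>();
        startupMap.put("wds.linkis.rm.yarnqueue", "default"); // queue name is deployment-specific

        return JobSubmitAction.builder()
                .addExecuteCode(code)
                .setStartupParams(startupMap)
                .setUser(user)        // submit user
                .addExecuteUser(user) // execute user
                .setLabels(labels)
                .build();
    }
}
```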

i18n/zh-CN/docusaurus-plugin-content-docs/current/deployment/quick_deploy.md

Lines changed: 5 additions & 0 deletions
@@ -259,6 +259,11 @@ cp mysql-connector-java-5.1.49.jar {LINKIS_HOME}/lib/linkis-commons/public-modu
 
 ### 4.5 Quickly start Linkis
 
+**Notice**: if the DSS or other project you use depends on a Linkis version < 1.1.1, you also need to modify the linkis.properties file:
+```shell
+echo "wds.linkis.session.ticket.key=bdp-user-ticket-id" >> linkis.properties
+```
+
 #### 4.5.1 Start the services:
 
 Run the following commands in the installation directory to start all services:

i18n/zh-CN/docusaurus-plugin-content-docs/current/development/linkis_config.md

Lines changed: 2 additions & 2 deletions
@@ -106,8 +106,8 @@ linkis-cli -runtieMap key1=value -runtieMap key2=value
 labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // Specify the engine type and version
 labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE"); // Specify the running user and your APPName
 labels.put(LabelKeyConstant.CODE_TYPE_KEY, "sql"); // Specify the script type to run. Spark supports: sql, scala, py; Hive: hql; shell: sh; python: python; presto: psql
-labels.put(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, "10000"); // Kill is initiated automatically if the job runs longer than this timeout; the unit is ms
-labels.put(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, "10000"); // Kill is initiated automatically if the job queues longer than this timeout; the unit is ms
+labels.put(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, "10000"); // Kill is initiated automatically if the job runs longer than this timeout; the unit is s
+labels.put(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, "10000"); // Kill is initiated automatically if the job queues longer than this timeout; the unit is s
 labels.put(LabelKeyConstant.RETRY_TIMEOUT_KEY, "10000"); // How long the job waits before retrying after failing for resource or similar reasons; the unit is ms. If it fails due to insufficient queue resources, 10 retries are initiated at this interval by default
 labels.put(LabelKeyConstant.TENANT_KEY, "hduser02"); // Tenant label; if the tenant parameter is specified for a task, the task is routed to a separate ECM machine
 labels.put(LabelKeyConstant.EXECUTE_ONCE_KEY, ""); // Execute-once label. Setting this parameter is not recommended: the engine will not be reused and shuts down after the task finishes. Set it only when a task needs specialized parameters

i18n/zh-CN/docusaurus-plugin-content-docs/current/user_guide/sdk_manual.md

Lines changed: 12 additions & 11 deletions
@@ -57,7 +57,7 @@ public class LinkisClientTest {
 .readTimeout(30000) // set read timeout
 .setAuthenticationStrategy(new StaticAuthenticationStrategy()) // AuthenticationStrategy: Linkis authentication supports static and token
 .setAuthTokenKey("hadoop") // set submit user
-.setAuthTokenValue("hadoop"))) // set passwd or token (setAuthTokenValue("BML-AUTH"))
+.setAuthTokenValue("hadoop"))) // set passwd or token (setAuthTokenValue("test"))
 .setDWSVersion("v1") // linkis rest version v1
 .build();
 
@@ -72,9 +72,9 @@ public class LinkisClientTest {
 try {
 
 System.out.println("user : " + user + ", code : [" + executeCode + "]");
-// 3. build job and execute
+// 3. submit is the recommended way: it lets you specify job-related labels and supports more features
 JobExecuteResult jobExecuteResult = toSubmit(user, executeCode);
-//0.x: JobExecuteResult jobExecuteResult = toExecute(user, executeCode);
+// the 0.x-compatible way, not recommended: JobExecuteResult jobExecuteResult = toExecute(user, executeCode);
 System.out.println("execId: " + jobExecuteResult.getExecID() + ", taskId: " + jobExecuteResult.taskID());
 // 4. get job info
 JobInfoResult jobInfoResult = client.getJobInfo(jobExecuteResult);
@@ -115,8 +115,8 @@ public class LinkisClientTest {
 // set label map: EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant
 Map<String, Object> labels = new HashMap<String, Object>();
 labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // required engineType label
-labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE"); // required execute user and creator
-labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // required codeType
+labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-APPName"); // the requesting user and application name; both parts are required, and the APPName must not contain "-" (replacing it with "_" is recommended)
+labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // specify the script type
 // set start up map: engineConn start params
 Map<String, Object> startupMap = new HashMap<String, Object>(16);
 // Supports setting engine native parameters, for example parameters of engines such as spark/hive
@@ -153,7 +153,7 @@ public class LinkisClientTest {
 
 // 2. build JobExecuteAction (the old 0.X way of using it)
 JobExecuteAction executionAction = JobExecuteAction.builder()
-.setCreator("IDE") // creator: the system name of the client requesting Linkis, used for system-level isolation
+.setCreator("AppName") // creator: the system name of the client requesting Linkis, used for system-level isolation
 .addExecuteCode(code) // execution code
 .setEngineTypeStr("spark") // engineConn type
 .setRunTypeStr("py") // code type
@@ -217,6 +217,7 @@ object LinkisClientTest {
 try {
 // 3. build job and execute
 println("user : " + user + ", code : [" + executeCode + "]")
+// submit is recommended: it supports passing job labels
 val jobExecuteResult = toSubmit(user, executeCode)
 //0.X: val jobExecuteResult = toExecute(user, executeCode)
 println("execId: " + jobExecuteResult.getExecID + ", taskId: " + jobExecuteResult.taskID)
@@ -271,14 +272,14 @@ object LinkisClientTest {
 // set label map: EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant
 val labels: util.Map[String, Any] = new util.HashMap[String, Any]
 labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // required engineType label
-labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE"); // required execute user and creator
-labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // required codeType
+labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-APPName"); // the requesting user and application name; both parts are required, and the APPName must not contain "-" (replacing it with "_" is recommended)
+labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // specify the script type
 
 val startupMap = new java.util.HashMap[String, Any]()
 // Supports setting engine native parameters, for example parameters of engines such as spark/hive
 startupMap.put("spark.executor.instances", 2);
 // setting linkis params
-startupMap.put("wds.linkis.rm.yarnqueue", "dws");
+startupMap.put("wds.linkis.rm.yarnqueue", "default");
 // 2. build jobSubmitAction
 val jobSubmitAction = JobSubmitAction.builder
 .addExecuteCode(code)
@@ -305,10 +306,10 @@ object LinkisClientTest {
 // Supports setting engine native parameters, for example parameters of engines such as spark/hive
 startupMap.put("spark.executor.instances", 2)
 // setting linkis params
-startupMap.put("wds.linkis.rm.yarnqueue", "dws")
+startupMap.put("wds.linkis.rm.yarnqueue", "default")
 // 2. build JobExecuteAction (the old 0.X way of using it)
 val executionAction = JobExecuteAction.builder()
-.setCreator("IDE") // creator: the system name of the client requesting Linkis, used for system-level isolation
+.setCreator("APPName") // creator: the system name of the client requesting Linkis, used for system-level isolation
 .addExecuteCode(code) // execution code
 .setEngineTypeStr("spark") // engineConn type
 .setRunTypeStr("py") // code type
