Introductions to DSS (DataSphereStudio) and Linkis are not repeated here; this article focuses on integrating a third-party application with DSS over HTTP (REST).
1. Environment
Versions used: dss-0.7.0, linkis-0.9.3
2. Login (login)
For sending requests I tried okhttp, RestTemplate, and HttpClient; for mapping JSON to Java beans I tried fastjson, gson, and Jackson. Because DSS keeps the login state in a cookie, after stepping on quite a few mines I settled on RestTemplate (spring-web:5.0.7) + fastjson (1.2.58), which carries the login cookie over to every subsequent request automatically.
POST /api/rest_j/v1/user/login
Request body:
{
    "userName": "hadoop",
    "password": "hadoop"
}
Example response (I may have lightly edited the returned JSON; treat the official docs as authoritative):
{
    "method": null,
    "status": 0,
    "message": "login successful!",
    "data": {
        "isFirstLogin": false,
        "isAdmin": true,
        "userName": "hadoop"
    }
}
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.web.client.RestTemplate;

/**
 * Simple utility class; extend it to fit your own needs.
 */
public class HttpUtil {
    public static RestTemplate getRestClient() {
        // HttpClientBuilder manages cookies automatically, so the login cookie
        // is replayed on every subsequent request made through this client.
        // Use HttpClientBuilder.create().disableCookieManagement() instead
        // if you want to turn cookie management off.
        CloseableHttpClient build = HttpClientBuilder.create().useSystemProperties().build();
        return new RestTemplate(new HttpComponentsClientHttpRequestFactory(build));
    }
}
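Automatic cookie management is the easy path. If you do call disableCookieManagement(), the session cookie has to be forwarded by hand. A minimal sketch, assuming you keep the Set-Cookie headers from the login response (the helper name and parameters below are mine, not part of DSS):

import java.util.List;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestTemplate;

public class ManualCookieExample {
    // setCookieHeaders = loginResponse.getHeaders().get(HttpHeaders.SET_COOKIE)
    public static ResponseEntity<String> getWithCookies(RestTemplate restClient,
                                                        String url,
                                                        List<String> setCookieHeaders) {
        HttpHeaders headers = new HttpHeaders();
        // Replay each cookie the login response handed back (name=value part only).
        for (String cookie : setCookieHeaders) {
            headers.add(HttpHeaders.COOKIE, cookie.split(";", 2)[0]);
        }
        return restClient.exchange(url, HttpMethod.GET, new HttpEntity<>(headers), String.class);
    }
}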
import com.alibaba.fastjson.JSONObject;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestTemplate;

/**
 * Send a POST request to log in.
 * @param restClient the shared RestTemplate (it will hold the session cookie)
 * @return ResponseEntity wrapping the login response
 */
private ResponseEntity<JSONObject> login(RestTemplate restClient) {
    JSONObject postData = new JSONObject();
    postData.put("userName", "hadoop");
    postData.put("password", "hadoop");
    String loginUrl = "http://ip:port/api/rest_j/v1/user/login";
    return restClient.postForEntity(loginUrl, postData, JSONObject.class);
}
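A quick usage sketch: log in once with the shared RestTemplate and check the body-level status field (0 means success, as in the example response above) before issuing any other request:

RestTemplate restClient = HttpUtil.getRestClient();
ResponseEntity<JSONObject> loginResp = login(restClient);
// DSS signals success with status == 0 in the JSON body, on top of HTTP 200.
if (loginResp.getStatusCode().is2xxSuccessful()
        && loginResp.getBody().getIntValue("status") == 0) {
    // The session cookie is now cached inside restClient;
    // reuse this same instance for every call that follows.
}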
3. Execute a task (execute)
The source code is in EntranceRestfulApi in the linkis module.
POST /api/rest_j/v1/entrance/execute
Request body:
{
    "method": "/api/rest_j/v1/entrance/execute",
    "params": {
        "variable": {
            "k1": "v1"
        },
        "configuration": {
            "special": {
                "k2": "v2"
            },
            "runtime": {
                "k3": "v3"
            },
            "startup": {
                "k4": "v4"
            }
        }
    },
    "executeApplicationName": "spark",
    "executionCode": "show tables",
    "runType": "sql",
    "source": {
        "scriptPath": "/home/Linkis/Linkis.sql"
    }
}
Example response:
{
    "method": "/api/rest_j/v1/entrance/execute",
    "status": 0,
    "message": "Request executed successfully",
    "data": {
        "execID": "030418IDEhivebdpdwc010004:10087IDE_johnnwang_21", // execution id; used later to poll the task status
        "taskID": "123" // task id; used later to fetch the result file
    }
}
import java.util.HashMap;
import com.alibaba.fastjson.JSONObject;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestTemplate;

/**
 * Submit a SQL statement for execution.
 * @param restClient the shared RestTemplate (already logged in)
 * @param sql the SQL code to execute
 * @return ResponseEntity wrapping the execute response
 */
private ResponseEntity<JSONObject> executeSql(RestTemplate restClient, String sql) {
    String url = "/api/rest_j/v1/entrance/execute";
    JSONObject map = new JSONObject();
    map.put("method", url);
    map.put("params", new HashMap<>()); // user-specified runtime parameters; required, but the values inside may be empty
    map.put("executeApplicationName", "hive"); // execution engine; I use hive
    map.put("executionCode", sql);
    map.put("runType", "sql"); // when running an engine such as spark you can choose python, R, sql, etc.; must not be empty
    // I am not executing a script file, so there is no scriptPath parameter.
    String executeSql = "http://ip:port" + url;
    return restClient.postForEntity(executeSql, map, JSONObject.class);
}
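After submission, pull execID and taskID out of the data object; they drive steps 4 and 5 respectively. A short sketch (variable names are mine):

ResponseEntity<JSONObject> execResp = executeSql(restClient, "show tables");
JSONObject data = execResp.getBody().getJSONObject("data");
String execID = data.getString("execID"); // for the status endpoint
String taskID = data.getString("taskID"); // for the jobhistory endpoint

If you do need custom variables, replace the empty HashMap with a nested params object matching the variable/configuration structure shown in the request body above.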
4. Check task status (status)
The source code is in EntranceRestfulApi in the linkis module.
GET /api/rest_j/v1/entrance/${execID}/status
Example response:
{
    "method": "/api/rest_j/v1/entrance/{execID}/status",
    "status": 0,
    "message": "Fetched status successfully",
    "data": {
        "execID": "${execID}",
        "status": "Running"
    }
}
import org.apache.http.HttpStatus;

String statusUrl = "http://ip:port/api/rest_j/v1/entrance/" + execID + "/status";
ResponseEntity<JSONObject> statusResp = restTemplate.getForEntity(statusUrl, JSONObject.class);
if (statusResp != null && statusResp.getStatusCode().value() == HttpStatus.SC_OK) {
    String status;
    for (; ; ) {
        statusResp = restTemplate.getForEntity(statusUrl, JSONObject.class);
        status = statusResp.getBody().getJSONObject("data").getString("status");
        // Poll until the task either succeeds or fails.
        if ("Succeed".equals(status) || "Failed".equals(status)) {
            break;
        }
        // Back off between polls instead of hammering the gateway in a tight loop.
        try {
            Thread.sleep(1000L);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            break;
        }
    }
    if ("Succeed".equals(status)) {
        // do something
    }
}
5. Get the execution result file (get)
The source code is in QueryRestfulApi in the linkis module.
GET /api/rest_j/v1/jobhistory/${taskId}/get
Example response:
{
    "method": "/api/jobhistory/{id}/get",
    "status": 0,
    "message": "OK",
    "data": {
        "task": {
            "taskID": 3111,
            "instance": "test-dn2:9108",
            "execId": "IDE_hadoop_46",
            "umUser": "hadoop",
            "engineInstance": "test-dn2:37301",
            "executionCode": "show databases", // the executed SQL
            "progress": 1.0,
            "logPath": "file:///linkis/hadoop/log/IDE/2020-09-08/3111.log", // log path
            "resultLocation": "hdfs:///linkis2/hadoop/dwc/20200908/IDE/3111", // directory where the SQL result files are stored
            "status": "Succeed",
            "createdTime": 1599551337000,
            "updatedTime": 1599551339000,
            "engineType": null,
            "errCode": null,
            "errDesc": null,
            "executeApplicationName": "hive",
            "requestApplicationName": "IDE",
            "scriptPath": null,
            "runType": "sql",
            "paramsJson": "{}",
            "costTime": 2000,
            "strongerExecId": "030413IDEhivetest-dn2:9108IDE_hadoop_46",
            "sourceJson": "{\"scriptPath\":null}"
        }
    }
}
String historyUrl = "http://ip:port/api/rest_j/v1/jobhistory/" + taskID + "/get";
ResponseEntity<JSONObject> hisResp = restTemplate.getForEntity(historyUrl, JSONObject.class);
if (hisResp != null && hisResp.getStatusCode().value() == HttpStatus.SC_OK) {
    String resultLocation = hisResp.getBody().getJSONObject("data")
            .getJSONObject("task").getString("resultLocation");
}
6. Open the result file (openFile)
The source code is in FsRestfulApi in the linkis module.
GET /api/rest_j/v1/filesystem/openFile?path=${resultLocation}/_0.dolphin
Example response:
{
    "method": "/api/filesystem/openFile",
    "status": 0,
    "message": "OK",
    "data": {
        // the SQL query result data
    }
}
String resUrl = "http://ip:port/api/rest_j/v1/filesystem/openFile?path=" + resultLocation + "/_0.dolphin";
ResponseEntity<JSONObject> resResp = restTemplate.getForEntity(resUrl, JSONObject.class);
if (resResp != null && resResp.getStatusCode().value() == HttpStatus.SC_OK) {
    // do something with the result data
}
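Putting it all together, here is a hedged end-to-end sketch of the whole flow under the assumptions above (host/port, credentials, and the SQL are placeholders; error handling is trimmed for brevity):

import com.alibaba.fastjson.JSONObject;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestTemplate;

public class DssFlowExample {
    public static void main(String[] args) throws InterruptedException {
        RestTemplate client = HttpUtil.getRestClient();

        // 1. Log in; the session cookie is cached inside the client.
        JSONObject loginBody = new JSONObject();
        loginBody.put("userName", "hadoop");
        loginBody.put("password", "hadoop");
        client.postForEntity("http://ip:port/api/rest_j/v1/user/login", loginBody, JSONObject.class);

        // 2. Submit the SQL.
        JSONObject req = new JSONObject();
        req.put("method", "/api/rest_j/v1/entrance/execute");
        req.put("params", new JSONObject());
        req.put("executeApplicationName", "hive");
        req.put("executionCode", "show tables");
        req.put("runType", "sql");
        JSONObject data = client.postForEntity("http://ip:port/api/rest_j/v1/entrance/execute",
                req, JSONObject.class).getBody().getJSONObject("data");
        String execID = data.getString("execID");
        String taskID = data.getString("taskID");

        // 3. Poll until the task finishes.
        String status;
        do {
            Thread.sleep(1000L);
            ResponseEntity<JSONObject> s = client.getForEntity(
                    "http://ip:port/api/rest_j/v1/entrance/" + execID + "/status", JSONObject.class);
            status = s.getBody().getJSONObject("data").getString("status");
        } while (!"Succeed".equals(status) && !"Failed".equals(status));

        // 4. Fetch the result location, then open the result file.
        if ("Succeed".equals(status)) {
            JSONObject task = client.getForEntity(
                    "http://ip:port/api/rest_j/v1/jobhistory/" + taskID + "/get", JSONObject.class)
                    .getBody().getJSONObject("data").getJSONObject("task");
            String resultLocation = task.getString("resultLocation");
            JSONObject result = client.getForEntity(
                    "http://ip:port/api/rest_j/v1/filesystem/openFile?path=" + resultLocation + "/_0.dolphin",
                    JSONObject.class).getBody();
            System.out.println(result.toJSONString());
        }
    }
}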