This is an automated email from the ASF dual-hosted git repository.

jlfsdtc pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/kylin5 by this push:
     new b3cac1013a KYLIN-5986 remove `org.apache.kylin.rest.controller.SparkSourceController`
b3cac1013a is described below

commit b3cac1013a6548aac44fb1d43d7366a14f000d0f
Author: jlf <[email protected]>
AuthorDate: Thu Feb 13 14:43:21 2025 +0800

    KYLIN-5986 remove `org.apache.kylin.rest.controller.SparkSourceController`
---
 .../rest/controller/SparkSourceController.java     | 145 ----------------
 .../rest/controller/SparkSourceControllerTest.java | 186 ---------------------
 2 files changed, 331 deletions(-)

diff --git a/src/query-server/src/main/java/org/apache/kylin/rest/controller/SparkSourceController.java b/src/query-server/src/main/java/org/apache/kylin/rest/controller/SparkSourceController.java
deleted file mode 100644
index 2d983ede77..0000000000
--- a/src/query-server/src/main/java/org/apache/kylin/rest/controller/SparkSourceController.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.kylin.rest.controller;
-
-import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_JSON;
-import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON;
-
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.kylin.common.exception.KylinException;
-import org.apache.kylin.rest.response.EnvelopeResponse;
-import org.apache.kylin.rest.request.DDLRequest;
-import org.apache.kylin.rest.request.ExportTableRequest;
-import org.apache.kylin.rest.response.DDLResponse;
-import org.apache.kylin.rest.response.ExportTablesResponse;
-import org.apache.kylin.rest.response.TableNameResponse;
-import org.apache.kylin.rest.service.SparkSourceService;
-import org.apache.spark.sql.AnalysisException;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
-import org.springframework.web.bind.annotation.DeleteMapping;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.PostMapping;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestParam;
-import org.springframework.web.bind.annotation.ResponseBody;
-import org.springframework.web.bind.annotation.RestController;
-
-import io.swagger.annotations.ApiOperation;
-import lombok.extern.slf4j.Slf4j;
-
-@ConditionalOnProperty(name = "kylin.env.channel", havingValue = "cloud")
-@RestController
-@RequestMapping(value = "/api/spark_source", produces = { HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON,
-        HTTP_VND_APACHE_KYLIN_JSON })
-@Slf4j
-public class SparkSourceController extends NBasicController {
-
-    @Autowired
-    private SparkSourceService sparkSourceService;
-
-    @ApiOperation(value = "execute", tags = { "DW" })
-    @PostMapping(value = "/execute")
-    @ResponseBody
-    public EnvelopeResponse<DDLResponse> executeSQL(@RequestBody DDLRequest request) {
-        DDLResponse ddlResponse = sparkSourceService.executeSQL(request);
-        return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, ddlResponse, "");
-    }
-
-    @ApiOperation(value = "exportTable", tags = { "DW" })
-    @PostMapping(value = "/export_table_structure")
-    @ResponseBody
-    public EnvelopeResponse<ExportTablesResponse> exportTableStructure(@RequestBody ExportTableRequest request) {
-        ExportTablesResponse tableResponse = sparkSourceService.exportTables(request.getDatabases(),
-                request.getTables());
-        return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, tableResponse, "");
-    }
-
-    @ApiOperation(value = "dropTable", tags = { "DW" })
-    @DeleteMapping(value = "/{database}/tables/{table}")
-    public EnvelopeResponse<String> dropTable(@PathVariable("database") String database,
-            @PathVariable("table") String table) throws AnalysisException {
-        sparkSourceService.dropTable(database, table);
-        return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, "", "");
-    }
-
-    @ApiOperation(value = "listDatabase", tags = { "DW" })
-    @GetMapping(value = "/databases")
-    public EnvelopeResponse<List<String>> listDatabase() {
-        return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, sparkSourceService.listDatabase(), "");
-    }
-
-    @ApiOperation(value = "listTables", tags = { "DW" })
-    @GetMapping(value = "/{database}/tables")
-    public EnvelopeResponse<List<TableNameResponse>> listTables(@PathVariable("database") String database,
-            @RequestParam("project") String project) throws Exception {
-        return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, sparkSourceService.listTables(database, project), "");
-    }
-
-    @ApiOperation(value = "listColumns", tags = { "DW" })
-    @GetMapping(value = "/{database}/{table}/columns")
-    public EnvelopeResponse<List<SparkSourceService.ColumnModel>> listColumns(@PathVariable("database") String database,
-            @PathVariable("table") String table) {
-        return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, sparkSourceService.listColumns(database, table), "");
-    }
-
-    @ApiOperation(value = "getTableDesc", tags = { "DW" })
-    @GetMapping(value = "/{database}/{table}/desc")
-    public EnvelopeResponse<String> getTableDesc(@PathVariable("database") String database,
-            @PathVariable("table") String table) {
-        return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, sparkSourceService.getTableDesc(database, table), "");
-    }
-
-    @ApiOperation(value = "hasPartition", tags = { "DW" })
-    @GetMapping(value = "{database}/{table}/has_partition")
-    public EnvelopeResponse<Boolean> hasPartition(@PathVariable("database") String database,
-            @PathVariable("table") String table) {
-        return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, sparkSourceService.hasPartition(database, table), "");
-    }
-
-    @ApiOperation(value = "databaseExists", tags = { "DW" })
-    @GetMapping(value = "/{database}/exists")
-    public EnvelopeResponse<Boolean> databaseExists(@PathVariable("database") String database) {
-        return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, sparkSourceService.databaseExists(database), "");
-    }
-
-    @ApiOperation(value = "tableExists", tags = { "DW" })
-    @GetMapping(value = "/{database}/{table}/exists")
-    public EnvelopeResponse<Boolean> tableExists(@PathVariable("database") String database,
-            @PathVariable("table") String table) {
-        return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, sparkSourceService.tableExists(database, table), "");
-    }
-
-    @ApiOperation(value = "loadSamples", tags = { "DW" })
-    @GetMapping(value = "/load_samples")
-    public EnvelopeResponse<List<String>> loadSamples() throws InterruptedException, IOException {
-        return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, sparkSourceService.loadSamples(), "");
-    }
-
-    @ApiOperation(value = "msck", tags = { "DW" })
-    @GetMapping(value = "/{database}/{table}/msck")
-    public EnvelopeResponse<List<String>> msck(@PathVariable("database") String database,
-            @PathVariable("table") String table) {
-        return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, sparkSourceService.msck(database, table), "");
-    }
-
-}
diff --git a/src/query-server/src/test/java/org/apache/kylin/rest/controller/SparkSourceControllerTest.java b/src/query-server/src/test/java/org/apache/kylin/rest/controller/SparkSourceControllerTest.java
deleted file mode 100644
index 40791a9eaa..0000000000
--- a/src/query-server/src/test/java/org/apache/kylin/rest/controller/SparkSourceControllerTest.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.rest.controller;
-
-import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_JSON;
-
-import org.apache.kylin.common.util.JsonUtil;
-import org.apache.kylin.junit.annotation.MetadataInfo;
-import org.apache.kylin.rest.constant.Constant;
-import org.apache.kylin.rest.request.DDLRequest;
-import org.apache.kylin.rest.request.ExportTableRequest;
-import org.apache.kylin.rest.service.SparkSourceService;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.MockitoAnnotations;
-import org.springframework.http.MediaType;
-import org.springframework.security.authentication.TestingAuthenticationToken;
-import org.springframework.security.core.Authentication;
-import org.springframework.security.core.context.SecurityContextHolder;
-import org.springframework.test.web.servlet.MockMvc;
-import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
-import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
-import org.springframework.test.web.servlet.setup.MockMvcBuilders;
-
-@MetadataInfo(onlyProps = true)
-public class SparkSourceControllerTest {
-    private MockMvc mockMvc;
-
-    @Mock
-    private SparkSourceService sparkSourceService;
-
-    @InjectMocks
-    private SparkSourceController sparkSourceController = Mockito.spy(new SparkSourceController());
-
-    private final Authentication authentication = new TestingAuthenticationToken("ADMIN", "ADMIN", Constant.ROLE_ADMIN);
-
-    @BeforeEach
-    public void setup() {
-        MockitoAnnotations.initMocks(this);
-        mockMvc = MockMvcBuilders.standaloneSetup(sparkSourceController).defaultRequest(MockMvcRequestBuilders.get("/"))
-                .build();
-        SecurityContextHolder.getContext().setAuthentication(authentication);
-    }
-
-    @AfterEach
-    public void tearDown() {
-    }
-
-    @Test
-    public void testExecuteSQL() throws Exception {
-        DDLRequest ddlRequest = new DDLRequest();
-        ddlRequest.setSql("show databases");
-        ddlRequest.setDatabase("default");
-        mockMvc.perform(MockMvcRequestBuilders.post("/api/spark_source/execute").contentType(MediaType.APPLICATION_JSON)
-                .content(JsonUtil.writeValueAsString(ddlRequest))
-                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
-                .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
-
-        Mockito.verify(sparkSourceController).executeSQL(ddlRequest);
-    }
-
-    @Test
-    public void testExportTableStructure() throws Exception {
-        ExportTableRequest request = new ExportTableRequest();
-        request.setDatabases("SSB");
-        request.setTables(new String[] { "LINEORDER", "DATES" });
-        mockMvc.perform(MockMvcRequestBuilders.post("/api/spark_source/export_table_structure")
-                .contentType(MediaType.APPLICATION_JSON).content(JsonUtil.writeValueAsString(request))
-                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
-                .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
-        Mockito.verify(sparkSourceController).exportTableStructure(request);
-    }
-
-    @Test
-    public void testDropTable() throws Exception {
-        mockMvc.perform(MockMvcRequestBuilders
-                .delete("/api/spark_source/{database}/tables/{table}", "default", "COUNTRY")
-                .contentType(MediaType.APPLICATION_JSON).accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
-                .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
-
-        Mockito.verify(sparkSourceController).dropTable("default", "COUNTRY");
-    }
-
-    @Test
-    public void testListDatabase() throws Exception {
-        mockMvc.perform(MockMvcRequestBuilders.get("/api/spark_source/databases")
-                .contentType(MediaType.APPLICATION_JSON).accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
-                .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
-
-        Mockito.verify(sparkSourceController).listDatabase();
-    }
-
-    @Test
-    public void testListTables() throws Exception {
-        mockMvc.perform(MockMvcRequestBuilders.get("/api/spark_source/{database}/tables", "default")
-                .contentType(MediaType.APPLICATION_JSON).param("project", "test")
-                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
-                .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
-
-        Mockito.verify(sparkSourceController).listTables("default", "test");
-    }
-
-    @Test
-    public void testListColumns() throws Exception {
-        mockMvc.perform(MockMvcRequestBuilders.get("/api/spark_source/{database}/{table}/columns", "default", "COUNTRY")
-                .contentType(MediaType.APPLICATION_JSON).accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
-                .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
-
-        Mockito.verify(sparkSourceController).listColumns("default", "COUNTRY");
-    }
-
-    @Test
-    public void testGetTableDesc() throws Exception {
-        mockMvc.perform(MockMvcRequestBuilders.get("/api/spark_source/{database}/{table}/desc", "default", "COUNTRY")
-                .contentType(MediaType.APPLICATION_JSON).accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
-                .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
-
-        Mockito.verify(sparkSourceController).getTableDesc("default", "COUNTRY");
-    }
-
-    @Test
-    public void testHasPartition() throws Exception {
-        mockMvc.perform(MockMvcRequestBuilders
-                .get("/api/spark_source/{database}/{table}/has_partition", "default", "COUNTRY")
-                .contentType(MediaType.APPLICATION_JSON).accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
-                .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
-
-        Mockito.verify(sparkSourceController).hasPartition("default", "COUNTRY");
-    }
-
-    @Test
-    public void testDatabaseExists() throws Exception {
-        mockMvc.perform(MockMvcRequestBuilders.get("/api/spark_source/{database}/exists", "default")
-                .contentType(MediaType.APPLICATION_JSON).accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
-                .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
-
-        Mockito.verify(sparkSourceController).databaseExists("default");
-    }
-
-    @Test
-    public void testTableExists() throws Exception {
-        mockMvc.perform(MockMvcRequestBuilders.get("/api/spark_source/{database}/{table}/exists", "default", "COUNTRY")
-                .contentType(MediaType.APPLICATION_JSON).accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
-                .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
-
-        Mockito.verify(sparkSourceController).tableExists("default", "COUNTRY");
-    }
-
-    @Test
-    public void testLoadSamples() throws Exception {
-        mockMvc.perform(MockMvcRequestBuilders.get("/api/spark_source/load_samples")
-                .contentType(MediaType.APPLICATION_JSON).accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
-                .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
-
-        Mockito.verify(sparkSourceController).loadSamples();
-    }
-
-    @Test
-    public void testMsck() throws Exception {
-        mockMvc.perform(MockMvcRequestBuilders.get("/api/spark_source/{database}/{table}/msck", "default", "COUNTRY")
-                .contentType(MediaType.APPLICATION_JSON).accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
-                .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
-
-        Mockito.verify(sparkSourceController).msck("default", "COUNTRY");
-    }
-}
