diff --git a/linkis-commons/linkis-module/pom.xml b/linkis-commons/linkis-module/pom.xml
index dc28af3b1b..81601b5094 100644
--- a/linkis-commons/linkis-module/pom.xml
+++ b/linkis-commons/linkis-module/pom.xml
@@ -47,8 +47,8 @@
       <version>${springfox.version}</version>
       <exclusions>
         <exclusion>
-          <artifactId>classgraph</artifactId>
           <groupId>io.github.classgraph</groupId>
+          <artifactId>classgraph</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala
index e8eccd35a7..d50e4096f9 100644
--- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala
+++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala
@@ -274,7 +274,9 @@ class HiveEngineConnExecutor(
     if (numberOfMRJobs > 0) {
       engineExecutorContext.appendStdout(s"Your hive sql has $numberOfMRJobs MR jobs to do")
       val queueName = hiveConf.get(HiveEngineConfiguration.HIVE_QUEUE_NAME)
-      engineExecutorContext.appendStdout(s"Your task will be submitted to the $queueName queue")
+      engineExecutorContext.appendStdout(
+        s"Your task will be submitted to the $queueName queue"
+      )
     }
     if (thread.isInterrupted) {
       logger.error(
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/HiveMetaWithPermissionServiceImpl.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/HiveMetaWithPermissionServiceImpl.java
index 700a8aab3a..88a4d93b6e 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/HiveMetaWithPermissionServiceImpl.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/HiveMetaWithPermissionServiceImpl.java
@@ -103,11 +103,8 @@ public List<Map<String, Object>> getTablesByDbNameAndOptionalUserName(
       queryParam.withRoles(roles);
       List<Map<String, Object>> hiveTables =
           hiveMetaDao.getTablesByDbNameAndUserAndRolesFromDbPrvs(queryParam);
-      hiveTables.addAll(
-          hiveMetaDao.getTablesByDbNameAndUserAndRolesFromTblPrvs(queryParam));
-      return hiveTables.stream()
-          .distinct()
-          .collect(Collectors.toList());
+      hiveTables.addAll(hiveMetaDao.getTablesByDbNameAndUserAndRolesFromTblPrvs(queryParam));
+      return hiveTables.stream().distinct().collect(Collectors.toList());
     } else {
       log.info("user {} to getTablesByDbName no permission control", queryParam.getUserName());
       return hiveMetaDao.getTablesByDbName(queryParam);
diff --git a/linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/api/UDFRestfulApi.java b/linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/api/UDFRestfulApi.java
index 50916755a0..f1c48ab0f8 100644
--- a/linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/api/UDFRestfulApi.java
+++ b/linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/api/UDFRestfulApi.java
@@ -1437,4 +1437,36 @@ public Message pythonUpload(
         .data("dependencies", dependencies)
         .data("fileName", fileName);
   }
+
+  @ApiImplicitParam(
+      name = "path",
+      dataType = "String",
+      value = "path",
+      example = "file:///mnt/bdap/hadoop/test1012_01.py")
+  @RequestMapping(path = "/get-register-functions", method = RequestMethod.GET)
+  public Message getRegisterFunctions(HttpServletRequest req, @RequestParam("path") String path) {
+    if (StringUtils.endsWithIgnoreCase(path, Constants.FILE_EXTENSION_PY)
+        || StringUtils.endsWithIgnoreCase(path, Constants.FILE_EXTENSION_SCALA)) {
+      if (StringUtils.startsWithIgnoreCase(path, StorageUtils$.MODULE$.FILE_SCHEMA())) {
+        try {
+          FsPath fsPath = new FsPath(path);
+          // Get a file system instance for the path
+          FileSystem fileSystem = (FileSystem) FSFactory.getFs(fsPath);
+          fileSystem.init(null);
+          if (fileSystem.canRead(fsPath)) {
+            return Message.ok()
+                .data("functions", UdfUtils.getRegisterFunctions(fileSystem, fsPath, path));
+          } else {
+            return Message.error("You do not have permission to access this file");
+          }
+        } catch (Exception e) {
+          return Message.error("Failed to parse the file, error: " + e);
+        }
+      } else {
+        return Message.error("Only local files are supported");
+      }
+    } else {
+      return Message.error("Only .py and .scala files are supported");
+    }
+  }
 }
diff --git a/linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/conf/Constants.java b/linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/conf/Constants.java
index ad2692333d..5db18250c8 100644
--- a/linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/conf/Constants.java
+++ b/linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/conf/Constants.java
@@ -21,6 +21,7 @@ public class Constants {
 
   public static final String FILE_EXTENSION_PY = ".py";
+  public static final String FILE_EXTENSION_SCALA = ".scala";
   public static final String FILE_EXTENSION_ZIP = ".zip";
   public static final String FILE_EXTENSION_TAR_GZ = ".tar.gz";
   public static final String FILE_PERMISSION = "770";
diff --git a/linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/utils/UdfUtils.java b/linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/utils/UdfUtils.java
index fb59e6b3a1..f7401fc1f4 100644
--- a/linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/utils/UdfUtils.java
+++ b/linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/utils/UdfUtils.java
@@ -19,7 +19,10 @@
 import org.apache.linkis.common.conf.Configuration;
 import org.apache.linkis.common.io.FsPath;
+import org.apache.linkis.common.utils.JsonUtils;
 import org.apache.linkis.common.utils.Utils;
+import org.apache.linkis.storage.fs.FileSystem;
+import org.apache.linkis.storage.utils.StorageUtils$;
 import org.apache.linkis.udf.conf.Constants;
 import org.apache.linkis.udf.exception.UdfException;
 
@@ -43,6 +46,7 @@
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipOutputStream;
 
+import com.fasterxml.jackson.core.type.TypeReference;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -100,11 +104,11 @@ private static String getRootPath(InputStream inputStream, String folder) throws
       while ((entry = tarInput.getNextTarEntry()) != null) {
         if (entry.isDirectory() && entry.getName().endsWith(delimiter)) {
           rootPathStr = entry.getName().replace(folder + FsPath.SEPARATOR, "");
-           return rootPathStr;
+          return rootPathStr;
         }
         if (entry.getName().contains(delimiter)) {
           rootPathStr = entry.getName().substring(0, entry.getName().indexOf(delimiter));
-           return rootPathStr;
+          return rootPathStr;
         }
       }
     } catch (Exception e) {
@@ -158,16 +162,16 @@ public static InputStream getZipInputStreamByTarInputStream(
       return createZipFile(file.getInputStream(), packageName, rootPath);
     } else {
       throw new UdfException(
-        80038,
-        "The name directory "
-            + packageName
-            + " specified by PKG-INFO does not exist. Please confirm that the "
-            + packageName
-            + " specified by PKG-INFO in the package matches the actual folder name (PKG-INFO指定Name目录"
-            + packageName
-            + "不存在,请确认包中PKG-INFO指定"
-            + packageName
-            + "和实际文件夹名称一致)");
+          80038,
+          "The name directory "
+              + packageName
+              + " specified by PKG-INFO does not exist. Please confirm that the "
+              + packageName
+              + " specified by PKG-INFO in the package matches the actual folder name (PKG-INFO指定Name目录"
+              + packageName
+              + "不存在,请确认包中PKG-INFO指定"
+              + packageName
+              + "和实际文件夹名称一致)");
     }
   }
@@ -280,4 +284,56 @@ public static List<String> extractDependencies(String content, String name) {
     }
     return modules;
   }
+
+  public static List<String> getRegisterFunctions(FileSystem fileSystem, FsPath fsPath, String path)
+      throws Exception {
+    try (InputStream is = fileSystem.read(fsPath)) {
+      // Read the InputStream contents into a string
+      String content = IOUtils.toString(is, StandardCharsets.UTF_8);
+      if (StringUtils.endsWith(path, Constants.FILE_EXTENSION_PY)) {
+        // Parse the Python file
+        return extractPythonMethodNames(path);
+      } else if (StringUtils.endsWith(path, Constants.FILE_EXTENSION_SCALA)) {
+        // Parse the Scala code
+        return extractScalaMethodNames(content);
+      } else {
+        throw new UdfException(80041, "Unsupported file type: " + path);
+      }
+    } catch (IOException e) {
+      throw new UdfException(80042, "Failed to read file: " + path, e);
+    }
+  }
+
+  public static List<String> extractScalaMethodNames(String scalaCode) {
+    List<String> methodNames = new ArrayList<>();
+    // Regex that matches method definitions, including access modifiers
+    String regex = "(\\b(private|protected)\\b\\s+)?\\bdef\\s+(\\w+)\\b";
+    Pattern pattern = Pattern.compile(regex);
+    Matcher matcher = pattern.matcher(scalaCode);
+    logger.info("use regex to get scala method names.. reg:{}", regex);
+    while (matcher.find()) {
+      String methodName = matcher.group(3);
+      methodNames.add(methodName);
+    }
+
+    return methodNames;
+  }
+
+  public static List<String> extractPythonMethodNames(String udfPath) throws Exception {
+    String localPath = udfPath.replace(StorageUtils$.MODULE$.FILE_SCHEMA(), "");
+    String exec =
+        Utils.exec(
+            (new String[] {
+              Constants.PYTHON_COMMAND.getValue(),
+              Configuration.getLinkisHome() + "/admin/" + "linkis_udf_get_python_methods.py",
+              localPath
+            }));
+    logger.info(
+        "execute python script to get python method name...{} {} {}",
+        Constants.PYTHON_COMMAND.getValue(),
+        Configuration.getLinkisHome() + "/admin/" + "linkis_udf_get_python_methods.py",
+        localPath);
+    // exec is a JSON array; convert it to a List<String>
+    return JsonUtils.jackson().readValue(exec, new TypeReference<List<String>>() {});
+  }
 }
diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/pom.xml b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/pom.xml
index ce1e2d5753..e744489338 100644
--- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/pom.xml
+++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/pom.xml
@@ -76,8 +76,8 @@
       <version>${springfox.version}</version>
       <exclusions>
         <exclusion>
-          <artifactId>classgraph</artifactId>
           <groupId>io.github.classgraph</groupId>
+          <artifactId>classgraph</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
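
Reviewer sketch, not part of the patch: a minimal, self-contained Java example showing how the regex used by UdfUtils.extractScalaMethodNames picks method names out of Scala source, with group 3 of the pattern being the captured name. The class name and the sample Scala snippet are illustrative assumptions, not code from this change.

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ScalaMethodNameRegexDemo {

  public static void main(String[] args) {
    // Illustrative Scala snippet (hypothetical input, not taken from the patch)
    String scalaCode =
        "class StringUdfs {\n"
            + "  def upper(s: String): String = s.toUpperCase\n"
            + "  private def helper(x: Int): Int = x + 1\n"
            + "  protected def trimAll(s: String): String = s.trim\n"
            + "}\n";

    // Same pattern as extractScalaMethodNames: an optional private/protected modifier,
    // then "def <name>"; group 3 captures the method name.
    Pattern pattern = Pattern.compile("(\\b(private|protected)\\b\\s+)?\\bdef\\s+(\\w+)\\b");
    Matcher matcher = pattern.matcher(scalaCode);

    List<String> methodNames = new ArrayList<>();
    while (matcher.find()) {
      methodNames.add(matcher.group(3));
    }

    // Prints: [upper, helper, trimAll]
    System.out.println(methodNames);
  }
}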