From 9036be666bf177d57753e52bbdfa1eefac172407 Mon Sep 17 00:00:00 2001 From: aiceflower Date: Tue, 5 Nov 2024 16:36:18 +0800 Subject: [PATCH] Add a new management console to modify the engine status to UnHealthy and automatically kill the engine after task execution The users can specify fixed engine execution Add a new management console to modify the engine status to UnHealthy and automatically kill the engine after task execution Optimize engine reuse logic, large resources cannot reuse small resource engines Optimize the flink log Linkis storage change java code to scala (The core process has some compatibility issues, and some of them have been reversed.) --- .../linkis/common/conf/Configuration.scala | 2 +- .../hadoop/common/utils/HDFSUtils.scala | 4 +- .../org/apache/linkis/storage/FSFactory.java | 96 -- .../storage/conf/LinkisStorageConf.java | 82 -- .../linkis/storage/csv/StorageCSVWriter.java | 146 --- .../linkis/storage/domain/DataType.java | 237 ----- .../apache/linkis/storage/domain/Dolphin.java | 141 --- .../linkis/storage/domain/MethodEntity.java | 132 --- .../domain/MethodEntitySerializer.java | 84 -- .../LinkisStorageErrorCodeSummary.java | 8 + .../linkis/storage/excel/ExcelXlsReader.java | 278 ----- .../linkis/storage/excel/FirstRowDeal.java | 55 - .../linkis/storage/excel/RowToCsvDeal.java | 66 -- .../storage/excel/StorageExcelWriter.java | 310 ------ .../apache/linkis/storage/excel/XlsUtils.java | 51 +- .../linkis/storage/excel/XlsxUtils.java | 55 +- .../exception/ColLengthExceedException.java | 16 +- .../exception/ColumnIndexExceedException.java | 34 + .../storage/exception/StorageErrorCode.java | 2 - .../factory/impl/BuildHDFSFileSystem.java | 6 +- .../factory/impl/BuildLocalFileSystem.java | 8 +- .../storage/factory/impl/BuildOSSSystem.java | 70 -- .../factory/impl/BuildS3FileSystem.java | 62 -- .../apache/linkis/storage/fs/FileSystem.java | 6 +- .../storage/fs/impl/HDFSFileSystem.java | 98 +- .../storage/fs/impl/LocalFileSystem.java | 43 +- 
.../linkis/storage/fs/impl/OSSFileSystem.java | 398 -------- .../linkis/storage/fs/impl/S3FileSystem.java | 464 --------- .../io/IOMethodInterceptorFactory.java | 49 - .../resultset/DefaultResultSetFactory.java | 198 ---- .../storage/resultset/OrcResultSetReader.java | 212 ---- .../storage/resultset/OrcResultSetWriter.java | 141 --- .../resultset/ParquetResultSetReader.java | 158 --- .../resultset/ParquetResultSetWriter.java | 135 --- .../storage/resultset/ResultSetFactory.java | 74 -- .../resultset/ResultSetReaderFactory.java | 133 --- .../resultset/ResultSetWriterFactory.java | 102 -- .../storage/resultset/StorageResultSet.java | 88 -- .../resultset/StorageResultSetReader.java | 175 ---- .../resultset/StorageResultSetWriter.java | 268 ----- .../storage/resultset/html/HtmlResultSet.java | 48 - .../resultset/io/IOResultDeserializer.java | 37 - .../resultset/io/IOResultSerializer.java | 47 - .../storage/resultset/io/IOResultSet.java | 43 - .../resultset/picture/PictureResultSet.java | 48 - .../table/TableResultDeserializer.java | 100 -- .../table/TableResultSerializer.java | 111 -- .../resultset/table/TableResultSet.java | 44 - .../resultset/txt/TextResultDeserializer.java | 36 - .../resultset/txt/TextResultSerializer.java | 50 - .../storage/resultset/txt/TextResultSet.java | 46 - .../linkis/storage/script/Compaction.java | 44 - .../linkis/storage/script/ParserFactory.java | 31 - .../linkis/storage/script/ScriptFsWriter.java | 37 - .../linkis/storage/script/Variable.java | 48 - .../linkis/storage/script/VariableParser.java | 131 --- .../script/compaction/PYScriptCompaction.java | 45 - .../script/compaction/QLScriptCompaction.java | 45 - .../compaction/ScalaScriptCompaction.java | 45 - .../compaction/ShellScriptCompaction.java | 45 - .../script/parser/CommonScriptParser.java | 75 -- .../storage/script/parser/PYScriptParser.java | 45 - .../storage/script/parser/QLScriptParser.java | 45 - .../script/parser/ScalaScriptParser.java | 45 - 
.../script/parser/ShellScriptParser.java | 45 - .../script/reader/StorageScriptFsReader.java | 161 --- .../script/writer/StorageScriptFsWriter.java | 124 --- .../storage/source/AbstractFileSource.java | 124 --- .../linkis/storage/source/FileSource.java | 181 ---- .../linkis/storage/source/FileSplit.java | 324 ------ .../storage/source/ResultsetFileSource.java | 64 -- .../linkis/storage/source/TextFileSource.java | 71 -- .../linkis/storage/utils/FileSystemUtils.java | 191 ---- .../apache/linkis/storage/utils/OrcUtils.java | 152 --- .../storage/utils/StorageConfiguration.java | 161 --- .../linkis/storage/utils/StorageHelper.java | 157 --- .../linkis/storage/utils/StorageUtils.java | 279 ----- .../org/apache/linkis/storage/FSFactory.scala | 98 ++ .../apache/linkis/storage/LineMetaData.scala | 20 +- .../apache/linkis/storage/LineRecord.scala | 19 +- .../storage/conf/LinkisStorageConf.scala | 81 ++ .../linkis/storage/csv/CSVFsReader.scala | 11 +- .../linkis/storage/csv/CSVFsWriter.scala | 48 + .../linkis/storage/csv/StorageCSVReader.scala | 4 +- .../linkis/storage/csv/StorageCSVWriter.scala | 117 +++ .../linkis/storage/domain/DataType.scala | 164 +++ .../linkis/storage/domain/Dolphin.scala | 130 +++ .../linkis/storage/domain/MethodEntity.scala | 97 ++ .../linkis/storage/excel/ExcelFsReader.scala | 22 + .../linkis/storage/excel/ExcelFsWriter.scala} | 40 +- .../linkis/storage/excel/ExcelXlsReader.java | 378 +++++++ .../storage/excel/StorageExcelReader.scala | 4 +- .../storage/excel/StorageExcelWriter.scala | 236 +++++ .../excel/StorageMultiExcelWriter.scala} | 43 +- .../apache/linkis/storage/io/IOClient.scala | 75 ++ .../io/IOMethodInterceptorCreator.scala} | 46 +- .../resultset/DefaultResultSetFactory.scala | 130 +++ .../storage/resultset/ResultMetaData.scala} | 6 +- .../storage/resultset/ResultRecord.scala | 22 + .../storage/resultset/ResultSetFactory.scala | 69 ++ .../storage/resultset/ResultSetReader.scala | 93 ++ .../storage/resultset/ResultSetWriter.scala | 74 ++ 
.../storage/resultset/StorageResultSet.scala | 51 + .../resultset/StorageResultSetReader.scala | 174 ++++ .../resultset/StorageResultSetWriter.scala | 230 +++++ .../resultset/html/HtmlResultSet.scala | 34 + .../storage/resultset/io/IOMetaData.scala | 25 + .../storage/resultset/io/IORecord.scala | 25 + .../resultset/io/IOResultDeserializer.scala} | 26 +- .../resultset/io/IOResultSerializer.scala | 43 + .../storage/resultset/io/IOResultSet.scala | 32 + .../resultset/picture/PictureResultSet.scala | 22 +- .../resultset/table/TableMetaData.scala | 20 +- .../storage/resultset/table/TableRecord.scala | 14 +- .../table/TableResultDeserializer.scala | 156 +++ .../table/TableResultSerializer.scala | 99 ++ .../resultset/table/TableResultSet.scala} | 28 +- .../txt/TextResultDeserializer.scala | 34 + .../resultset/txt/TextResultSerializer.scala | 46 + .../storage/resultset/txt/TextResultSet.scala | 33 + .../storage/script/ScriptFsReader.scala | 37 + .../storage/script/ScriptFsWriter.scala | 115 +++ .../storage/script/VariableParser.scala | 117 +++ .../compaction/CommonScriptCompaction.scala} | 30 +- .../compaction/PYScriptCompaction.scala | 39 + .../compaction/QLScriptCompaction.scala | 38 + .../compaction/ScalaScriptCompaction.scala | 38 + .../compaction/ShellScriptCompaction.scala} | 30 +- .../script/parser/CommonScriptParser.scala | 63 ++ .../script/parser/PYScriptParser.scala | 26 +- .../script/parser/QLScriptParser.scala | 26 +- .../script/parser/ScalaScriptParser.scala | 39 + .../script/parser/ShellScriptParser.scala | 39 + .../script/reader/StorageScriptFsReader.scala | 126 +++ .../script/writer/StorageScriptFsWriter.scala | 120 +++ .../storage/source/AbstractFileSource.scala | 82 ++ .../linkis/storage/source/FileSource.scala | 160 +++ .../linkis/storage/source/FileSplit.scala | 202 ++++ .../storage/source/ResultsetFileSource.scala | 53 + .../storage/source/TextFileSource.scala | 52 + .../storage/utils/FileSystemUtils.scala | 171 ++++ 
.../storage/utils/StorageConfiguration.scala | 85 ++ .../linkis/storage/utils/StorageHelper.scala | 107 ++ .../linkis/storage/utils/StorageUtils.scala | 231 +++++ .../linkis/storage/LineMetaDataTest.java | 36 - .../StorageResultSetWriterFactoryTest.java | 53 - .../writer/StorageScriptFsWriterTest.java | 8 +- .../source/ResultsetFileSourceTest.java | 50 - .../test/resources/result-read-test.dolphin | 1 - .../test/resources/storage-read-test.dolphin | 1 - .../linkis/storage/domain/DataTypeTest.scala | 17 +- .../utils/StorageConfigurationTest.scala | 5 +- .../storage/utils/StorageUtilsTest.scala | 16 +- .../test/resources/conf/linkis-cli.properties | 2 +- .../src/test/resources/linkis-cli.properties | 2 +- .../ujes/client/JobObserveActionTest.scala | 2 +- .../governance/common/utils/ECPathUtils.java | 53 - ...onConf.scala => GovernaceCommonConf.scala} | 21 +- .../constant/job/JobRequestConstants.scala | 5 - .../exception/GovernanceErrorException.scala | 5 +- .../common/protocol/task/RequestTask.scala | 3 +- .../protocol/task/ResponseEngineConnPid.scala | 3 +- .../protocol/task/ResponseTaskExecute.scala | 27 - .../governance/common/utils/ECPathUtils.scala | 66 ++ .../common/utils/EngineConnArguments.scala | 8 +- .../common/utils/GovernanceUtils.scala | 27 +- .../utils/OnceExecutorContentUtils.scala | 10 +- .../conf/GovernanceCommonConfTest.scala | 4 +- .../core/launch/ProcessEngineConnLaunch.scala | 26 +- .../linkis/ecm/core/utils/PortUtils.scala | 2 +- .../src/main/assembly/distribution.xml | 3 - .../linkis/ecm/restful/ECMRestfulApi.java | 2 +- .../linkis/ecm/scheduled/EcmClearTask.java | 2 +- .../operator/EngineConnLogOperator.java | 219 ---- .../operator/EngineConnYarnLogOperator.java | 149 --- .../impl/DefaultEngineConnKillService.java | 6 +- .../linkis/ecm/utils/ECMCacheUtils.java} | 37 +- .../ecm/server/conf/ECMConfiguration.scala | 17 +- .../server/exception/ECMErrorException.scala | 11 +- .../ecm/server/hook/JarUDFLoadECMHook.scala | 1 + 
.../operator/EngineConnLogOperator.scala | 193 ++++ .../operator/EngineConnYarnLogOperator.scala | 194 ++++ .../server/service/ECMMetricsService.scala | 4 +- .../AbstractEngineConnLaunchService.scala | 7 +- .../impl/BmlResourceLocalizationService.scala | 2 + .../impl/DefaultECMMetricsService.scala | 25 + .../impl/DefaultECMRegisterService.scala | 22 +- .../service/impl/DefaultOperateService.scala | 24 +- .../impl/ProcessEngineConnLaunchService.scala | 41 +- .../impl/DefaultECMRegisterServiceTest.java | 55 - .../OperableOnceEngineConnOperator.java | 92 -- .../once/executor/OnceExecutor.scala | 8 +- .../OnceExecutorExecutionContext.scala | 8 +- .../OperableOnceEngineConnOperator.scala | 72 ++ .../AsyncConcurrentComputationExecutor.scala | 66 +- .../bml/BmlEnginePreExecuteHook.scala | 4 + .../conf/ComputationExecutorConf.scala | 8 +- .../executor/cs/CSTableRegister.scala | 2 +- .../executor/cs/CSTableResultSetWriter.scala | 2 +- .../execute/ComputationExecutor.scala | 44 +- .../ConcurrentComputationExecutor.scala | 51 +- .../execute/EngineExecutionContext.scala | 37 +- .../hook/ComputationExecutorHook.scala | 7 + .../executor/hook/InitSQLHook.scala | 3 +- .../executor/hook/PythonModuleLoad.scala | 161 +++ .../hook/PythonModuleLoadEngineConnHook.scala | 64 ++ .../executor/hook/PythonSparkEngineHook.scala | 45 + .../executor/hook/UDFLoadEngineConnHook.scala | 3 +- .../executor/hook/UseDatabaseEngineHook.scala | 2 +- .../hook/executor/ExecuteOnceHook.scala | 8 + .../executor/rs/RsOutputStream.scala | 6 +- .../service/TaskExecutionServiceImpl.scala | 255 ++--- .../upstream/ECTaskEntranceMonitor.scala | 2 +- ...ingleThreadUpstreamConnectionMonitor.scala | 21 + .../ECTaskEntranceMonitorService.scala | 3 +- .../utlis/ComputationEngineConstant.scala | 2 + .../common/conf/EngineConnConf.scala | 4 - .../common/conf/EngineConnConstant.scala | 2 +- .../core/util/EngineConnUtils.scala | 2 +- .../executor/log/AbstractLogCache.java | 39 +- 
.../acessible/executor/log/MountLogCache.java | 2 +- .../acessible/executor/log/SendAppender.java | 38 +- .../acessible/executor/log/TimeLogCache.java | 25 +- .../EngineConnApplicationInfoOperator.java | 57 -- .../AccessibleEngineConnExecution.scala | 2 +- .../info/NodeHealthyInfoManager.scala | 2 +- .../acessible/executor/log/LogHelper.scala | 7 + .../EngineConnApplicationInfoOperator.scala | 52 + .../service/DefaultManagerService.scala | 3 +- .../service/DefaultOperateService.scala | 29 +- .../service/EngineConnTimedLockService.scala | 12 +- .../executor/service/LockService.scala | 7 +- .../utils/AccessableExecutorUtils.scala | 1 + .../hook/CallbackEngineConnHook.scala | 25 +- .../callback/service/EngineConnCallback.scala | 2 +- ...ala => EngineConnIdentifierCallback.scala} | 1 + .../service/EngineConnTimedCallback.scala | 20 + .../executor/ExecutorExecutionContext.scala | 17 +- .../EngineConnExecutorConfiguration.scala | 3 + .../EngineconnCoreErrorCodeSummary.java | 6 +- .../exception/EngineConnPluginErrorCode.scala | 26 + .../EngineConnPluginErrorException.scala | 7 +- .../common/resource/UserNodeResource.scala | 22 + .../linkis/entrance/parser/ParserUtils.java | 2 +- .../entrance/restful/EntranceRestfulApi.java | 2 +- .../impl/LogPathCreateInterceptor.scala | 6 +- .../linkis-application-manager/pom.xml | 6 + .../loader/EngineConnPluginLoaderConf.java | 44 - .../CacheablesEngineConnPluginLoader.java | 3 +- .../DefaultEngineConnPluginLoader.java | 27 +- .../BmlEngineConnPluginResourceLoader.java | 4 +- .../loader/utils/EngineConnPluginUtils.java | 2 +- .../conf/EngineConnPluginConfiguration.java | 42 - .../EngineConnPluginsLoaderFactory.java | 31 - ...bstractEngineConnBmlResourceGenerator.java | 134 --- ...DefaultEngineConnBmlResourceGenerator.java | 114 --- .../EngineConnLocalizeResourceImpl.java | 69 -- .../DefaultEngineConnLaunchService.java | 106 -- ...faultEngineConnResourceFactoryService.java | 80 -- .../DefaultEngineConnResourceService.java | 351 ------- 
.../service/EngineConnResourceRequest.java | 53 - .../service/EngineConnResourceService.java | 43 - .../service/GetEngineConnResourceRequest.java | 25 - .../RefreshAllEngineConnResourceRequest.java | 28 - .../RefreshEngineConnResourceRequest.java | 25 - .../impl/EnginePluginAdminServiceImpl.java | 18 +- .../manager/LinkisManagerApplication.java | 1 + .../manager/am/conf/AMConfiguration.java | 134 ++- ...ApplicationManagerSpringConfiguration.java | 32 - .../am/conf/ConfigurationMapCache.java | 4 +- ...DefaultEngineConnConfigurationService.java | 5 +- .../manager/am/conf/ManagerMonitorConf.java | 9 +- .../am/converter/DefaultMetricsConverter.java | 17 +- .../converter}/MetricsConverter.java | 2 +- .../manager/am/exception/AMErrorCode.java | 3 +- .../am/exception/AMErrorException.java | 10 +- .../manager/am/label/AMLabelChecker.java | 2 - .../manager/am/label/AMLabelFilter.java | 4 +- .../common => am}/label/LabelChecker.java | 2 +- .../MultiUserEngineReuseLabelChooser.java | 2 - .../am/locker/DefaultEngineNodeLocker.java | 10 +- .../am/manager/DefaultEMNodeManager.java | 10 +- .../am/manager/DefaultEngineNodeManager.java | 129 +-- .../manager/am/manager/EMNodeManager.java | 7 +- .../manager/am/manager/EngineNodeManager.java | 2 + .../am/pointer/AbstractNodePointer.java | 18 +- .../am/pointer/DefaultEMNodPointer.java | 42 +- .../am/pointer/DefaultEngineNodPointer.java | 10 +- .../am/pointer/DefaultNodePointerBuilder.java | 3 - .../common => am}/pointer/EMNodPointer.java | 2 +- .../pointer/EngineNodePointer.java | 2 +- .../common => am}/pointer/NodePointer.java | 2 +- .../pointer/NodePointerBuilder.java | 2 +- .../am/restful/ECResourceInfoRestfulApi.java | 110 +- .../manager/am/restful/EMRestfulApi.java | 170 +++- .../manager/am/restful/EngineRestfulApi.java | 109 +- .../am/selector/DefaultECAvailableRule.java | 43 - .../am/selector/DefaultNodeSelector.java | 75 -- .../rule/AvailableNodeSelectRule.java | 68 -- .../rule/ConcurrencyNodeSelectRule.java | 37 - 
.../am/selector/rule/NewECMStandbyRule.java | 82 -- .../selector/rule/ResourceNodeSelectRule.java | 124 --- .../am/selector/rule/ScoreNodeSelectRule.java | 70 -- .../selector/rule/TaskInfoNodeSelectRule.java | 73 -- .../am/service/ECResourceInfoService.java | 17 +- .../ConfCacheRemoveBroadcastListener.java | 75 -- .../service/em/DefaultECMOperateService.java | 37 - .../am/service/em/DefaultEMEngineService.java | 231 ----- .../am/service/em/DefaultEMInfoService.java | 142 --- .../service/em/DefaultEMRegisterService.java | 124 --- .../em/DefaultEMUnregisterService.java | 79 -- .../am/service/em/ECMOperateService.java | 27 - .../am/service/em/EMRegisterService.java | 26 - .../am/service/em/EMUnregisterService.java | 28 - .../service/engine/AbstractEngineService.java | 41 - .../engine/DefaultEngineAskEngineService.java | 198 ---- .../DefaultEngineConnCanKillService.java | 196 ---- .../engine/DefaultEngineCreateService.java | 441 -------- .../engine/DefaultEngineInfoService.java | 145 --- .../engine/DefaultEngineOperateService.java | 39 - .../engine/DefaultEngineRecycleService.java | 92 -- .../engine/DefaultEngineReuseService.java | 272 ----- .../engine/DefaultEngineStopService.java | 316 ------ .../EngineConnStatusCallbackService.java | 25 - .../am/service/engine/EngineInfoService.java | 52 - .../service/engine/EngineOperateService.java | 28 - .../am/service/engine/EngineStopService.java | 61 -- .../service/heartbeat/AMHeartbeatService.java | 86 -- ...efaultEngineConnStatusCallbackService.java | 39 +- .../impl/ECResourceInfoServiceImpl.java | 86 +- .../service/monitor/NodeHeartbeatMonitor.java | 372 ------- .../linkis/manager/am/util/EMUtils.java | 123 +++ .../linkis/manager/am/util/LinkisUtils.java | 248 ----- .../linkis/manager/am/utils/AMUtils.java | 347 ------- .../manager/am/utils/DefaultRetryHandler.java | 122 --- .../linkis/manager/am/vo/AMEngineNodeVo.java | 7 +- .../linkis/manager/am/vo/CanCreateECRes.java | 99 ++ .../apache/linkis/manager/am/vo/ConfigVo.java 
| 63 +- .../apache/linkis/manager/am/vo/EMNodeVo.java | 16 +- .../manager/label/conf/LabelManagerConf.java | 29 - .../label/score/DefaultNodeLabelScorer.java | 205 ---- .../label/service/NodeLabelRemoveService.java | 25 - .../label/service/NodeLabelService.java | 86 -- .../impl/DefaultNodeLabelAddService.java | 69 -- .../impl/DefaultNodeLabelRemoveService.java | 82 -- .../service/impl/DefaultNodeLabelService.java | 497 --------- .../impl/DefaultResourceLabelService.java | 161 --- .../service/impl/DefaultUserLabelService.java | 167 --- .../manager/rm/conf/ResourceStatus.java | 4 +- .../manager/rm/domain/RMLabelContainer.java | 52 +- .../rm/entity/LabelResourceMapping.java | 68 -- .../manager/rm/exception/RMErrorCode.java | 30 +- .../KubernetesResourceRequester.java | 209 ---- .../KubernetesResourceIdentifierParser.java | 38 - .../impl/ExternalResourceServiceImpl.java | 20 +- .../rm/external/yarn/YarnQueueInfo.java | 86 ++ .../external/yarn/YarnResourceRequester.java | 95 +- .../manager/rm/message/RMMessageService.java | 73 -- .../manager/rm/restful/RMMonitorRest.java | 880 ---------------- .../rm/service/RequestResourceService.java | 473 --------- .../rm/service/ResourceLockService.java | 95 -- .../manager/rm/service/impl/ChangeType.java | 40 - .../impl/DefaultReqResourceService.java | 37 - .../service/impl/DefaultResourceManager.java | 886 ---------------- ...DriverAndKubernetesReqResourceService.java | 133 --- .../impl/DriverAndYarnReqResourceService.java | 151 --- .../impl/LabelResourceServiceImpl.java | 102 -- .../rm/service/impl/ResourceLogService.java | 217 ---- .../rm/service/impl/UserResourceService.java | 128 --- .../linkis/manager/rm/utils/RMUtils.java | 288 ------ .../rm/utils/RequestKerberosUrlUtils.java | 6 + .../manager/rm/utils/UserConfiguration.java | 227 ----- .../EngineConnBmlResourceMapper.xml | 44 +- .../ExternalResourceProviderDaoImpl.xml | 4 +- .../mysql/EngineConnBmlResourceMapper.xml | 93 -- .../loader/EngineConnPluginLoaderConf.scala | 44 + 
.../conf/EngineConnPluginConfiguration.scala | 42 + .../EngineConnLaunchInterceptor.scala} | 14 +- .../loader/EngineConnPluginsLoader.scala} | 18 +- ...stractEngineConnBmlResourceGenerator.scala | 123 +++ ...efaultEngineConnBmlResourceGenerator.scala | 109 ++ .../EngineConnBmlResourceGenerator.scala} | 27 +- .../DefaultEngineConnLaunchService.scala | 90 ++ ...aultEngineConnResourceFactoryService.scala | 64 ++ .../DefaultEngineConnResourceService.scala | 324 ++++++ .../service/EngineConnLaunchService.scala} | 15 +- .../EngineConnResourceFactoryService.scala} | 19 +- .../service/EngineConnResourceService.scala | 78 ++ .../manager/am/hook/AskEngineConnHook.scala | 9 +- .../am/hook/AskEngineConnHookContext.scala} | 12 +- .../am/selector/DefaultNodeSelector.scala | 75 ++ .../manager/am/selector/ECAvailableRule.scala | 55 + .../manager/am/selector/NodeSelector.scala} | 15 +- .../rule/AvailableNodeSelectRule.scala | 60 ++ .../rule/ConcurrencyNodeSelectRule.scala | 34 +- .../selector/rule/HotspotExclusionRule.scala} | 56 +- .../am/selector/rule/NewECMStandbyRule.scala | 83 ++ .../am/selector/rule/NodeSelectRule.scala} | 9 +- .../rule/OverLoadNodeSelectRule.scala | 66 ++ .../rule/ResourceNodeSelectRule.scala | 101 ++ .../selector/rule/ScoreNodeSelectRule.scala | 55 + .../rule/TaskInfoNodeSelectRule.scala | 63 ++ .../manager/am/service/EMEngineService.scala} | 27 +- .../manager/am/service/EngineService.scala | 12 +- .../am/service/HeartbeatService.scala} | 9 +- .../ConfCacheRemoveBroadcastListener.scala | 70 ++ .../service/em/DefaultECMOperateService.scala | 36 + .../service/em/DefaultEMEngineService.scala | 216 ++++ .../am/service/em/DefaultEMInfoService.scala | 310 ++++++ .../service/em/DefaultEMRegisterService.scala | 121 +++ .../em/DefaultEMUnregisterService.scala | 84 ++ .../am/service/em/ECMOperateService.scala} | 13 +- .../am/service/em/EMInfoService.scala} | 24 +- .../am/service/em/EMRegisterService.scala} | 23 +- .../am/service/em/EMUnregisterService.scala | 12 +- 
.../engine/AbstractEngineService.scala | 30 +- .../DefaultEngineAskEngineService.scala | 329 ++++++ .../DefaultEngineConnCanKillService.scala | 173 ++++ .../engine/DefaultEngineCreateService.scala | 474 +++++++++ .../engine/DefaultEngineInfoService.scala | 161 +++ .../engine/DefaultEngineOperateService.scala | 45 +- .../engine/DefaultEngineRecycleService.scala | 82 ++ .../engine/DefaultEngineReuseService.scala | 270 +++++ .../engine/DefaultEngineStopService.scala | 343 +++++++ .../engine/DefaultEngineSwitchService.scala} | 18 +- .../engine/EngineAskEngineService.scala | 33 +- .../engine/EngineConnCanKillService.scala} | 9 +- .../service/engine/EngineCreateService.scala} | 17 +- .../am/service/engine/EngineInfoService.scala | 53 + .../am/service/engine/EngineKillService.scala | 22 +- .../service/engine/EngineOperateService.scala | 24 +- .../engine/EngineRecycleService.scala} | 15 +- .../service/engine/EngineReuseService.scala} | 17 +- .../am/service/engine/EngineStopService.scala | 75 ++ .../service/engine/EngineSwitchService.scala} | 14 +- .../heartbeat/AMHeartbeatService.scala | 112 +++ .../monitor/NodeHeartbeatMonitor.scala | 493 +++++++++ .../linkis/manager/am/utils/AMUtils.scala | 310 ++++++ .../manager/label/LabelManagerUtils.scala} | 26 +- .../label/score/DefaultNodeLabelScorer.scala | 182 ++++ .../score/LabelScoreServiceInstance.scala | 35 + .../label/score/NodeLabelScorer.scala} | 35 +- .../label/service/NodeLabelAddService.scala} | 12 +- .../service/NodeLabelRemoveService.scala | 26 + .../label/service/NodeLabelService.scala | 103 ++ .../label/service/ResourceLabelService.scala} | 32 +- .../label/service/UserLabelService.scala | 30 +- .../impl/DefaultNodeLabelAddService.scala | 56 ++ .../impl/DefaultNodeLabelRemoveService.scala | 75 ++ .../impl/DefaultNodeLabelService.scala | 549 ++++++++++ .../impl/DefaultResourceLabelService.scala | 161 +++ .../impl/DefaultUserLabelService.scala | 123 +++ .../manager/rm/entity/LabelResourceMap.scala} | 39 +- 
.../rm/entity/ResourceOperationType.scala} | 10 +- .../manager/rm/message/RMMessageService.scala | 68 ++ .../manager/rm/restful/RMMonitorRest.scala | 843 ++++++++++++++++ .../rm/service/LabelResourceService.scala} | 39 +- .../rm/service/RequestResourceService.scala | 531 ++++++++++ .../rm/service/ResourceLockService.scala | 84 ++ .../manager/rm/service/ResourceManager.scala} | 68 +- .../impl/DefaultReqResourceService.scala | 21 +- .../service/impl/DefaultResourceManager.scala | 950 ++++++++++++++++++ .../DriverAndYarnReqResourceService.scala | 187 ++++ .../impl/LabelResourceServiceImpl.scala | 106 ++ .../rm/service/impl/ResourceLogService.scala | 234 +++++ .../rm/service/impl/UserResourceService.scala | 116 +++ .../utils/AcrossClusterRulesJudgeUtils.scala | 246 +++++ .../linkis/manager/rm/utils/RMUtils.scala | 365 +++++++ .../manager/rm/utils/UserConfiguration.scala | 174 ++++ .../service/common/label/LabelFilter.scala} | 13 +- .../common/label/ManagerLabelService.scala} | 18 +- .../src/test/java/org/apache/linkis/Scan.java | 26 - .../apache/linkis/WebApplicationServer.java | 34 - .../engineplugin/server/dao/BaseDaoTest.java | 31 - .../dao/EngineConnBmlResourceDaoTest.java | 111 -- ...Test.java => ECResourceInfoUtilsTest.java} | 2 +- .../dao/ExternalResourceProviderDaoTest.java | 40 - .../src/test/resources/application.properties | 39 - .../src/test/resources/create.sql | 47 - .../src/test/resources/create_pg.sql | 48 - .../src/test/resources/linkis.properties | 25 - .../label/conf/LabelManagerConfTest.scala | 35 + .../linkis/manager/rm/utils/RMUtilsTest.scala | 43 + .../manager/label/conf/LabelCommonConfig.java | 6 +- .../manager/label/entity/engine/RunType.scala | 2 +- .../manager/label/utils/LabelUtil.scala | 21 + .../manager/label/TestLabelBuilder.java | 5 +- .../entity/engine/EngineTypeLabelTest.java | 65 -- .../common/conf/ManagerCommonConf.java | 35 - .../manager/common/conf/RMConfiguration.java | 49 - .../manager/common/constant/AMConstant.java | 6 + 
.../common/entity/metrics/AMNodeMetrics.java | 11 + .../common/entity/metrics/NodeMetrics.java | 2 + .../manager/common/entity/node/AMEMNode.java | 11 + .../common/entity/node/AMEngineNode.java | 12 + .../manager/common/entity/node/AMNode.java | 4 + .../entity/persistence/PersistenceLabel.java | 17 +- .../persistence/PersistenceNodeMetrics.java | 11 + .../persistence/PersistencerEcNodeInfo.java | 4 +- .../common/entity/resource/CPUResource.java | 2 +- .../entity/resource/CommonNodeResource.java | 30 + .../resource/DriverAndYarnResource.java | 111 +- .../common/entity/resource/LoadResource.java | 2 +- .../entity/resource/MemoryResource.java | 2 +- .../common/entity/resource/NodeResource.java | 12 + .../common/entity/resource/Resource.java | 16 - .../common/entity/resource/ResourceType.java | 2 + .../entity/resource/SpecialResource.java | 4 +- .../ManagerCommonErrorCodeSummary.java | 3 +- .../manager/common/operator/Operator.java | 56 -- .../common/operator/OperatorFactoryImpl.java | 72 -- .../common/protocol/RequestManagerUnlock.java | 59 -- .../common/protocol/ResponseEngineLock.java | 56 -- .../common/protocol/ResponseEngineUnlock.java | 36 - .../common/protocol/em/ECMOperateRequest.java | 51 - .../protocol/em/ECMOperateResponse.java | 52 - .../em/EMResourceRegisterRequest.java | 79 -- .../common/protocol/em/RegisterEMRequest.java | 79 -- .../protocol/em/RegisterEMResponse.java | 50 - .../engine/EngineAskAsyncResponse.java | 59 -- .../protocol/engine/EngineAsyncResponse.java | 24 - .../engine/EngineConnStatusCallback.java | 68 -- .../engine/EngineConnStatusCallbackToAM.java | 69 -- .../protocol/engine/EngineCreateError.java | 66 -- .../protocol/engine/EngineCreateSuccess.java | 53 - .../protocol/engine/EngineOperateRequest.java | 42 - .../engine/EngineOperateResponse.java | 53 - .../protocol/label/LabelReportRequest.java | 43 - .../protocol/node/NodeHeartbeatMsg.java | 7 +- .../protocol/resource/ResourceProtocol.java | 22 - .../resource/ResourceUsedProtocol.java | 
51 - .../protocol/resource/ResourceWithStatus.java | 103 -- .../resource/ResponseTaskRunningInfo.java | 74 -- .../manager/common/utils/ManagerUtils.java | 7 - .../manager/common/utils/ResourceUtils.java | 30 +- .../linkis/manager/rm/NotEnoughResource.java | 36 - .../manager/rm/RequestResourceAndWait.java | 47 - .../manager/common/operator/Operator.scala | 44 + .../common/operator/OperatorFactory.scala | 64 ++ .../manager/common/protocol/EngineLock.scala} | 39 +- .../common/protocol/OperateRequest.scala} | 34 +- .../common/protocol/OperateResponse.scala | 13 +- .../protocol/em/ECMOperateRequest.scala} | 32 +- .../protocol/em/ECMOperateResponse.scala | 32 + .../em/EMResourceRegisterRequest.scala | 67 ++ .../protocol/em/RegisterEMRequest.scala | 67 ++ .../protocol/em/RegisterEMResponse.scala | 24 + .../protocol/engine/EngineAsyncResponse.scala | 38 + .../engine/EngineConnStatusCallback.scala | 55 + .../engine/EngineOperateRequest.scala | 31 + .../engine/EngineOperateResponse.scala | 32 + .../protocol/label/LabelUpdateRequest.scala | 21 +- .../protocol/resource/ResourceProtocol.scala | 57 ++ .../resource/ResourceWithStatus.scala | 21 +- .../linkis/manager/rm/ResourceInfo.scala} | 8 +- .../linkis/manager/rm/ResultResource.scala | 9 +- .../manager/dao/ECResourceRecordMapper.java | 3 +- .../linkis/manager/dao/LockManagerMapper.java | 7 + .../linkis/manager/dao/NodeManagerMapper.java | 4 +- ...kisManagerPersistenceErrorCodeSummary.java | 4 +- .../persistence/NodeManagerPersistence.java | 21 +- .../NodeMetricManagerPersistence.java | 12 +- .../ResourceManagerPersistence.java | 6 +- .../impl/DefaultLabelManagerPersistence.java | 8 + .../impl/DefaultLockManagerPersistence.java | 30 +- .../impl/DefaultNodeManagerPersistence.java | 70 +- .../DefaultNodeMetricManagerPersistence.java | 27 +- .../impl/DefaultResourceLabelPersistence.java | 17 +- .../DefaultResourceManagerPersistence.java | 12 +- .../manager/util/PersistenceManagerConf.java | 5 - 
.../mapper/common/ECResourceRecordMapper.xml | 109 ++ .../LabelManagerMapper.xml | 54 +- .../mapper/common/NodeManagerMapper.xml | 49 +- .../mapper/common/NodeMetricManagerMapper.xml | 9 +- .../ResourceManagerMapper.xml | 23 +- .../mapper/mysql/ECResourceRecordMapper.xml | 129 --- .../mapper/mysql/LabelManagerMapper.xml | 585 ----------- .../mapper/mysql/ResourceManagerMapper.xml | 148 --- .../postgresql/ECResourceRecordMapper.xml | 127 --- .../dao/ECResourceRecordMapperTest.java | 134 --- .../manager/dao/LabelManagerMapperTest.java | 293 ------ .../manager/dao/NodeManagerMapperTest.java | 1 - .../dao/ResourceManagerMapperTest.java | 45 +- .../src/test/resources/application.properties | 20 +- .../src/test/resources/create.sql | 55 +- .../src/test/resources/create_pg.sql | 189 ---- linkis-dist/bin/install.sh | 24 +- .../linkis/templates/configmap-init-sql.yaml | 12 +- .../templates/configmap-linkis-config.yaml | 15 +- .../package/admin/configuration_helper.sh | 6 +- .../conf/linkis-cg-linkismanager.properties | 2 +- .../conf/linkis-cli/linkis-cli.properties | 2 +- linkis-dist/package/conf/linkis.properties | 19 +- linkis-dist/package/db/linkis_ddl.sql | 398 ++++++-- linkis-dist/package/db/linkis_dml.sql | 162 +-- linkis-dist/package/db/linkis_dml_pg.sql | 6 +- linkis-dist/package/db/module/linkis-mg.sql | 6 +- .../upgrade/1.7.0_schema/mysql/linkis_ddl.sql | 104 ++ .../upgrade/1.7.0_schema/mysql/linkis_dml.sql | 104 ++ .../executor/DorisEngineConnExecutor.java | 6 +- ...cSearchProcessEngineConnLaunchBuilder.java | 2 +- .../ElasticSearchEngineConnExecutor.java | 6 +- .../executor/client/ResponseHandler.scala | 11 +- .../client/impl/ResponseHandlerImpl.scala | 6 +- .../errorcode/FlinkErrorCodeSummary.java | 132 +++ .../exception/ExecutorInitException.java | 45 + .../exception/FlinkInitFailedException.java | 36 + .../exception/JobExecutionException.java | 36 + .../exception/SqlExecutionException.java | 36 + .../flink/exception/SqlParseException.java} | 18 +- 
.../flink/executor/FlinkExecutor.scala | 2 +- .../FlinkManagerConcurrentExecutor.scala | 103 ++ .../flink/operator/KillOperator.scala | 32 +- .../flink/operator/ListOperator.scala | 17 +- .../flink/operator/StatusOperator.scala | 22 +- .../operator/TriggerSavepointOperator.scala | 25 +- .../FlinkRestClientManager.scala | 21 +- .../hive/src/main/resources/log4j2.xml | 4 +- .../hive/creation/HiveEngineConnFactory.scala | 2 +- .../HiveEngineConcurrentConnExecutor.scala | 4 +- .../executor/HiveEngineConnExecutor.scala | 11 +- .../impala/conf/ImpalaConfiguration.scala | 2 +- .../io_file/src/main/resources/log4j2.xml | 6 +- .../io/executor/IoEngineConnExecutor.scala | 97 +- .../io/service/FsProxyService.scala | 18 +- .../engineplugin/io/utils/IOHelp.scala | 34 +- .../executor/IoEngineConnExecutorTest.java | 77 -- .../src/test/resources/testIoResult.dolphin | 0 .../engineplugin/jdbc/ConnectionManager.java | 6 - .../jdbc/errorcode/JDBCErrorCodeSummary.java | 4 +- .../JDBCProcessEngineConnLaunchBuilder.scala | 2 +- .../jdbc/conf/JDBCConfiguration.scala | 6 + .../executor/JDBCEngineConnExecutor.scala | 24 +- .../jdbc/executor/JDBCHelper.java | 38 +- .../executor/JDBCMultiDatasourceParser.scala | 22 +- .../JDBCMultiDatasourceParserTest.scala | 6 +- .../executor/TestJDBCEngineConnExecutor.scala | 7 +- .../nebula/src/main/assembly/distribution.xml | 2 +- .../executor/NebulaEngineConnExecutor.java | 6 +- .../OpenLooKengProcessECLaunchBuilder.java | 2 +- .../OpenLooKengEngineConnExecutor.java | 4 +- .../pipeline/src/main/resources/log4j2.xml | 3 + .../executor/PipelineEngineConnExecutor.scala | 3 - .../PrestoProcessEngineConnLaunchBuilder.java | 2 +- .../executor/PrestoEngineConnExecutor.java | 4 +- .../factory/TestPrestoEngineConnFactory.java | 4 + .../python/PythonEngineConnPlugin.java | 80 -- .../conf/PythonEngineConfiguration.java | 51 - .../LinkisPythonErrorCodeSummary.java | 2 +- .../executor/PythonEngineConnExecutor.java | 189 ---- 
.../python/hook/PythonVersionEngineHook.java | 78 -- .../engineplugin/python/utils/Kind.java | 92 -- .../python/src/main/resources/log4j2.xml | 3 + .../src/main/resources/python/python.py | 4 +- .../python/PythonEngineConnPlugin.scala | 75 ++ .../conf/PythonEngineConfiguration.scala | 49 + .../exception/NoSupportEngineException.scala} | 15 +- .../PythonSessionNullException.scala | 35 + .../executor/PythonEngineConnExecutor.scala | 172 ++++ .../python/executor/PythonSession.scala | 43 +- .../factory/PythonEngineConnFactory.scala | 2 +- .../python/hook/PythonVersionEngineHook.scala | 67 ++ ...ythonProcessEngineConnLaunchBuilder.scala} | 6 +- .../engineplugin/python/utils/Kind.scala | 104 ++ .../engineplugin/python/utils/State.scala | 74 ++ .../TestNoSupportEngineException.java | 47 - .../TestPythonEngineConnExecutor.java | 82 -- .../factory/TestPythonEngineConnFactory.java | 44 - .../python/TestPythonEngineConnPlugin.scala} | 14 +- .../conf/TestPythonEngineConfiguration.scala} | 32 +- .../TestNoSupportEngineException.scala | 95 ++ .../TestPythonEngineConnExecutor.scala | 82 ++ .../factory/TestPythonEngineConnFactory.scala | 47 + .../engineplugin/python/utils/TestKind.scala} | 27 +- .../engineplugin/python/utils/TestState.scala | 32 +- .../repl/executor/ReplEngineConnExecutor.java | 6 +- .../shell/ShellEngineConnPlugin.java | 81 -- .../common/ShellEngineConnPluginConst.java | 24 - .../shell/conf/ShellEngineConnConf.java | 30 - .../exception/ShellCodeErrorException.java | 28 - .../shell/executor/ReaderThread.java | 97 -- .../shell/executor/ShellECTaskInfo.java | 54 - .../ShellEngineConnConcurrentExecutor.java | 151 --- .../executor/ShellEngineConnExecutor.java | 339 ------- .../shell/executor/YarnAppIdExtractor.java | 53 - .../conf/linkis-engineconn.properties | 3 +- .../shell/src/main/resources/conf/log4j2.xml | 5 +- .../shell/ShellEngineConnPlugin.scala | 76 ++ ...ShellProcessEngineConnLaunchBuilder.scala} | 6 +- .../shell/common/ShellEnginePluginConst.scala | 11 +- 
.../shell/conf/ShellEngineConnConf.scala | 14 +- .../exception/NoCorrectUserException.scala | 27 + .../shell/executor/ReaderThread.scala | 105 ++ .../shell/executor/ShellECTaskInfo.scala | 20 + .../ShellEngineConnConcurrentExecutor.scala | 378 +++++++ .../executor/ShellEngineConnExecutor.scala | 337 +++++++ .../shell/executor/YarnAppIdExtractor.scala | 81 ++ .../factory/ShellEngineConnFactory.scala | 17 +- .../executor/TestShellEngineConnExecutor.java | 62 -- .../shell/TestShellEngineConnPlugin.scala} | 20 +- .../TestShellEngineConnPluginConst.scala} | 17 +- .../TestNoCorrectUserException.scala} | 20 +- .../TestShellEngineConnExecutor.scala | 62 ++ .../spark/datacalc/util/PluginUtil.java | 1 - .../resources/linkis-engineconn.properties | 2 +- .../src/main/resources/python/mix_pyspark.py | 31 +- .../engineplugin/spark/common/SparkKind.scala | 22 - .../spark/config/SparkConfiguration.scala | 5 +- .../spark/datacalc/source/SolrSource.scala | 44 - .../spark/executor/SQLSession.scala | 4 +- .../executor/SparkEngineConnExecutor.scala | 42 +- .../spark/executor/SparkPythonExecutor.scala | 8 +- .../spark/executor/SparkScalaExecutor.scala | 4 +- .../executor/SparkSubmitOnceExecutor.scala | 2 +- .../factory/SparkEngineConnFactory.scala | 4 + .../spark/imexport/ExportData.scala | 13 +- .../spark/imexport/LoadData.scala | 57 +- ...SubmitProcessEngineConnLaunchBuilder.scala | 18 +- .../spark/mdq/MDQPostExecutionHook.scala | 14 +- .../spark/mdq/MDQPreExecutionHook.scala | 17 +- .../spark/utils/EngineUtils.scala | 4 +- .../datasources/csv/DolphinToSpark.scala | 26 +- .../spark/src/test/resources/etltest.dolphin | 4 - .../spark/cs/TestCSSparkHelper.scala | 5 + .../cs/TestCSSparkPostExecutionHook.scala | 5 + .../spark/datacalc/TestExcelCala.scala | 135 --- .../spark/datacalc/TestKafkaCala.scala | 118 --- .../spark/datacalc/TestRedisCalc.scala | 200 ---- .../spark/datacalc/TestRocketmqCala.scala | 71 -- .../spark/executor/TestSparkSqlExecutor.scala | 72 +- 
.../sqoop/src/main/assembly/distribution.xml | 4 - .../sqoop/client/config/ParamsMapping.java | 6 + .../sqoop/src/main/resources/log4j2.xml | 18 +- .../sqoop/client/RemoteClientHolder.scala | 97 ++ .../DataSourceRpcErrorException.scala | 21 +- .../executor/SqoopOnceCodeExecutor.scala | 10 +- .../sqoop/params/ConnectParamsResolver.scala | 82 ++ .../SqoopDataSourceParamsResolver.scala | 143 +++ .../trino/TrinoEngineConnPlugin.java | 78 -- .../trino/conf/TrinoConfiguration.java | 90 -- .../trino/conf/TrinoEngineConfig.java | 53 - .../trino/exception/TrinoClientException.java | 26 - .../exception/TrinoGrantmaException.java | 26 - .../exception/TrinoModifySchemaException.java | 26 - .../exception/TrinoStateInvalidException.java | 26 - .../executor/TrinoEngineConnExecutor.java | 561 ----------- .../password/CommandPasswordCallback.java | 4 +- .../password/StaticPasswordCallback.java | 2 +- .../engineplugin/trino/utils/TrinoCode.java | 47 - .../resources/linkis-engineconn.properties | 1 - .../trino/src/main/resources/log4j2.xml | 3 + .../trino/TrinoEngineConnPlugin.scala | 66 ++ ...TrinoProcessEngineConnLaunchBuilder.scala} | 24 +- .../trino/conf/TrinoConfiguration.scala | 73 ++ .../trino/conf/TrinoEngineConfig.scala | 48 + .../trino/exception/TrinoException.scala | 32 + .../executor/TrinoEngineConnExecutor.scala | 563 +++++++++++ .../engineplugin/trino/utils/TrinoCode.scala | 65 ++ .../trino/utils/TrinoSQLHook.scala} | 18 +- linkis-extensions/linkis-et-monitor/pom.xml | 23 +- .../src/main/assembly/distribution.xml | 3 - .../service/impl/CleanerServiceImpl.java | 3 +- .../config/ApplicationConfiguration.java | 48 + .../linkis/monitor/config/MonitorConfig.java | 5 +- .../monitor/jobhistory/entity/JobHistory.java | 186 +++- .../linkis/monitor/scheduled/BmlClear.java | 17 + .../scheduled/EntranceTaskMonitor.java | 47 +- .../monitor/scheduled/JobHistoryClear.java | 1 + .../monitor/scheduled/JobHistoryMonitor.java | 127 +-- .../monitor/scheduled/ResourceClear.java | 49 + 
.../monitor/scheduled/ResourceMonitor.java | 199 ++-- .../monitor/scheduled/TaskArchiveClear.java | 53 + .../linkis/monitor/until/JobMonitorUtils.java | 88 ++ .../mapper/common/InsLabelRelationMapper.xml | 15 +- .../mapper/common/InstanceInfoMapper.xml | 13 +- .../mapper/common/InstanceLabelMapper.xml | 13 +- .../mapper/common/JobHistoryMapper.xml | 74 +- ...kisJobHistoryScanSpringConfiguration.scala | 5 +- .../linkis/monitor/constants/Constants.scala | 12 +- .../jobhistory/JobHistoryDataFetcher.scala | 5 +- .../errorcode/JobHistoryErrCodeRule.scala | 25 +- .../jobhistory/index/JobIndexHitEvent.scala | 22 + .../jobhistory/index/JobIndexRule.scala | 82 ++ .../jobhistory/index/JobIndexSender.scala | 25 + .../jobtime/JobTimeExceedAlertSender.scala | 3 +- .../labels/JobHistoryLabelsAlertSender.scala | 6 +- .../labels/JobHistoryLabelsRule.scala | 76 +- .../runtime/CommonJobRunTimeRule.scala | 15 +- .../runtime/JobHistoryRunTimeRule.scala | 15 +- .../utils/alert/ims/ImsAlertDesc.scala | 32 +- .../alert/ims/ImsAlertPropFileData.scala | 1 + .../monitor/utils/alert/ims/ImsRequest.scala | 1 + .../utils/alert/ims/MonitorAlertUtils.scala | 54 +- .../utils/alert/ims/UserLabelAlertUtils.scala | 47 - .../utils/alert/PooledImsAlertSenderTest.java | 3 + .../src/test/resources/log4j2-console.xml | 13 +- .../storage/io/client/DefaultIOClient.scala | 4 +- .../io/iteraceptor/IOMethodInterceptor.scala | 14 +- .../IOMethodInterceptorCreatorImpl.scala | 4 +- .../storage/io/utils/IOClientUtils.scala | 3 +- .../DefaultCodeExecTaskExecutorManager.scala | 7 +- .../monitor/EngineConnMonitor.scala | 3 +- .../physical/CodeLogicalUnitExecTask.scala | 6 +- .../ComputationTaskExecutionReceiver.scala | 10 +- .../ecm/ComputationEngineConnManager.scala | 54 +- .../ecm/cache/EngineAsyncResponseCache.scala | 16 +- .../orchestrator/ecm/conf/ECMPluginConf.scala | 2 + .../impl/ComputationEngineConnExecutor.scala | 2 +- .../DefaultEngineAsyncResponseService.scala | 14 +- 
.../execution/AbstractExecution.scala | 26 - .../execution/impl/DefaultTaskManager.scala | 47 +- .../execution/impl/ExecutionImpl.scala | 27 +- .../execution/ExecutionTaskEvent.scala | 13 +- .../bml/service/impl/TaskServiceImpl.java | 11 +- .../BmlProjectMapper.xml | 73 +- .../mapper/common/DownloadMapper.xml | 6 +- .../{mysql => common}/ResourceMapper.xml | 32 +- .../{postgresql => common}/TaskMapper.xml | 8 +- .../{postgresql => common}/VersionMapper.xml | 85 +- .../mapper/mysql/BmlProjectMapper.xml | 78 -- .../resources/mapper/mysql/TaskMapper.xml | 69 -- .../resources/mapper/mysql/VersionMapper.xml | 243 ----- .../mapper/postgresql/ResourceMapper.xml | 110 -- .../linkis/bml/dao/BmlProjectDaoTest.java | 5 - .../src/test/resources/application.properties | 5 +- .../src/test/resources/create_pg.sql | 129 --- .../dao/AcrossClusterRuleMapper.java | 3 +- .../api/AcrossClusterRuleRestfulApi.java | 5 +- .../restful/api/ConfigurationRestfulApi.java | 14 +- .../restful/api/TemplateRestfulApi.java | 3 + .../api/TenantConfigrationRestfulApi.java | 9 +- .../api/UserIpConfigrationRestfulApi.java | 2 +- .../service/AcrossClusterRuleService.java | 7 +- .../service/TenantConfigService.java | 2 + .../impl/AcrossClusterRuleServiceImpl.java | 10 +- .../impl/TemplateConfigKeyServiceImpl.java | 5 +- .../service/impl/TenantConfigServiceImpl.java | 8 +- .../service/impl/UserIpConfigServiceImpl.java | 2 +- .../AcrossClusterRuleMapper.xml | 2 + .../ConfigKeyLimitForUserMapper.xml | 0 .../mapper/{mysql => common}/ConfigMapper.xml | 4 +- .../DepartmentTenantMapper.xml} | 68 +- .../resources/mapper/common/LabelMapper.xml | 18 +- .../TemplateConfigKeyMapper.xml | 0 .../mapper/{mysql => common}/UserIpMapper.xml | 12 +- .../{mysql => common}/UserTenantMapper.xml | 4 +- .../postgresql/AcrossClusterRuleMapper.xml | 130 --- .../ConfigKeyLimitForUserMapper.xml | 145 --- .../mapper/postgresql/ConfigMapper.xml | 485 --------- .../postgresql/TemplateConfigKeyMapper.xml | 161 --- 
.../mapper/postgresql/UserIpMapper.xml | 91 -- .../configuration/conf/Configuration.scala | 8 - .../configuration/constant/Constants.scala | 2 +- .../service/CategoryService.scala | 7 +- .../configuration/dao/ConfigMapperTest.java | 39 +- .../configuration/dao/LabelMapperTest.java | 17 + .../exception/ConfigurationExceptionTest.java | 17 + .../api/ConfigurationRestfulApiTest.java | 24 +- .../src/test/resources/application.properties | 11 +- .../src/test/resources/create.sql | 2 +- .../src/test/resources/create_pg.sql | 148 --- .../ContextValueTypeConditionParser.java | 19 +- .../linkis/cs/conf/CSConfiguration.java | 12 +- .../contextHistoryMapper.xml | 22 +- .../contextIDListenerMapper.xml | 11 +- .../{mysql => common}/contextIDMapper.xml | 4 +- .../common/contextKeyListenerMapper.xml | 4 +- .../{mysql => common}/contextMapMapper.xml | 6 +- .../postgresql/contextHistoryMapper.xml | 83 -- .../postgresql/contextIDListenerMapper.xml | 50 - .../mapper/postgresql/contextIDMapper.xml | 110 -- .../mapper/postgresql/contextMapMapper.xml | 172 ---- .../dao/ContextHistoryMapperTest.java | 6 +- .../persistence/dao/ContextIDMapperTest.java | 12 +- .../persistence/dao/ContextMapMapperTest.java | 266 ----- .../cs/server/conf/ContextServerConfTest.java | 1 + .../src/test/resources/application.properties | 5 +- .../src/test/resources/application.yml | 13 - .../src/test/resources/create_pg.sql | 90 -- .../label/InsLabelAutoConfiguration.java | 6 +- .../label/restful/InstanceRestful.java | 23 +- .../service/impl/SpringInsLabelService.java | 2 - .../InsLabelRelationMapper.xml | 77 +- .../{mysql => common}/InstanceInfoMapper.xml | 22 +- .../{mysql => common}/InstanceLabelMapper.xml | 40 +- .../postgresql/InsLabelRelationMapper.xml | 205 ---- .../mapper/postgresql/InstanceInfoMapper.xml | 55 - .../mapper/postgresql/InstanceLabelMapper.xml | 114 --- .../label/dao/InsLabelRelationDaoTest.java | 10 - .../label/dao/InstanceInfoDaoTest.java | 91 -- .../label/dao/InstanceLabelDaoTest.java | 
5 - .../src/test/resources/application.properties | 3 +- .../src/test/resources/create_pg.sql | 65 -- .../jobhistory/dao/JobHistoryMapper.java | 29 +- .../linkis/jobhistory/entity/JobHistory.java | 159 +-- .../linkis/jobhistory/entity/QueryTaskVO.java | 46 + .../restful/api/QueryRestfulApi.java | 427 ++++++-- .../jobhistory/util/JobhistoryUtils.java | 169 ++++ .../mapper/common/JobDetailMapper.xml | 22 +- .../mapper/mysql/JobHistoryMapper.xml | 121 ++- .../conf/JobhistoryConfiguration.scala | 5 + .../conversions/TaskConversions.scala | 63 +- .../service/JobHistoryQueryService.java | 16 +- .../impl/JobHistoryQueryServiceImpl.scala | 78 +- .../linkis/jobhistory/util/QueryUtils.scala | 68 +- .../jobhistory/dao/JobDetailMapperTest.java | 95 -- .../jobhistory/dao/JobHistoryMapperTest.java | 8 +- .../restful/api/QueryRestfulApiTest.java | 179 ---- .../service/JobHistoryQueryServiceTest.java | 12 +- .../src/test/resources/application.properties | 8 +- .../src/test/resources/create.sql | 4 +- .../src/test/resources/create_pg.sql | 62 -- .../errorcode/client/ClientConfiguration.java | 2 +- .../linkis/bml/conf/BmlConfiguration.scala | 2 +- .../cs/client/utils/ContextClientConf.scala | 2 +- .../config/DatasourceClientConfig.scala | 2 +- .../filesystem/conf/WorkspaceClientConf.scala | 2 +- .../BmlClientErrorCodeSummaryTest.java | 45 - .../builder/ContextClientFactoryTest.java | 35 - .../builder/HttpContextClientConfigTest.java | 33 - .../CsClientErrorCodeSummaryTest.java | 59 -- .../apache/linkis/cs/client/test/Test.java | 134 --- .../test/bean/ClientTestContextKey.java | 67 -- .../client/test/bean/ResourceValueBean.java | 45 - .../listener/CommonContextKeyListener.java | 40 - .../test/no_context_search/TestClear.java | 60 -- .../cs/client/test/restful/RestfulTest.java | 146 --- .../cs/client/test/service/TestInfo.java | 99 -- .../cs/client/test/service/TestRemove.java | 113 --- .../test/service/TestSearchService.java | 113 --- .../test_multiuser/TestChangeContext.java | 110 
-- .../test_multiuser/TestCreateContext.java | 122 --- .../client/utils/ContextClientConfTest.java | 36 - .../client/utils/ContextServiceUtilsTest.java | 103 -- .../cs/client/utils/SerializeHelperTest.java | 60 -- .../client/TestDataSourceClient.scala | 123 --- .../datasource/client/TestHiveClient.scala | 144 --- .../datasource/client/TestMysqlClient.scala | 149 --- .../config/DatasourceClientConfigTest.java | 49 - .../DataSourceClientBuilderExceptionTest.java | 34 - .../imp/LinkisDataSourceRemoteClientTest.java | 37 - .../client/ClientConfigurationTest.java | 46 - .../client/ErrorCodeClientBuilderTest.java | 42 - .../client/LinkisErrorCodeClientTest.java | 44 - .../client/handler/ErrorCodeHandlerTest.java | 37 - .../handler/LinkisErrorCodeHandlerTest.java | 48 - .../manager/LinkisErrorCodeManagerTest.java | 45 - .../LinkisErrorCodeSynchronizerTest.java | 46 - .../client/action/ErrorCodeActionTest.scala | 41 - .../action/OpenScriptFromBMLActionTest.scala | 35 - .../conf/WorkspaceClientConfTest.scala | 45 - .../linkis/udf/entity/PythonModuleInfoVO.java | 209 ++++ .../api/rpc/RequestPythonModuleProtocol.scala | 28 + .../rpc/ResponsePythonModuleProtocol.scala | 22 +- .../entity/enumeration/ContextScopeTest.java | 40 - .../entity/enumeration/ContextTypeTest.java | 48 - .../common/entity/enumeration/DBTypeTest.java | 40 - .../entity/enumeration/WorkTypeTest.java | 40 - .../protocol/ContextHistoryTypeTest.java | 36 - .../cs/common/protocol/ContextIDTypeTest.java | 43 - .../common/protocol/ContextKeyTypeTest.java | 37 - .../protocol/ContextKeyValueTypeTest.java | 37 - .../common/protocol/ContextValueTypeTest.java | 37 - .../ContextSerializationHelperTest.java | 199 ---- ...CombinedNodeIDContextIDSerializerTest.java | 59 -- .../CommonContextKeySerializerTest.java | 60 -- .../LinkisBMLResourceSerializerTest.java | 63 -- .../data/CSResultDataSerializerTest.java | 60 -- .../data/LinkisJobDataSerializerTest.java | 61 -- .../value/metadata/CSTableSerializerTest.java | 65 -- 
.../object/CSFlowInfosSerializerTest.java | 57 -- .../test/ContextSerializationHelperTest.java | 187 ---- .../cs/common/utils/CSCommonUtilsTest.java | 67 -- .../utils/CSHighAvailableUtilsTest.java | 64 -- .../cs/listener/test/TestContextID.java | 35 - .../cs/listener/test/TestContextKeyValue.java | 49 - .../errorcode/common/CommonConfTest.java | 33 - .../query/common/MdmConfigurationTest.java | 37 - .../common/cache/CacheConfigurationTest.java | 40 - .../common/cache/ConnCacheManagerTest.java | 48 - .../MetaMethodInvokeExceptionTest.java | 34 - .../exception/MetaRuntimeExceptionTest.java | 34 - .../udf/excepiton/UDFExceptionTest.java | 33 - .../linkis/udf/utils/ConstantVarTest.java | 48 - .../server/response/EngineLabelResponse.java | 2 +- .../restful/GatewayAuthTokenRestfulApi.java | 46 + .../service/GatewayAuthTokenService.java | 2 + .../impl/GatewayAuthTokenServiceImpl.java | 8 + .../conf/WorkSpaceConfiguration.java | 15 +- .../constant/WorkSpaceConstants.java | 8 + .../exception/WorkspaceExceptionManager.java | 18 + .../restful/api/BMLFsRestfulApi.java | 11 +- .../filesystem/restful/api/FsRestfulApi.java | 557 +++++++++- .../linkis/filesystem/util/WorkspaceUtil.java | 4 + .../mapper/common/CgManagerLabelMapper.xml | 6 +- .../ConfigurationConfigKeyMapper.xml | 22 +- .../common/ConfigurationConfigValueMapper.xml | 11 +- .../ConfigurationKeyEngineRelationMapper.xml | 5 +- .../DatasourceAccessMapper.xml | 8 +- .../{mysql => common}/DatasourceEnvMapper.xml | 10 +- .../DatasourceTypeKeyMapper.xml | 10 +- .../DatasourceTypeMapper.xml | 12 +- .../EngineConnPluginBmlResourcesMapper.xml | 6 +- .../GatewayAuthTokenMapper.xml | 10 +- .../PsErrorCodeMapper.xml | 10 +- .../RmExternalResourceProviderMapper.xml | 12 +- .../UdfBaseInfoMapper.xml} | 18 +- .../{mysql => common}/UdfManagerMapper.xml | 6 +- .../{mysql => common}/UdfTreeMapper.xml | 12 +- .../mapper/{mysql => common}/VarMapper.xml | 8 +- .../mapper/mysql/PsErrorCodeMapper.xml | 47 - 
.../ConfigurationConfigKeyMapper.xml | 68 -- .../postgresql/DatasourceAccessMapper.xml | 49 - .../mapper/postgresql/DatasourceEnvMapper.xml | 51 - .../postgresql/DatasourceTypeKeyMapper.xml | 64 -- .../postgresql/DatasourceTypeMapper.xml | 50 - .../postgresql/GatewayAuthTokenMapper.xml | 51 - .../RmExternalResourceProviderMapper.xml | 47 - .../mapper/postgresql/UdfTreeMapper.xml | 51 - .../resources/mapper/postgresql/VarMapper.xml | 61 -- .../linkis/filesystem/service/FsService.scala | 20 + .../filesystem/validator/PathValidator.scala | 16 +- .../linkis/basedatamanager/server/Scan.java | 26 - .../server/WebApplicationServer.java | 34 - .../server/dao/BaseDaoTest.java | 31 - .../dao/ConfigurationConfigKeyMapperTest.java | 86 -- .../ConfigurationConfigValueMapperTest.java | 65 -- ...figurationKeyEngineRelationMapperTest.java | 53 - .../dao/DatasourceAccessMapperTest.java | 48 - .../server/dao/DatasourceEnvMapperTest.java | 55 - .../server/dao/DatasourceTypeMapperTest.java | 39 - ...ngineConnPluginBmlResourcesMapperTest.java | 56 -- .../dao/GatewayAuthTokenMapperTest.java | 55 - .../server/dao/PsErrorCodeMapperTest.java | 50 - .../RmExternalResourceProviderMapperTest.java | 52 - .../server/dao/UdfManagerMapperTest.java | 47 - .../server/dao/UdfTreeMapperTest.java | 54 - .../ConfigurationTemplateRestfulApiTest.java | 143 --- .../DatasourceAccessRestfulApiTest.java | 149 --- .../restful/DatasourceEnvRestfulApiTest.java | 138 --- .../DatasourceTypeKeyRestfulApiTest.java | 158 --- .../restful/DatasourceTypeRestfulApiTest.java | 159 --- .../restful/ErrorCodeRestfulApiTest.java | 154 --- .../GatewayAuthTokenRestfulApiTest.java | 161 --- .../server/restful/MvcUtils.java | 126 --- ...xternalResourceProviderRestfulApiTest.java | 154 --- .../restful/UdfManagerRestfulApiTest.java | 151 --- .../server/restful/UdfTreeRestfulApiTest.java | 159 --- .../service/DatasourceAccessServiceTest.java | 63 -- .../service/DatasourceEnvServiceTest.java | 65 -- 
.../service/DatasourceTypeKeyServiceTest.java | 65 -- .../service/DatasourceTypeServiceTest.java | 64 -- .../server/service/ErrorCodeServiceTest.java | 63 -- .../service/GatewayAuthTokenServiceTest.java | 63 -- ...RmExternalResourceProviderServiceTest.java | 60 -- .../server/service/UdfManagerServiceTest.java | 59 -- .../server/service/UdfTreeServiceTest.java | 60 -- .../apache/linkis/errorcode/server/Scan.java | 26 - .../server/WebApplicationServer.java | 34 - .../errorcode/server/dao/BaseDaoTest.java | 31 - .../server/dao/ErrorCodeMapperTest.java | 40 - .../restful/api/FsRestfulApiTest.java | 366 ++++--- .../java/org/apache/linkis/variable/Scan.java | 26 - .../linkis/variable/WebApplicationServer.java | 34 - .../linkis/variable/dao/BaseDaoTest.java | 31 - .../linkis/variable/dao/VarMapperTest.java | 124 --- .../linkis/variable/restful/MvcUtils.java | 115 --- .../restful/api/VariableRestfulApiTest.java | 112 --- .../variable/service/VariableServiceTest.java | 85 -- .../src/test/resources/application.properties | 29 +- .../resources/basedata_manager_create.sql | 451 --------- .../resources/basedata_manager_create_pg.sql | 252 ----- .../src/test/resources/error_code_create.sql | 34 - .../test/resources/error_code_create_pg.sql | 32 - .../src/test/resources/info.text | 19 - .../src/test/resources/variable_create.sql | 61 -- .../src/test/resources/variable_create_pg.sql | 59 -- .../common/lock/dao/CommonLockMapperTest.java | 13 + .../src/test/resources/application.properties | 10 +- .../src/test/resources/create.sql | 6 +- .../src/test/resources/create_pg.sql | 31 - .../src/test/resources/data.sql | 22 + .../apache/linkis/udf/api/UDFRestfulApi.java | 280 ++++++ .../udf/dao/PythonModuleInfoMapper.java} | 34 +- .../org/apache/linkis/udf/dao/UDFDao.java | 4 +- .../linkis/udf/entity/PythonModuleInfo.java | 158 +++ .../udf/service/PythonModuleInfoService.java} | 28 +- .../apache/linkis/udf/service/UDFService.java | 2 +- .../impl/PythonModuleInfoServiceImpl.java | 64 ++ 
.../udf/service/impl/UDFServiceImpl.java | 4 +- .../mapper/common/PythonModuleInfoMapper.xml | 93 ++ .../mapper/{postgresql => common}/UDFDao.xml | 184 ++-- .../resources/mapper/common/UDFTreeDao.xml | 56 +- .../{postgresql => common}/UDFVersionDao.xml | 30 +- .../main/resources/mapper/mysql/UDFDao.xml | 471 --------- .../resources/mapper/mysql/UDFVersionDao.xml | 130 --- .../linkis/udf/api/rpc/UdfReceiver.scala | 56 +- .../udf/api/rpc/UdfReceiverChooser.scala | 9 +- .../udf/dao/PythonModuleInfoMapperTest.java | 112 +++ .../service/PythonModuleInfoServiceTest.java | 129 +++ .../src/test/resources/application.properties | 11 +- .../src/test/resources/create.sql | 71 +- .../src/test/resources/create_pg.sql | 143 --- .../src/test/resources/data.sql | 61 ++ .../conf/TokenConfiguration.scala | 4 +- .../service/CachedTokenService.scala | 2 +- .../authentication/dao/TokenDaoTest.java | 2 +- .../service/CachedTokenServiceTest.java | 2 +- .../src/test/resources/create.sql | 2 +- .../src/test/resources/create_pg.sql | 2 +- .../gateway/config/GatewayConfiguration.scala | 7 +- .../linkis/gateway/security/UserRestful.scala | 12 + .../dws/config/DWSClientConfig.scala | 9 +- .../ujes/parser/ECMRequestGatewayParser.scala | 74 ++ .../parser/EntranceRequestGatewayParser.scala | 14 +- .../route/AbstractLabelGatewayRouter.scala | 31 +- .../http/IpPriorityLoadBalancer.java | 19 +- .../src/apps/URM/module/header/index.vue | 12 +- .../src/apps/linkis/module/header/index.vue | 8 +- linkis-web/src/dss/module/header/index.vue | 8 +- tool/dependencies/known-dependencies.txt | 12 +- 1121 files changed, 29759 insertions(+), 44534 deletions(-) delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/FSFactory.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/conf/LinkisStorageConf.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/csv/StorageCSVWriter.java delete mode 100644 
linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/DataType.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/Dolphin.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntity.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntitySerializer.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelXlsReader.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/FirstRowDeal.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/RowToCsvDeal.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/StorageExcelWriter.java rename linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/exception/ExecuteException.java => linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColLengthExceedException.java (65%) create mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColumnIndexExceedException.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildOSSSystem.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildS3FileSystem.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/OSSFileSystem.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/S3FileSystem.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOMethodInterceptorFactory.java delete mode 100644 
linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/DefaultResultSetFactory.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/OrcResultSetReader.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/OrcResultSetWriter.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ParquetResultSetReader.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ParquetResultSetWriter.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetFactory.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetReaderFactory.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetWriterFactory.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSet.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetReader.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetWriter.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/html/HtmlResultSet.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultDeserializer.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSerializer.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSet.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/picture/PictureResultSet.java delete mode 
100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultDeserializer.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultSerializer.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultSet.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSerializer.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSet.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/Compaction.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ParserFactory.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptFsWriter.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/Variable.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/VariableParser.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/PYScriptCompaction.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/QLScriptCompaction.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/CommonScriptParser.java delete mode 
100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/PYScriptParser.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/QLScriptParser.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ScalaScriptParser.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ShellScriptParser.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/reader/StorageScriptFsReader.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriter.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/AbstractFileSource.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSource.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSplit.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/ResultsetFileSource.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/TextFileSource.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/OrcUtils.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageConfiguration.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageHelper.java delete mode 100644 linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageUtils.java create mode 100644 
linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/FSFactory.scala rename linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/WorkspaceClientFactoryTest.scala => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/LineMetaData.scala (69%) rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/operator/OperatorFactory.java => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/LineRecord.scala (67%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/entity/ResourceOperationType.java => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsReader.scala (85%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsWriter.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/AlertUtils.java => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVReader.scala (91%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/DataType.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/Dolphin.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/MethodEntity.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelFsReader.scala rename linkis-commons/linkis-storage/src/main/{java/org/apache/linkis/storage/script/ScriptMetaData.java => 
scala/org/apache/linkis/storage/excel/ExcelFsWriter.scala} (59%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelXlsReader.java rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/rm/ResultResource.java => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelReader.scala (91%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala rename linkis-commons/linkis-storage/src/main/{java/org/apache/linkis/storage/excel/StorageMultiExcelWriter.java => scala/org/apache/linkis/storage/excel/StorageMultiExcelWriter.scala} (58%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOClient.scala rename linkis-commons/linkis-storage/src/main/{java/org/apache/linkis/storage/io/IOClientFactory.java => scala/org/apache/linkis/storage/io/IOMethodInterceptorCreator.scala} (53%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala rename linkis-commons/linkis-storage/src/main/{java/org/apache/linkis/storage/resultset/ResultRecord.java => scala/org/apache/linkis/storage/resultset/ResultMetaData.scala} (85%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultRecord.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetFactory.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetWriter.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala create mode 100644 
linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/html/HtmlResultSet.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOMetaData.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IORecord.scala rename linkis-commons/linkis-storage/src/main/{java/org/apache/linkis/storage/script/ScriptFsReader.java => scala/org/apache/linkis/storage/resultset/io/IOResultDeserializer.scala} (56%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSerializer.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSet.scala rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/label/LabelUpdateRequest.java => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/picture/PictureResultSet.scala (53%) rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/RequestEngineUnlock.java => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableMetaData.scala (70%) rename linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/exception/PythonExecuteError.java => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala (71%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala create mode 100644 
linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSerializer.scala rename linkis-commons/linkis-storage/src/main/{java/org/apache/linkis/storage/resultset/table/TableMetaData.java => scala/org/apache/linkis/storage/resultset/table/TableResultSet.scala} (57%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSerializer.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSet.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsReader.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsWriter.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala rename linkis-commons/linkis-storage/src/main/{java/org/apache/linkis/storage/LineMetaData.java => scala/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.scala} (60%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/PYScriptCompaction.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/QLScriptCompaction.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.scala rename linkis-commons/linkis-storage/src/main/{java/org/apache/linkis/storage/excel/ExcelFsWriter.java => scala/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.scala} (56%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/CommonScriptParser.scala rename 
linkis-public-enhancements/linkis-pes-common/src/test/scala/org/apache/linkis/errorcode/common/LinkisErrorCodeTest.scala => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/PYScriptParser.scala (59%) rename linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/response/ScriptFromBMLResponseTest.scala => linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/QLScriptParser.scala (59%) create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/ScalaScriptParser.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/ShellScriptParser.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/reader/StorageScriptFsReader.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/writer/StorageScriptFsWriter.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/AbstractFileSource.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/ResultsetFileSource.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/TextFileSource.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/FileSystemUtils.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageConfiguration.scala create mode 100644 linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageHelper.scala create mode 100644 
linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala delete mode 100644 linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/LineMetaDataTest.java delete mode 100644 linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/resultset/StorageResultSetWriterFactoryTest.java delete mode 100644 linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/source/ResultsetFileSourceTest.java delete mode 100644 linkis-commons/linkis-storage/src/test/resources/result-read-test.dolphin delete mode 100644 linkis-commons/linkis-storage/src/test/resources/storage-read-test.dolphin rename linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/WorkspaceClientImplTest.scala => linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageUtilsTest.scala (72%) delete mode 100644 linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/utils/ECPathUtils.java rename linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/{GovernanceCommonConf.scala => GovernaceCommonConf.scala} (81%) create mode 100644 linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala delete mode 100644 linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.java delete mode 100644 linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.java rename linkis-computation-governance/{linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/score/LabelScoreServiceInstance.java => 
linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/utils/ECMCacheUtils.java} (53%) create mode 100644 linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala create mode 100644 linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala rename linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/restful/ListenerTest.java => linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/ECMMetricsService.scala (91%) create mode 100644 linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMMetricsService.scala delete mode 100644 linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/test/java/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterServiceTest.java delete mode 100644 linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/java/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.java create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.scala create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoad.scala create mode 100644 
linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoadEngineConnHook.scala create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonSparkEngineHook.scala rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.java => linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/AbstractLogCache.java (50%) rename linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/listener/test/TestContextValue.java => linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/TimeLogCache.java (68%) delete mode 100644 linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.java create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala rename linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/{EngineConnPidCallback.scala => EngineConnIdentifierCallback.scala} (99%) create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnTimedCallback.scala create mode 100644 
linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorCode.scala delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/EngineConnPluginLoaderConf.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/conf/EngineConnPluginConfiguration.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/loader/EngineConnPluginsLoaderFactory.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/DefaultEngineConnBmlResourceGenerator.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/EngineConnLocalizeResourceImpl.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnLaunchService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceFactoryService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceService.java delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceRequest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/GetEngineConnResourceRequest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/RefreshAllEngineConnResourceRequest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/RefreshEngineConnResourceRequest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ApplicationManagerSpringConfiguration.java rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/{service/common/metrics => am/converter}/MetricsConverter.java (97%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/{service/common => am}/label/LabelChecker.java (95%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/{service/common => am}/pointer/EMNodPointer.java (96%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/{service/common => am}/pointer/EngineNodePointer.java (96%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/{service/common => am}/pointer/NodePointer.java (96%) rename 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/{service/common => am}/pointer/NodePointerBuilder.java (94%) delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/DefaultECAvailableRule.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/DefaultNodeSelector.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/rule/AvailableNodeSelectRule.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/rule/ConcurrencyNodeSelectRule.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/rule/NewECMStandbyRule.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/rule/ResourceNodeSelectRule.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/rule/ScoreNodeSelectRule.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/selector/rule/TaskInfoNodeSelectRule.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/cache/ConfCacheRemoveBroadcastListener.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/DefaultECMOperateService.java delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/DefaultEMEngineService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/DefaultEMInfoService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/DefaultEMRegisterService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/DefaultEMUnregisterService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/ECMOperateService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/EMRegisterService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/em/EMUnregisterService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/AbstractEngineService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineAskEngineService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineConnCanKillService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineCreateService.java delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineInfoService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineOperateService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineRecycleService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineReuseService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/DefaultEngineStopService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/EngineConnStatusCallbackService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/EngineInfoService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/EngineOperateService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/EngineStopService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/heartbeat/AMHeartbeatService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/monitor/NodeHeartbeatMonitor.java create mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/EMUtils.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/LinkisUtils.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/utils/AMUtils.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/utils/DefaultRetryHandler.java create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/CanCreateECRes.java rename linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/listener/test/TestContextKey.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/vo/ConfigVo.java (51%) delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/conf/LabelManagerConf.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/score/DefaultNodeLabelScorer.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/service/NodeLabelRemoveService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/service/NodeLabelService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelAddService.java delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelRemoveService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/service/impl/DefaultResourceLabelService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/service/impl/DefaultUserLabelService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/entity/LabelResourceMapping.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/external/kubernetes/KubernetesResourceRequester.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/external/parser/KubernetesResourceIdentifierParser.java create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/external/yarn/YarnQueueInfo.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/message/RMMessageService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/restful/RMMonitorRest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/RequestResourceService.java delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/ResourceLockService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/ChangeType.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/DefaultReqResourceService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/DefaultResourceManager.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/DriverAndKubernetesReqResourceService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/DriverAndYarnReqResourceService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/LabelResourceServiceImpl.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/ResourceLogService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/service/impl/UserResourceService.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/RMUtils.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/UserConfiguration.java rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/resources/mapper/{postgresql => 
common}/EngineConnBmlResourceMapper.xml (69%) delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/resources/mapper/mysql/EngineConnBmlResourceMapper.xml create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/loader/EngineConnPluginLoaderConf.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/conf/EngineConnPluginConfiguration.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/engineplugin/server/interceptor/EngineConnLaunchInterceptor.java => scala/org/apache/linkis/engineplugin/server/interceptor/EngineConnLaunchInterceptor.scala} (76%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/service/common/label/ManagerLabelService.java => scala/org/apache/linkis/engineplugin/server/loader/EngineConnPluginsLoader.scala} (67%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/localize/DefaultEngineConnBmlResourceGenerator.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/label/service/UserLabelService.java => scala/org/apache/linkis/engineplugin/server/localize/EngineConnBmlResourceGenerator.scala} (58%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/service/DefaultEngineConnLaunchService.scala create mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceFactoryService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceService.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/engineplugin/server/service/EngineConnLaunchService.java => scala/org/apache/linkis/engineplugin/server/service/EngineConnLaunchService.scala} (73%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/engineplugin/server/service/EngineConnResourceFactoryService.java => scala/org/apache/linkis/engineplugin/server/service/EngineConnResourceFactoryService.scala} (67%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/service/EngineConnResourceService.scala rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultMetaData.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/hook/AskEngineConnHook.scala (84%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/engine/EngineAskEngineService.java => scala/org/apache/linkis/manager/am/hook/AskEngineConnHookContext.scala} (78%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/DefaultNodeSelector.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/ECAvailableRule.scala rename 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/selector/NodeSelector.java => scala/org/apache/linkis/manager/am/selector/NodeSelector.scala} (74%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/rule/AvailableNodeSelectRule.scala rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/LineRecord.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/rule/ConcurrencyNodeSelectRule.scala (60%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/selector/rule/HotspotExclusionRule.java => scala/org/apache/linkis/manager/am/selector/rule/HotspotExclusionRule.scala} (53%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/rule/NewECMStandbyRule.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/selector/rule/NodeSelectRule.java => scala/org/apache/linkis/manager/am/selector/rule/NodeSelectRule.scala} (81%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/rule/OverLoadNodeSelectRule.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/rule/ResourceNodeSelectRule.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/rule/ScoreNodeSelectRule.scala create mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/selector/rule/TaskInfoNodeSelectRule.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/EMEngineService.java => scala/org/apache/linkis/manager/am/service/EMEngineService.scala} (52%) rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/Parser.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/EngineService.scala (79%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/HeartbeatService.java => scala/org/apache/linkis/manager/am/service/HeartbeatService.scala} (85%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/cache/ConfCacheRemoveBroadcastListener.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultECMOperateService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMEngineService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMInfoService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMRegisterService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMUnregisterService.scala rename 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/engineplugin/server/localize/EngineConnLocalizeResource.java => scala/org/apache/linkis/manager/am/service/em/ECMOperateService.scala} (72%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/em/EMInfoService.java => scala/org/apache/linkis/manager/am/service/em/EMInfoService.scala} (63%) rename linkis-computation-governance/linkis-manager/{linkis-manager-common/src/main/java/org/apache/linkis/manager/rm/ResourceInfo.java => linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/EMRegisterService.scala} (66%) rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOClient.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/EMUnregisterService.scala (70%) rename linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/SolrSourceConfig.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/AbstractEngineService.scala (57%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineAskEngineService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineConnCanKillService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineCreateService.scala create mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineInfoService.scala rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/Column.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineOperateService.scala (55%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineRecycleService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineReuseService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineStopService.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/engine/DefaultEngineSwitchService.java => scala/org/apache/linkis/manager/am/service/engine/DefaultEngineSwitchService.scala} (68%) rename linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/bean/ClientTestContextValue.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineAskEngineService.scala (59%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/EngineService.java => scala/org/apache/linkis/manager/am/service/engine/EngineConnCanKillService.scala} (75%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/engine/EngineCreateService.java => 
scala/org/apache/linkis/manager/am/service/engine/EngineCreateService.scala} (69%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineInfoService.scala rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableRecord.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineKillService.scala (67%) rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IORecord.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineOperateService.scala (67%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/engine/EngineRecycleService.java => scala/org/apache/linkis/manager/am/service/engine/EngineRecycleService.scala} (72%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/engine/EngineReuseService.java => scala/org/apache/linkis/manager/am/service/engine/EngineReuseService.scala} (69%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineStopService.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/service/engine/EngineSwitchService.java => scala/org/apache/linkis/manager/am/service/engine/EngineSwitchService.scala} (72%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/heartbeat/AMHeartbeatService.scala create mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/monitor/NodeHeartbeatMonitor.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/utils/AMUtils.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/label/LabelManagerUtils.java => scala/org/apache/linkis/manager/label/LabelManagerUtils.scala} (61%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/score/DefaultNodeLabelScorer.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/score/LabelScoreServiceInstance.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/label/score/NodeLabelScorer.java => scala/org/apache/linkis/manager/label/score/NodeLabelScorer.scala} (60%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/label/service/NodeLabelAddService.java => scala/org/apache/linkis/manager/label/service/NodeLabelAddService.scala} (75%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/NodeLabelRemoveService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/NodeLabelService.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/label/service/ResourceLabelService.java => scala/org/apache/linkis/manager/label/service/ResourceLabelService.scala} (62%) rename 
linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/entity/enumeration/ExpireTypeTest.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/UserLabelService.scala (61%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelAddService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelRemoveService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultResourceLabelService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultUserLabelService.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/rm/external/kubernetes/KubernetesResourceIdentifier.java => scala/org/apache/linkis/manager/rm/entity/LabelResourceMap.scala} (51%) rename linkis-computation-governance/linkis-manager/{linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/OperateResponse.java => linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/entity/ResourceOperationType.scala} (81%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/message/RMMessageService.scala create mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/restful/RMMonitorRest.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/rm/service/LabelResourceService.java => scala/org/apache/linkis/manager/rm/service/LabelResourceService.scala} (53%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/RequestResourceService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/ResourceLockService.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/rm/service/ResourceManager.java => scala/org/apache/linkis/manager/rm/service/ResourceManager.scala} (54%) rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOMetaData.java => linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/DefaultReqResourceService.scala (66%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/DefaultResourceManager.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/DriverAndYarnReqResourceService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/LabelResourceServiceImpl.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/ResourceLogService.scala create mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/UserResourceService.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/utils/AcrossClusterRulesJudgeUtils.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/utils/RMUtils.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/utils/UserConfiguration.scala rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/service/common/label/LabelFilter.java => scala/org/apache/linkis/manager/service/common/label/LabelFilter.scala} (73%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/{java/org/apache/linkis/manager/am/selector/ECAvailableRule.java => scala/org/apache/linkis/manager/service/common/label/ManagerLabelService.scala} (68%) delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/java/org/apache/linkis/Scan.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/java/org/apache/linkis/WebApplicationServer.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/java/org/apache/linkis/engineplugin/server/dao/BaseDaoTest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/java/org/apache/linkis/engineplugin/server/dao/EngineConnBmlResourceDaoTest.java rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/java/org/apache/linkis/manager/am/util/{ECResourceInfoLinkisUtilsTest.java => ECResourceInfoUtilsTest.java} (98%) delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/java/org/apache/linkis/manager/rm/external/dao/ExternalResourceProviderDaoTest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/resources/application.properties delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/resources/create.sql delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/resources/create_pg.sql delete mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/resources/linkis.properties create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/scala/org/apache/linkis/manager/label/conf/LabelManagerConfTest.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/scala/org/apache/linkis/manager/rm/utils/RMUtilsTest.scala delete mode 100644 linkis-computation-governance/linkis-manager/linkis-label-common/src/test/java/org/apache/linkis/manager/label/entity/engine/EngineTypeLabelTest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/conf/ManagerCommonConf.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/operator/Operator.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/operator/OperatorFactoryImpl.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/RequestManagerUnlock.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/ResponseEngineLock.java delete mode 
100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/ResponseEngineUnlock.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/em/ECMOperateRequest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/em/ECMOperateResponse.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/em/EMResourceRegisterRequest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/em/RegisterEMRequest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/em/RegisterEMResponse.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineAskAsyncResponse.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineAsyncResponse.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineConnStatusCallback.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineConnStatusCallbackToAM.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineCreateError.java delete mode 100644 
linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineCreateSuccess.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineOperateRequest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineOperateResponse.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/label/LabelReportRequest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/resource/ResourceProtocol.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/resource/ResourceUsedProtocol.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/resource/ResourceWithStatus.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/resource/ResponseTaskRunningInfo.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/rm/NotEnoughResource.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/rm/RequestResourceAndWait.java create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/operator/Operator.scala create mode 100644 
linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/operator/OperatorFactory.scala rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/{java/org/apache/linkis/manager/common/protocol/RequestEngineLock.java => scala/org/apache/linkis/manager/common/protocol/EngineLock.scala} (56%) rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/{java/org/apache/linkis/manager/common/protocol/OperateRequest.java => scala/org/apache/linkis/manager/common/protocol/OperateRequest.scala} (53%) rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptRecord.java => linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/OperateResponse.scala (80%) rename linkis-computation-governance/linkis-manager/{linkis-application-manager/src/main/java/org/apache/linkis/manager/am/utils/RetryHandler.java => linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/em/ECMOperateRequest.scala} (58%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/em/ECMOperateResponse.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/em/EMResourceRegisterRequest.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/em/RegisterEMRequest.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/em/RegisterEMResponse.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/engine/EngineAsyncResponse.scala create 
mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/engine/EngineConnStatusCallback.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/engine/EngineOperateRequest.scala create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/engine/EngineOperateResponse.scala rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelAnalysisException.java => linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/label/LabelUpdateRequest.scala (65%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/resource/ResourceProtocol.scala rename linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/listener/execution/ExecutionTaskStatusListener.scala => linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/resource/ResourceWithStatus.scala (70%) rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/{java/org/apache/linkis/manager/common/protocol/EngineLock.java => scala/org/apache/linkis/manager/rm/ResourceInfo.scala} (82%) rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOMethodInterceptorCreator.java => linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/rm/ResultResource.scala (79%) create mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/ECResourceRecordMapper.xml rename 
linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/{postgresql => common}/LabelManagerMapper.xml (92%) rename linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/{postgresql => common}/ResourceManagerMapper.xml (85%) delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/mysql/ECResourceRecordMapper.xml delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/mysql/LabelManagerMapper.xml delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/mysql/ResourceManagerMapper.xml delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/postgresql/ECResourceRecordMapper.xml delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/ECResourceRecordMapperTest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/LabelManagerMapperTest.java delete mode 100644 linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/resources/create_pg.sql create mode 100644 linkis-dist/package/db/upgrade/1.7.0_schema/mysql/linkis_ddl.sql create mode 100644 linkis-dist/package/db/upgrade/1.7.0_schema/mysql/linkis_dml.sql create mode 100644 linkis-engineconn-plugins/flink/flink-core/src/main/java/org/apache/linkis/engineconnplugin/flink/errorcode/FlinkErrorCodeSummary.java create mode 100644 linkis-engineconn-plugins/flink/flink-core/src/main/java/org/apache/linkis/engineconnplugin/flink/exception/ExecutorInitException.java create mode 100644 linkis-engineconn-plugins/flink/flink-core/src/main/java/org/apache/linkis/engineconnplugin/flink/exception/FlinkInitFailedException.java 
create mode 100644 linkis-engineconn-plugins/flink/flink-core/src/main/java/org/apache/linkis/engineconnplugin/flink/exception/JobExecutionException.java create mode 100644 linkis-engineconn-plugins/flink/flink-core/src/main/java/org/apache/linkis/engineconnplugin/flink/exception/SqlExecutionException.java rename linkis-engineconn-plugins/{shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/exception/NoCorrectUserException.java => flink/flink-core/src/main/java/org/apache/linkis/engineconnplugin/flink/exception/SqlParseException.java} (63%) create mode 100644 linkis-engineconn-plugins/flink/flink-core/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/FlinkManagerConcurrentExecutor.scala delete mode 100644 linkis-engineconn-plugins/io_file/src/test/java/executor/IoEngineConnExecutorTest.java delete mode 100644 linkis-engineconn-plugins/io_file/src/test/resources/testIoResult.dolphin delete mode 100644 linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/PythonEngineConnPlugin.java delete mode 100644 linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/conf/PythonEngineConfiguration.java delete mode 100644 linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/executor/PythonEngineConnExecutor.java delete mode 100644 linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/hook/PythonVersionEngineHook.java delete mode 100644 linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/utils/Kind.java create mode 100644 linkis-engineconn-plugins/python/src/main/scala/org/apache/linkis/manager/engineplugin/python/PythonEngineConnPlugin.scala create mode 100644 linkis-engineconn-plugins/python/src/main/scala/org/apache/linkis/manager/engineplugin/python/conf/PythonEngineConfiguration.scala rename 
linkis-engineconn-plugins/python/src/main/{java/org/apache/linkis/manager/engineplugin/python/exception/PythonSessionStartFailedExeception.java => scala/org/apache/linkis/manager/engineplugin/python/exception/NoSupportEngineException.scala} (68%) create mode 100644 linkis-engineconn-plugins/python/src/main/scala/org/apache/linkis/manager/engineplugin/python/exception/PythonSessionNullException.scala create mode 100644 linkis-engineconn-plugins/python/src/main/scala/org/apache/linkis/manager/engineplugin/python/executor/PythonEngineConnExecutor.scala create mode 100644 linkis-engineconn-plugins/python/src/main/scala/org/apache/linkis/manager/engineplugin/python/hook/PythonVersionEngineHook.scala rename linkis-engineconn-plugins/{shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/builder/ShellProcessEngineConnLaunchBuilder.java => python/src/main/scala/org/apache/linkis/manager/engineplugin/python/launch/PythonProcessEngineConnLaunchBuilder.scala} (81%) create mode 100644 linkis-engineconn-plugins/python/src/main/scala/org/apache/linkis/manager/engineplugin/python/utils/Kind.scala create mode 100644 linkis-engineconn-plugins/python/src/main/scala/org/apache/linkis/manager/engineplugin/python/utils/State.scala delete mode 100644 linkis-engineconn-plugins/python/src/test/java/org/apache/linkis/manager/engineplugin/python/exception/TestNoSupportEngineException.java delete mode 100644 linkis-engineconn-plugins/python/src/test/java/org/apache/linkis/manager/engineplugin/python/executor/TestPythonEngineConnExecutor.java delete mode 100644 linkis-engineconn-plugins/python/src/test/java/org/apache/linkis/manager/engineplugin/python/factory/TestPythonEngineConnFactory.java rename linkis-engineconn-plugins/python/src/test/{java/org/apache/linkis/manager/engineplugin/python/TestPythonEngineConnPlugin.java => scala/org/apache/linkis/manager/engineplugin/python/TestPythonEngineConnPlugin.scala} (73%) rename 
linkis-engineconn-plugins/python/src/test/{java/org/apache/linkis/manager/engineplugin/python/conf/TestPythonEngineConfiguration.java => scala/org/apache/linkis/manager/engineplugin/python/conf/TestPythonEngineConfiguration.scala} (59%) create mode 100644 linkis-engineconn-plugins/python/src/test/scala/org/apache/linkis/manager/engineplugin/python/exception/TestNoSupportEngineException.scala create mode 100644 linkis-engineconn-plugins/python/src/test/scala/org/apache/linkis/manager/engineplugin/python/executor/TestPythonEngineConnExecutor.scala create mode 100644 linkis-engineconn-plugins/python/src/test/scala/org/apache/linkis/manager/engineplugin/python/factory/TestPythonEngineConnFactory.scala rename linkis-engineconn-plugins/python/src/test/{java/org/apache/linkis/manager/engineplugin/python/utils/TestKind.java => scala/org/apache/linkis/manager/engineplugin/python/utils/TestKind.scala} (66%) rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/resource/ResponseTaskYarnResource.java => linkis-engineconn-plugins/python/src/test/scala/org/apache/linkis/manager/engineplugin/python/utils/TestState.scala (52%) delete mode 100644 linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/ShellEngineConnPlugin.java delete mode 100644 linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/common/ShellEngineConnPluginConst.java delete mode 100644 linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/conf/ShellEngineConnConf.java delete mode 100644 linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/exception/ShellCodeErrorException.java delete mode 100644 linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/executor/ReaderThread.java delete mode 100644 
linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/executor/ShellECTaskInfo.java delete mode 100644 linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnConcurrentExecutor.java delete mode 100644 linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnExecutor.java delete mode 100644 linkis-engineconn-plugins/shell/src/main/java/org/apache/linkis/manager/engineplugin/shell/executor/YarnAppIdExtractor.java create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/ShellEngineConnPlugin.scala rename linkis-engineconn-plugins/{python/src/main/java/org/apache/linkis/manager/engineplugin/python/launch/PythonProcessEngineConnLaunchBuilder.java => shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/builder/ShellProcessEngineConnLaunchBuilder.scala} (81%) rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/IExcelRowDeal.java => linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/common/ShellEnginePluginConst.scala (76%) rename linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/EngineConnBmlResourceGenerator.java => linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/conf/ShellEngineConnConf.scala (68%) create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/exception/NoCorrectUserException.scala create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ReaderThread.scala create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ShellECTaskInfo.scala create mode 100644 
linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnConcurrentExecutor.scala create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnExecutor.scala create mode 100644 linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/YarnAppIdExtractor.scala delete mode 100644 linkis-engineconn-plugins/shell/src/test/java/org/apache/linkis/manager/engineplugin/shell/executor/TestShellEngineConnExecutor.java rename linkis-engineconn-plugins/shell/src/test/{java/org/apache/linkis/manager/engineplugin/shell/TestShellEngineConnPlugin.java => scala/org/apache/linkis/manager/engineplugin/shell/TestShellEngineConnPlugin.scala} (75%) rename linkis-engineconn-plugins/shell/src/test/{java/org/apache/linkis/manager/engineplugin/shell/common/TestShellEngineConnPluginConst.java => scala/org/apache/linkis/manager/engineplugin/shell/common/TestShellEngineConnPluginConst.scala} (70%) rename linkis-engineconn-plugins/shell/src/test/{java/org/apache/linkis/manager/engineplugin/shell/exception/TestNoCorrectUserException.java => scala/org/apache/linkis/manager/engineplugin/shell/exception/TestNoCorrectUserException.scala} (67%) create mode 100644 linkis-engineconn-plugins/shell/src/test/scala/org/apache/linkis/manager/engineplugin/shell/executor/TestShellEngineConnExecutor.scala delete mode 100644 linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/source/SolrSource.scala delete mode 100644 linkis-engineconn-plugins/spark/src/test/resources/etltest.dolphin delete mode 100644 linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/datacalc/TestExcelCala.scala delete mode 100644 linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/datacalc/TestKafkaCala.scala delete mode 100644 
linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/datacalc/TestRedisCalc.scala delete mode 100644 linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/datacalc/TestRocketmqCala.scala create mode 100644 linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/client/RemoteClientHolder.scala rename linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/rm/AvailableResource.java => linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/exception/DataSourceRpcErrorException.scala (69%) create mode 100644 linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/ConnectParamsResolver.scala create mode 100644 linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/SqoopDataSourceParamsResolver.scala delete mode 100644 linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino/TrinoEngineConnPlugin.java delete mode 100644 linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino/conf/TrinoConfiguration.java delete mode 100644 linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino/conf/TrinoEngineConfig.java delete mode 100644 linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino/exception/TrinoClientException.java delete mode 100644 linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino/exception/TrinoGrantmaException.java delete mode 100644 linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino/exception/TrinoModifySchemaException.java delete mode 100644 linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino/exception/TrinoStateInvalidException.java delete mode 100644 
linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino/executor/TrinoEngineConnExecutor.java rename {linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/java/org/apache/linkis/engineconn/common => linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino}/password/CommandPasswordCallback.java (93%) rename {linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/java/org/apache/linkis/engineconn/common => linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino}/password/StaticPasswordCallback.java (95%) delete mode 100644 linkis-engineconn-plugins/trino/src/main/java/org/apache/linkis/engineplugin/trino/utils/TrinoCode.java create mode 100644 linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/TrinoEngineConnPlugin.scala rename linkis-engineconn-plugins/trino/src/main/{java/org/apache/linkis/engineplugin/trino/builder/TrinoProcessEngineConnLaunchBuilder.java => scala/org/apache/linkis/engineplugin/trino/builder/TrinoProcessEngineConnLaunchBuilder.scala} (67%) create mode 100644 linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/conf/TrinoConfiguration.scala create mode 100644 linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/conf/TrinoEngineConfig.scala create mode 100644 linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/exception/TrinoException.scala create mode 100644 linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/executor/TrinoEngineConnExecutor.scala create mode 100644 linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/utils/TrinoCode.scala rename linkis-engineconn-plugins/trino/src/main/{java/org/apache/linkis/engineplugin/trino/utils/TrinoSQLHook.java => scala/org/apache/linkis/engineplugin/trino/utils/TrinoSQLHook.scala} (72%) 
create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ApplicationConfiguration.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/ResourceClear.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/TaskArchiveClear.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/JobMonitorUtils.java create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/index/JobIndexHitEvent.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/index/JobIndexRule.scala create mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/index/JobIndexSender.scala delete mode 100644 linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/UserLabelAlertUtils.scala rename linkis-public-enhancements/linkis-bml-server/src/main/resources/mapper/{postgresql => common}/BmlProjectMapper.xml (63%) rename linkis-public-enhancements/linkis-bml-server/src/main/resources/mapper/{mysql => common}/ResourceMapper.xml (82%) rename linkis-public-enhancements/linkis-bml-server/src/main/resources/mapper/{postgresql => common}/TaskMapper.xml (91%) rename linkis-public-enhancements/linkis-bml-server/src/main/resources/mapper/{postgresql => common}/VersionMapper.xml (74%) delete mode 100644 linkis-public-enhancements/linkis-bml-server/src/main/resources/mapper/mysql/BmlProjectMapper.xml delete mode 100644 linkis-public-enhancements/linkis-bml-server/src/main/resources/mapper/mysql/TaskMapper.xml delete mode 100644 linkis-public-enhancements/linkis-bml-server/src/main/resources/mapper/mysql/VersionMapper.xml delete mode 100644 
linkis-public-enhancements/linkis-bml-server/src/main/resources/mapper/postgresql/ResourceMapper.xml delete mode 100644 linkis-public-enhancements/linkis-bml-server/src/test/resources/create_pg.sql rename linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/{mysql => common}/AcrossClusterRuleMapper.xml (98%) rename linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/{mysql => common}/ConfigKeyLimitForUserMapper.xml (100%) rename linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/{mysql => common}/ConfigMapper.xml (98%) rename linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/{postgresql/UserTenantMapper.xml => common/DepartmentTenantMapper.xml} (53%) rename linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/{mysql => common}/TemplateConfigKeyMapper.xml (100%) rename linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/{mysql => common}/UserIpMapper.xml (93%) rename linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/{mysql => common}/UserTenantMapper.xml (97%) delete mode 100644 linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/AcrossClusterRuleMapper.xml delete mode 100644 linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigKeyLimitForUserMapper.xml delete mode 100644 linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigMapper.xml delete mode 100644 linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/TemplateConfigKeyMapper.xml delete mode 100644 linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/UserIpMapper.xml delete mode 100644 linkis-public-enhancements/linkis-configuration/src/test/resources/create_pg.sql rename 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/engine/EngineConnCanKillService.java => linkis-public-enhancements/linkis-cs-server/src/main/java/org/apache/linkis/cs/conf/CSConfiguration.java (67%) rename linkis-public-enhancements/linkis-cs-server/src/main/resources/mapper/{mysql => common}/contextHistoryMapper.xml (86%) rename linkis-public-enhancements/linkis-cs-server/src/main/resources/mapper/{mysql => common}/contextIDListenerMapper.xml (83%) rename linkis-public-enhancements/linkis-cs-server/src/main/resources/mapper/{mysql => common}/contextIDMapper.xml (98%) rename linkis-public-enhancements/linkis-cs-server/src/main/resources/mapper/{mysql => common}/contextMapMapper.xml (98%) delete mode 100644 linkis-public-enhancements/linkis-cs-server/src/main/resources/mapper/postgresql/contextHistoryMapper.xml delete mode 100644 linkis-public-enhancements/linkis-cs-server/src/main/resources/mapper/postgresql/contextIDListenerMapper.xml delete mode 100644 linkis-public-enhancements/linkis-cs-server/src/main/resources/mapper/postgresql/contextIDMapper.xml delete mode 100644 linkis-public-enhancements/linkis-cs-server/src/main/resources/mapper/postgresql/contextMapMapper.xml delete mode 100644 linkis-public-enhancements/linkis-cs-server/src/test/java/org/apache/linkis/cs/persistence/dao/ContextMapMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-cs-server/src/test/resources/create_pg.sql rename linkis-public-enhancements/linkis-instance-label-server/src/main/resources/mapper/{mysql => common}/InsLabelRelationMapper.xml (76%) rename linkis-public-enhancements/linkis-instance-label-server/src/main/resources/mapper/{mysql => common}/InstanceInfoMapper.xml (74%) rename linkis-public-enhancements/linkis-instance-label-server/src/main/resources/mapper/{mysql => common}/InstanceLabelMapper.xml (70%) delete mode 100644 
linkis-public-enhancements/linkis-instance-label-server/src/main/resources/mapper/postgresql/InsLabelRelationMapper.xml delete mode 100644 linkis-public-enhancements/linkis-instance-label-server/src/main/resources/mapper/postgresql/InstanceInfoMapper.xml delete mode 100644 linkis-public-enhancements/linkis-instance-label-server/src/main/resources/mapper/postgresql/InstanceLabelMapper.xml delete mode 100644 linkis-public-enhancements/linkis-instance-label-server/src/test/java/org/apache/linkis/instance/label/dao/InstanceInfoDaoTest.java delete mode 100644 linkis-public-enhancements/linkis-instance-label-server/src/test/resources/create_pg.sql create mode 100644 linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/util/JobhistoryUtils.java delete mode 100644 linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/dao/JobDetailMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/restful/api/QueryRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-jobhistory/src/test/resources/create_pg.sql delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/bml/client/errorcode/BmlClientErrorCodeSummaryTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/builder/ContextClientFactoryTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/builder/HttpContextClientConfigTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/errorcode/CsClientErrorCodeSummaryTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/Test.java delete mode 100644 
linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/bean/ClientTestContextKey.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/bean/ResourceValueBean.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/listener/CommonContextKeyListener.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/no_context_search/TestClear.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/restful/RestfulTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/service/TestInfo.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/service/TestRemove.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/service/TestSearchService.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/test_multiuser/TestChangeContext.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/test_multiuser/TestCreateContext.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/utils/ContextClientConfTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/utils/ContextServiceUtilsTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/utils/SerializeHelperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/datasource/client/TestDataSourceClient.scala delete mode 100644 
linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/datasource/client/TestHiveClient.scala delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/datasource/client/TestMysqlClient.scala delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/datasource/client/config/DatasourceClientConfigTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/datasource/client/exception/DataSourceClientBuilderExceptionTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/datasource/client/imp/LinkisDataSourceRemoteClientTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/errorcode/client/ClientConfigurationTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/errorcode/client/ErrorCodeClientBuilderTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/errorcode/client/LinkisErrorCodeClientTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/errorcode/client/handler/ErrorCodeHandlerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/errorcode/client/handler/LinkisErrorCodeHandlerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/errorcode/client/manager/LinkisErrorCodeManagerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/errorcode/client/synchronizer/LinkisErrorCodeSynchronizerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/errorcode/client/action/ErrorCodeActionTest.scala delete mode 100644 
linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/action/OpenScriptFromBMLActionTest.scala delete mode 100644 linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/conf/WorkspaceClientConfTest.scala create mode 100644 linkis-public-enhancements/linkis-pes-common/src/main/java/org/apache/linkis/udf/entity/PythonModuleInfoVO.java create mode 100644 linkis-public-enhancements/linkis-pes-common/src/main/scala/org/apache/linkis/udf/api/rpc/RequestPythonModuleProtocol.scala rename linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/csv/CSVFsWriter.java => linkis-public-enhancements/linkis-pes-common/src/main/scala/org/apache/linkis/udf/api/rpc/ResponsePythonModuleProtocol.scala (60%) delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/entity/enumeration/ContextScopeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/entity/enumeration/ContextTypeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/entity/enumeration/DBTypeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/entity/enumeration/WorkTypeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/protocol/ContextHistoryTypeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/protocol/ContextIDTypeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/protocol/ContextKeyTypeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/protocol/ContextKeyValueTypeTest.java delete mode 100644 
linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/protocol/ContextValueTypeTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/helper/ContextSerializationHelperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/impl/context/CombinedNodeIDContextIDSerializerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/impl/context/CommonContextKeySerializerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/impl/value/LinkisBMLResourceSerializerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/impl/value/data/CSResultDataSerializerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/impl/value/data/LinkisJobDataSerializerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/impl/value/metadata/CSTableSerializerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/impl/value/object/CSFlowInfosSerializerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/serialize/test/ContextSerializationHelperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/utils/CSCommonUtilsTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/common/utils/CSHighAvailableUtilsTest.java delete mode 100644 
linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/listener/test/TestContextID.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/listener/test/TestContextKeyValue.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/errorcode/common/CommonConfTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/metadata/query/common/MdmConfigurationTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/metadata/query/common/cache/CacheConfigurationTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/metadata/query/common/cache/ConnCacheManagerTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/metadata/query/common/exception/MetaMethodInvokeExceptionTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/metadata/query/common/exception/MetaRuntimeExceptionTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/udf/excepiton/UDFExceptionTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/udf/utils/ConstantVarTest.java rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/ConfigurationConfigKeyMapper.xml (68%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/DatasourceAccessMapper.xml (91%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/DatasourceEnvMapper.xml (90%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/DatasourceTypeKeyMapper.xml (92%) rename 
linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/DatasourceTypeMapper.xml (86%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/GatewayAuthTokenMapper.xml (90%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{postgresql => common}/PsErrorCodeMapper.xml (86%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/RmExternalResourceProviderMapper.xml (86%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{postgresql/UdfManagerMapper.xml => common/UdfBaseInfoMapper.xml} (61%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/UdfManagerMapper.xml (93%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/UdfTreeMapper.xml (88%) rename linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/{mysql => common}/VarMapper.xml (91%) delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/mysql/PsErrorCodeMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/ConfigurationConfigKeyMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/DatasourceAccessMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/DatasourceEnvMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/DatasourceTypeKeyMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/DatasourceTypeMapper.xml delete mode 100644 
linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/GatewayAuthTokenMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/RmExternalResourceProviderMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/UdfTreeMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/main/resources/mapper/postgresql/VarMapper.xml delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/Scan.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/WebApplicationServer.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/BaseDaoTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/ConfigurationConfigKeyMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/ConfigurationConfigValueMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/ConfigurationKeyEngineRelationMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/DatasourceAccessMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/DatasourceEnvMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/DatasourceTypeMapperTest.java delete mode 100644 
linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/EngineConnPluginBmlResourcesMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/GatewayAuthTokenMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/PsErrorCodeMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/RmExternalResourceProviderMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/UdfManagerMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/UdfTreeMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/ConfigurationTemplateRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceAccessRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceEnvRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceTypeKeyRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceTypeRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/ErrorCodeRestfulApiTest.java delete mode 100644 
linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/GatewayAuthTokenRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/MvcUtils.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/RmExternalResourceProviderRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/UdfManagerRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/restful/UdfTreeRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/DatasourceAccessServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/DatasourceEnvServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/DatasourceTypeKeyServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/DatasourceTypeServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/ErrorCodeServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/GatewayAuthTokenServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/RmExternalResourceProviderServiceTest.java delete mode 100644 
linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/UdfManagerServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/service/UdfTreeServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/errorcode/server/Scan.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/errorcode/server/WebApplicationServer.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/errorcode/server/dao/BaseDaoTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/errorcode/server/dao/ErrorCodeMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/variable/Scan.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/variable/WebApplicationServer.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/variable/dao/BaseDaoTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/variable/dao/VarMapperTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/variable/restful/MvcUtils.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/variable/restful/api/VariableRestfulApiTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/java/org/apache/linkis/variable/service/VariableServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/basedata_manager_create.sql delete mode 100644 
linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/basedata_manager_create_pg.sql delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/error_code_create.sql delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/error_code_create_pg.sql delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/info.text delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/variable_create.sql delete mode 100644 linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/variable_create_pg.sql delete mode 100644 linkis-public-enhancements/linkis-ps-common-lock/src/test/resources/create_pg.sql create mode 100644 linkis-public-enhancements/linkis-ps-common-lock/src/test/resources/data.sql rename linkis-public-enhancements/{linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/CgManagerLabelMapperTest.java => linkis-udf-service/src/main/java/org/apache/linkis/udf/dao/PythonModuleInfoMapper.java} (50%) create mode 100644 linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/entity/PythonModuleInfo.java rename linkis-public-enhancements/{linkis-pes-publicservice/src/test/java/org/apache/linkis/basedatamanager/server/dao/DatasourceTypeKeyMapperTest.java => linkis-udf-service/src/main/java/org/apache/linkis/udf/service/PythonModuleInfoService.java} (54%) create mode 100644 linkis-public-enhancements/linkis-udf-service/src/main/java/org/apache/linkis/udf/service/impl/PythonModuleInfoServiceImpl.java create mode 100644 linkis-public-enhancements/linkis-udf-service/src/main/resources/mapper/common/PythonModuleInfoMapper.xml rename linkis-public-enhancements/linkis-udf-service/src/main/resources/mapper/{postgresql => common}/UDFDao.xml (76%) rename linkis-public-enhancements/linkis-udf-service/src/main/resources/mapper/{postgresql => common}/UDFVersionDao.xml (85%) 
delete mode 100644 linkis-public-enhancements/linkis-udf-service/src/main/resources/mapper/mysql/UDFDao.xml delete mode 100644 linkis-public-enhancements/linkis-udf-service/src/main/resources/mapper/mysql/UDFVersionDao.xml create mode 100644 linkis-public-enhancements/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/PythonModuleInfoMapperTest.java create mode 100644 linkis-public-enhancements/linkis-udf-service/src/test/java/org/apache/linkis/udf/service/PythonModuleInfoServiceTest.java delete mode 100644 linkis-public-enhancements/linkis-udf-service/src/test/resources/create_pg.sql create mode 100644 linkis-public-enhancements/linkis-udf-service/src/test/resources/data.sql create mode 100644 linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/parser/ECMRequestGatewayParser.scala diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala index ec65fd0a67..822bc2aa07 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala @@ -81,7 +81,7 @@ object Configuration extends Logging { "The request interface %s is abnormal. 
You can try to troubleshoot common problems in the knowledge base document" ) - val LINKIS_TOKEN = CommonVars("wds.linkis.token", "LINKIS-AUTH-eTaYLbQpmIulPyrXcMl") + val LINKIS_TOKEN = CommonVars("wds.linkis.token", "LINKIS-AUTH") val GLOBAL_CONF_CHN_NAME = "全局设置" diff --git a/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/utils/HDFSUtils.scala b/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/utils/HDFSUtils.scala index 3ebbbc33ba..e18837fd5f 100644 --- a/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/utils/HDFSUtils.scala +++ b/linkis-commons/linkis-hadoop-common/src/main/scala/org/apache/linkis/hadoop/common/utils/HDFSUtils.scala @@ -152,11 +152,11 @@ object HDFSUtils extends Logging { hdfsFileSystemContainer.updateLastAccessTime hdfsFileSystemContainer.getFileSystem } else { - getHDFSUserFileSystem(userName, label, getConfiguration(userName, label)) + getHDFSUserFileSystem(userName, label, getConfigurationByLabel(userName, label)) } } } else { - getHDFSUserFileSystem(userName, label, getConfiguration(userName, label)) + getHDFSUserFileSystem(userName, label, getConfigurationByLabel(userName, label)) } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/FSFactory.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/FSFactory.java deleted file mode 100644 index 34a92ead85..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/FSFactory.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.factory.BuildFactory; -import org.apache.linkis.storage.utils.StorageConfiguration; -import org.apache.linkis.storage.utils.StorageUtils; - -import java.text.MessageFormat; -import java.util.Map; - -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.UNSUPPORTED_FILE; - -public class FSFactory { - private static final Map buildClasses = - StorageUtils.loadClass( - StorageConfiguration.STORAGE_BUILD_FS_CLASSES.getValue(), t -> t.fsName()); - - public static BuildFactory getBuildFactory(String fsName) { - if (!buildClasses.containsKey(fsName)) { - throw new StorageWarnException( - UNSUPPORTED_FILE.getErrorCode(), - MessageFormat.format(UNSUPPORTED_FILE.getErrorDesc(), fsName)); - } - return buildClasses.get(fsName); - } - - public static Fs getFs(String fsType, String proxyUser) { - String user = StorageUtils.getJvmUser(); - return getBuildFactory(fsType).getFs(user, proxyUser); - } - - public static Fs getFs(String fsType) { - String user = StorageUtils.getJvmUser(); - return getBuildFactory(fsType).getFs(user, user); - } - - /** - * 1. If this machine has shared storage, the file:// type FS obtained here is the FS of the - * process user. 2. If this machine does not have shared storage, then the file:// type FS - * obtained is the proxy to the Remote (shared storage machine root) FS. 3. 
If it is HDFS, it - * returns the FS of the process user. 1、如果这台机器装有共享存储则这里获得的file://类型的FS为该进程用户的FS - * 2、如果这台机器没有共享存储则获得的file://类型的FS为代理到Remote(共享存储机器root)的FS 3、如果是HDFS则返回的就是该进程用户的FS - * - * @param fsPath - * @return - */ - public static Fs getFs(FsPath fsPath) { - return getFs(fsPath.getFsType()); - } - - /** - * 1. If the process user is passed and the proxy user and the process user are consistent, the - * file:// type FS is the FS of the process user (the shared storage exists) 2. If the process - * user is passed and the proxy user and the process user are consistent and there is no shared - * storage, the file:// type FS is the proxy to the remote (shared storage machine root) FS 3. If - * the passed proxy user and process user are consistent, the hdfs type is the FS of the process - * user. 4. If the proxy user and the process user are inconsistent, the hdfs type is the FS after - * the proxy. - * - * @param fsPath - * @param proxyUser - * @return - */ - public static Fs getFsByProxyUser(FsPath fsPath, String proxyUser) { - return getFs(fsPath.getFsType(), proxyUser); - } - - public Fs getFSByLabel(String fs, String label) { - String user = StorageUtils.getJvmUser(); - return getBuildFactory(fs).getFs(user, user, label); - } - - public Fs getFSByLabelAndUser(String fs, String label, String proxy) { - String user = StorageUtils.getJvmUser(); - return getBuildFactory(fs).getFs(user, proxy, label); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/conf/LinkisStorageConf.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/conf/LinkisStorageConf.java deleted file mode 100644 index 7fb5c1b4ac..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/conf/LinkisStorageConf.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.conf; - -import org.apache.linkis.common.conf.CommonVars; -import org.apache.linkis.common.utils.ByteTimeUtils; - -import org.apache.commons.lang3.StringUtils; - -public class LinkisStorageConf { - private static final Object CONF_LOCK = new Object(); - - public static final String DOLPHIN = "dolphin"; - - public static final String PARQUET = "parquet"; - - public static final String PARQUET_FILE_SUFFIX = ".parquet"; - - public static final String ORC = "orc"; - - public static final String ORC_FILE_SUFFIX = ".orc"; - - public static final String HDFS_FILE_SYSTEM_REST_ERRS = - CommonVars.apply( - "wds.linkis.hdfs.rest.errs", - ".*Filesystem closed.*|.*Failed to find any Kerberos tgt.*") - .getValue(); - - public static final String ROW_BYTE_MAX_LEN_STR = - CommonVars.apply("wds.linkis.resultset.row.max.str", "2m").getValue(); - - public static final String ENGINE_RESULT_TYPE = - CommonVars.apply("linkis.engine.resultSet.type", DOLPHIN, "Result type").getValue(); - - public static final long ROW_BYTE_MAX_LEN = ByteTimeUtils.byteStringAsBytes(ROW_BYTE_MAX_LEN_STR); - - public static final String FILE_TYPE = - CommonVars.apply( - "wds.linkis.storage.file.type", - "dolphin,sql,scala,py,hql,python,out,log,text,txt,sh,jdbc,ngql,psql,fql,tsql" - + 
"," - + PARQUET - + "," - + ORC) - .getValue(); - - private static volatile String[] fileTypeArr = null; - - private static String[] fileTypeArrParser(String fileType) { - if (StringUtils.isBlank(fileType)) { - return new String[0]; - } else { - return fileType.split(","); - } - } - - public static String[] getFileTypeArr() { - if (fileTypeArr == null) { - synchronized (CONF_LOCK) { - if (fileTypeArr == null) { - fileTypeArr = fileTypeArrParser(FILE_TYPE); - } - } - } - return fileTypeArr; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/csv/StorageCSVWriter.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/csv/StorageCSVWriter.java deleted file mode 100644 index d98be40337..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/csv/StorageCSVWriter.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.csv; - -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.domain.DataType; -import org.apache.linkis.storage.resultset.table.TableMetaData; -import org.apache.linkis.storage.resultset.table.TableRecord; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; - -import java.io.IOException; -import java.io.OutputStream; -import java.util.stream.Stream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class StorageCSVWriter extends CSVFsWriter { - private static final Logger logger = LoggerFactory.getLogger(StorageCSVWriter.class); - - private final String charset; - private final String separator; - private final boolean quoteRetouchEnable; - private final OutputStream outputStream; - - private final String delimiter; - private final StringBuilder buffer; - - public StorageCSVWriter( - String charset, String separator, boolean quoteRetouchEnable, OutputStream outputStream) { - this.charset = charset; - this.separator = separator; - this.quoteRetouchEnable = quoteRetouchEnable; - this.outputStream = outputStream; - - if (StringUtils.isBlank(separator)) { - this.delimiter = "\t"; - } else { - switch (separator) { - case "t": - this.delimiter = "\t"; - break; - default: - this.delimiter = separator; - break; - } - } - this.buffer = new StringBuilder(50000); - } - - @Override - public String getCharset() { - return charset; - } - - @Override - public String getSeparator() { - return separator; - } - - @Override - public boolean isQuoteRetouchEnable() { - return quoteRetouchEnable; - } - - @Override - public void addMetaData(MetaData metaData) throws IOException { - Column[] columns = ((TableMetaData) metaData).getColumns(); - String[] head = Stream.of(columns).map(Column::getColumnName).toArray(String[]::new); - write(head); - } - - private String 
compact(String[] row) { - String quotationMarks = "\""; - String dealNewlineSymbolMarks = "\n"; - StringBuilder rowBuilder = new StringBuilder(); - for (String value : row) { - String decoratedValue = value; - if (StringUtils.isNotBlank(value)) { - if (quoteRetouchEnable) { - decoratedValue = quotationMarks + value.replaceAll(quotationMarks, "") + quotationMarks; - } - decoratedValue = decoratedValue.replaceAll(dealNewlineSymbolMarks, " "); - logger.debug("decorateValue with input: {} output: {} ", value, decoratedValue); - } - rowBuilder.append(decoratedValue).append(delimiter); - } - if (rowBuilder.length() > 0 && rowBuilder.toString().endsWith(delimiter)) { - int index = rowBuilder.lastIndexOf(delimiter); - rowBuilder.delete(index, index + delimiter.length()); - } - rowBuilder.append("\n"); - if (logger.isDebugEnabled()) { - logger.debug("delimiter:" + delimiter); - } - return rowBuilder.toString(); - } - - private void write(String[] row) throws IOException { - String content = compact(row); - if (buffer.length() + content.length() > 49500) { - IOUtils.write(buffer.toString().getBytes(charset), outputStream); - buffer.setLength(0); - } - buffer.append(content); - } - - @Override - public void addRecord(Record record) throws IOException { - Object[] rows = ((TableRecord) record).row; - String[] body = - Stream.of(rows).map(dataType -> DataType.valueToString(dataType)).toArray(String[]::new); - write(body); - } - - @Override - public void flush() throws IOException { - IOUtils.write(buffer.toString().getBytes(charset), outputStream); - buffer.setLength(0); - } - - @Override - public void close() throws IOException { - flush(); - IOUtils.closeQuietly(outputStream); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/DataType.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/DataType.java deleted file mode 100644 index 6808f693ec..0000000000 --- 
a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/DataType.java +++ /dev/null @@ -1,237 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.domain; - -import java.math.BigDecimal; -import java.sql.Date; -import java.sql.Timestamp; -import java.util.Optional; -import java.util.regex.Pattern; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.storage.domain.Dolphin.LINKIS_NULL; - -public enum DataType { - NullType("void", 0), - StringType("string", 12), - BooleanType("boolean", 16), - TinyIntType("tinyint", -6), - ShortIntType("short", 5), - IntType("int", 4), - LongType("long", -5), - BigIntType("bigint", -5), - FloatType("float", 6), - DoubleType("double", 8), - CharType("char", 1), - VarcharType("varchar", 12), - DateType("date", 91), - TimestampType("timestamp", 93), - BinaryType("binary", -2), - DecimalType("decimal", 3), - ArrayType("array", 2003), - MapType("map", 2000), - ListType("list", 2001), - StructType("struct", 2002), - BigDecimalType("bigdecimal", 3); - - private final String typeName; - private final int javaSQLType; - - DataType(String typeName, int javaSQLType) { - this.typeName = 
typeName; - this.javaSQLType = javaSQLType; - } - - private static Logger logger = LoggerFactory.getLogger(DataType.class); - - public static final String NULL_VALUE = "NULL"; - public static final String LOWCASE_NULL_VALUE = "null"; - - // TODO Change to fine-grained regular expressions(改为精细化正则表达式) - public static final Pattern DECIMAL_REGEX = - Pattern.compile("^decimal\\(\\s*\\d*\\s*,\\s*\\d*\\s*\\)"); - - public static final Pattern SHORT_REGEX = Pattern.compile("^short.*"); - public static final Pattern INT_REGEX = Pattern.compile("^int.*"); - public static final Pattern LONG_REGEX = Pattern.compile("^long.*"); - public static final Pattern BIGINT_REGEX = Pattern.compile("^bigint.*"); - public static final Pattern FLOAT_REGEX = Pattern.compile("^float.*"); - public static final Pattern DOUBLE_REGEX = Pattern.compile("^double.*"); - - public static final Pattern VARCHAR_REGEX = Pattern.compile("^varchar.*"); - public static final Pattern CHAR_REGEX = Pattern.compile("^char.*"); - - public static final Pattern ARRAY_REGEX = Pattern.compile("array.*"); - - public static final Pattern MAP_REGEX = Pattern.compile("map.*"); - - public static final Pattern LIST_REGEX = Pattern.compile("list.*"); - - public static final Pattern STRUCT_REGEX = Pattern.compile("struct.*"); - - public static DataType toDataType(String dataType) { - if (dataType.equals("void") || dataType.equals("null")) { - return DataType.NullType; - } else if (dataType.equals("string")) { - return DataType.StringType; - } else if (dataType.equals("boolean")) { - return DataType.BooleanType; - } else if (SHORT_REGEX.matcher(dataType).matches()) { - return DataType.ShortIntType; - } else if (LONG_REGEX.matcher(dataType).matches()) { - return DataType.LongType; - } else if (BIGINT_REGEX.matcher(dataType).matches()) { - return DataType.BigIntType; - } else if (INT_REGEX.matcher(dataType).matches() - || dataType.equals("integer") - || dataType.equals("smallint")) { - return DataType.IntType; - } else if 
(FLOAT_REGEX.matcher(dataType).matches()) { - return DataType.FloatType; - } else if (DOUBLE_REGEX.matcher(dataType).matches()) { - return DataType.DoubleType; - } else if (VARCHAR_REGEX.matcher(dataType).matches()) { - return DataType.VarcharType; - } else if (CHAR_REGEX.matcher(dataType).matches()) { - return DataType.CharType; - } else if (dataType.equals("date")) { - return DataType.DateType; - } else if (dataType.equals("timestamp")) { - return DataType.TimestampType; - } else if (dataType.equals("binary")) { - return DataType.BinaryType; - } else if (dataType.equals("decimal") || DECIMAL_REGEX.matcher(dataType).matches()) { - return DataType.DecimalType; - } else if (ARRAY_REGEX.matcher(dataType).matches()) { - return DataType.ArrayType; - } else if (MAP_REGEX.matcher(dataType).matches()) { - return DataType.MapType; - } else if (LIST_REGEX.matcher(dataType).matches()) { - return DataType.ListType; - } else if (STRUCT_REGEX.matcher(dataType).matches()) { - return DataType.StructType; - } else { - return DataType.StringType; - } - } - - public static Object toValue(DataType dataType, String value) { - - Object result = null; - if (isLinkisNull(value)) { - return result; - } - try { - switch (dataType) { - case NullType: - result = null; - break; - case StringType: - case CharType: - case VarcharType: - case StructType: - case ListType: - case ArrayType: - case MapType: - result = value; - break; - case BooleanType: - result = isNumberNull(value) ? null : Boolean.valueOf(value); - break; - case ShortIntType: - result = isNumberNull(value) ? null : Short.valueOf(value); - break; - case IntType: - result = isNumberNull(value) ? null : Integer.valueOf(value); - break; - case LongType: - case BigIntType: - result = isNumberNull(value) ? null : Long.valueOf(value); - break; - case FloatType: - result = isNumberNull(value) ? null : Float.valueOf(value); - break; - case DoubleType: - result = isNumberNull(value) ? 
null : Double.valueOf(value); - break; - case DecimalType: - result = isNumberNull(value) ? null : new BigDecimal(value); - break; - case DateType: - result = isNumberNull(value) ? null : Date.valueOf(value); - break; - case TimestampType: - result = - isNumberNull(value) - ? null - : Optional.of(value) - .map(Timestamp::valueOf) - .map(Timestamp::toString) - .map(s -> s.endsWith(".0") ? s.substring(0, s.length() - 2) : s) - .orElse(null); - break; - case BinaryType: - result = isNull(value) ? null : value.getBytes(); - break; - default: - result = value; - } - } catch (Exception e) { - logger.debug("Failed to {} switch to dataType:", value, e); - result = value; - } - return result; - } - - public static boolean isLinkisNull(String value) { - return value == null || value.equals(LINKIS_NULL); - } - - public static boolean isNull(String value) { - return value == null || value.equals(NULL_VALUE) || value.trim().equals(""); - } - - public static boolean isNumberNull(String value) { - return value == null || value.equalsIgnoreCase(NULL_VALUE) || value.trim().equals(""); - } - - public static String valueToString(Object value) { - if (value == null) { - return LOWCASE_NULL_VALUE; - } else if (value instanceof BigDecimal) { - return ((BigDecimal) value).toPlainString(); - } else { - return value.toString(); - } - } - - public String getTypeName() { - return typeName; - } - - public int getJavaSQLType() { - return javaSQLType; - } - - @Override - public String toString() { - return typeName; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/Dolphin.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/Dolphin.java deleted file mode 100644 index 35c71295e4..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/Dolphin.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license 
agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.domain; - -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.utils.StorageConfiguration; -import org.apache.linkis.storage.utils.StorageUtils; - -import java.io.IOException; -import java.io.InputStream; -import java.io.UnsupportedEncodingException; -import java.nio.charset.Charset; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.FAILED_TO_READ_INTEGER; - -public class Dolphin { - private static final Logger logger = LoggerFactory.getLogger(Dolphin.class); - - public static final Charset CHAR_SET = - Charset.forName(StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue()); - public static final String MAGIC = "dolphin"; - - public static byte[] MAGIC_BYTES = new byte[0]; - - static { - try { - MAGIC_BYTES = MAGIC.getBytes(StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue()); - } catch (UnsupportedEncodingException e) { - logger.warn("Dolphin getBytes failed", e); - } - } - - public static final int MAGIC_LEN = MAGIC_BYTES.length; - - public static final String DOLPHIN_FILE_SUFFIX = ".dolphin"; - - public static final String COL_SPLIT = ","; - public static final byte[] COL_SPLIT_BYTES = 
COL_SPLIT.getBytes(Charset.forName("utf-8")); - public static final int COL_SPLIT_LEN = COL_SPLIT_BYTES.length; - - public static final String NULL = "NULL"; - public static final byte[] NULL_BYTES = "NULL".getBytes(Charset.forName("utf-8")); - - public static final String LINKIS_NULL = "LINKIS_NULL"; - public static final byte[] LINKIS_NULL_BYTES = LINKIS_NULL.getBytes(Charset.forName("utf-8")); - - public static final int INT_LEN = 10; - - public static final int FILE_EMPTY = 31; - - public static byte[] getBytes(Object value) { - return value.toString().getBytes(CHAR_SET); - } - - /** - * Convert a bytes array to a String content 将bytes数组转换为String内容 - * - * @param bytes - * @param start - * @param len - * @return - */ - public static String getString(byte[] bytes, int start, int len) { - return new String(bytes, start, len, Dolphin.CHAR_SET); - } - - public static String toStringValue(String value) { - if (LINKIS_NULL.equals(value)) { - return NULL; - } else { - return value; - } - } - - /** - * Read an integer value that converts the array to a byte of length 10 bytes - * 读取整数值,该值为将数组转换为10字节长度的byte - * - * @param inputStream - * @return - * @throws IOException - */ - public static int readInt(InputStream inputStream) throws IOException { - byte[] bytes = new byte[INT_LEN + 1]; - if (StorageUtils.readBytes(inputStream, bytes, INT_LEN) != INT_LEN) { - throw new StorageWarnException( - FAILED_TO_READ_INTEGER.getErrorCode(), FAILED_TO_READ_INTEGER.getErrorDesc()); - } - return Integer.parseInt(getString(bytes, 0, INT_LEN)); - } - - /** - * Print integers at a fixed length(将整数按固定长度打印) - * - * @param value - * @return - */ - public static byte[] getIntBytes(int value) { - String str = Integer.toString(value); - StringBuilder res = new StringBuilder(); - for (int i = 0; i < INT_LEN - str.length(); i++) { - res.append("0"); - } - res.append(str); - return Dolphin.getBytes(res.toString()); - } - - public static String getType(InputStream inputStream) throws IOException 
{ - byte[] bytes = new byte[100]; - int len = StorageUtils.readBytes(inputStream, bytes, Dolphin.MAGIC_LEN + INT_LEN); - if (len == -1) return null; - return getType(Dolphin.getString(bytes, 0, len)); - } - - public static String getType(String content) { - if (content.length() < MAGIC.length() || !content.substring(0, MAGIC.length()).equals(MAGIC)) { - throw new RuntimeException( - "File header type must be dolphin, content: " + content + " is not"); - } - return Integer.toString( - Integer.parseInt(content.substring(MAGIC.length(), MAGIC.length() + INT_LEN))); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntity.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntity.java deleted file mode 100644 index c1e16e223a..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntity.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.domain; - -/** - * Engine unique Id(engine唯一的Id) - * - *

Fs type(fs类型) - * - *

Create a user to start the corresponding jvm user(创建用户为对应启动的jvm用户) - * - *

Proxy user(代理用户) - * - *

client Ip for whitelist control(ip用于白名单控制) - * - *

Method name called(调用的方法名) - * - *

Method parameter(方法参数) - */ -public class MethodEntity { - private long id; - private String fsType; - private String creatorUser; - private String proxyUser; - private String clientIp; - private String methodName; - private Object[] params; - - public MethodEntity( - long id, - String fsType, - String creatorUser, - String proxyUser, - String clientIp, - String methodName, - Object[] params) { - this.id = id; - this.fsType = fsType; - this.creatorUser = creatorUser; - this.proxyUser = proxyUser; - this.clientIp = clientIp; - this.methodName = methodName; - this.params = params; - } - - public long getId() { - return id; - } - - public void setId(long id) { - this.id = id; - } - - public String getFsType() { - return fsType; - } - - public void setFsType(String fsType) { - this.fsType = fsType; - } - - public String getCreatorUser() { - return creatorUser; - } - - public void setCreatorUser(String creatorUser) { - this.creatorUser = creatorUser; - } - - public String getProxyUser() { - return proxyUser; - } - - public void setProxyUser(String proxyUser) { - this.proxyUser = proxyUser; - } - - public String getClientIp() { - return clientIp; - } - - public void setClientIp(String clientIp) { - this.clientIp = clientIp; - } - - public String getMethodName() { - return methodName; - } - - public void setMethodName(String methodName) { - this.methodName = methodName; - } - - public Object[] getParams() { - return params; - } - - public void setParams(Object[] params) { - this.params = params; - } - - @Override - public String toString() { - return "id:" - + id - + ", methodName:" - + methodName - + ", fsType:" - + fsType - + ", creatorUser:" - + creatorUser - + ", proxyUser:" - + proxyUser - + ", clientIp:" - + clientIp; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntitySerializer.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntitySerializer.java deleted file mode 
100644 index 777b756a7f..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/domain/MethodEntitySerializer.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.domain; - -import java.lang.reflect.Type; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; - -/** - * Serialize MethodEntity to code 序列化MethodEntity为code - * - *

Serialized to code as a MethodEntity object 序列化为code为MethodEntity对象 - * - *

Serialize a java object as a string 序列化java对象为字符串 - * - *

Deserialize a string into a java object 将字符串解序列化为java对象 - */ -public class MethodEntitySerializer { - - private static final Gson gson = - new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").create(); - - /** - * Serialized to code as a MethodEntity object 序列化为code为MethodEntity对象 - * - * @param code - * @return - */ - public static MethodEntity deserializer(String code) { - return gson.fromJson(code, MethodEntity.class); - } - - /** - * Serialize MethodEntity to code 序列化MethodEntity为code - * - * @param methodEntity - * @return - */ - public static String serializer(MethodEntity methodEntity) { - return gson.toJson(methodEntity); - } - - /** - * Serialize a java object as a string 序列化java对象为字符串 - * - * @param value - * @return - */ - public static String serializerJavaObject(Object value) { - return gson.toJson(value); - } - - /** - * Deserialize a string into a java object 将字符串解序列化为java对象 - * - * @param json - * @param classType - * @param - * @return - */ - public static T deserializerToJavaObject(String json, Class classType) { - return gson.fromJson(json, classType); - } - - public static T deserializerToJavaObject(String json, Type oType) { - return gson.fromJson(json, oType); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisStorageErrorCodeSummary.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisStorageErrorCodeSummary.java index 9ca3425837..7187f72ce5 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisStorageErrorCodeSummary.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/errorcode/LinkisStorageErrorCodeSummary.java @@ -37,6 +37,14 @@ public enum LinkisStorageErrorCodeSummary implements LinkisErrorCode { 52004, "You must register IOMethodInterceptorCreator before you can use proxy mode.(必须先注册IOMethodInterceptorCreator,才能使用代理模式)"), UNSUPPORTED_OPEN_FILE_TYPE(54001, 
"Unsupported open file type(不支持打开的文件类型)"), + + RESULT_COL_LENGTH(52003, "Col value length {0} exceed limit {1}"), + + RESULT_COLUMN_INDEX_OUT_OF_BOUNDS(52004, "Column index value {0} exceed limit {1}"), + + RESULT_ROW_LENGTH(520034, "Row value length {0} exceed limit {1}"), + + RESULT_COL_SIZE(520035, "Col size length {0} exceed limit {1}"), INVALID_CUSTOM_PARAMETER(65000, "Invalid custom parameter(不合法的自定义参数)"); /** 错误码 */ diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelXlsReader.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelXlsReader.java deleted file mode 100644 index 98df7421f3..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelXlsReader.java +++ /dev/null @@ -1,278 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.excel; - -import org.apache.poi.hssf.eventusermodel.*; -import org.apache.poi.hssf.eventusermodel.EventWorkbookBuilder.SheetRecordCollectingListener; -import org.apache.poi.hssf.eventusermodel.dummyrecord.LastCellOfRowDummyRecord; -import org.apache.poi.hssf.eventusermodel.dummyrecord.MissingCellDummyRecord; -import org.apache.poi.hssf.model.HSSFFormulaParser; -import org.apache.poi.hssf.record.*; -import org.apache.poi.hssf.usermodel.HSSFWorkbook; -import org.apache.poi.poifs.filesystem.POIFSFileSystem; - -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ExcelXlsReader implements HSSFListener { - private static final Logger logger = LoggerFactory.getLogger(ExcelXlsReader.class); - - private int minColumns = -1; - - private POIFSFileSystem fs; - - private InputStream inputStream; - - private int lastRowNumber; - - private int lastColumnNumber; - - /** Should we output the formula, or the value it has? 
*/ - private boolean outputFormulaValues = true; - - /** For parsing Formulas */ - private SheetRecordCollectingListener workbookBuildingListener; - - // excel2003Workbook(工作薄) - private HSSFWorkbook stubWorkbook; - - // Records we pick up as we process - private SSTRecord sstRecord; - - private FormatTrackingHSSFListener formatListener; - - // Table index(表索引) - private int sheetIndex = -1; - - private BoundSheetRecord[] orderedBSRs; - - @SuppressWarnings("unchecked") - private ArrayList boundSheetRecords = new ArrayList(); - - // For handling formulas with string results - private int nextRow; - - private int nextColumn; - - private boolean outputNextStringRecord; - - // Current line(当前行) - private int curRow = 0; - - // a container that stores row records(存储行记录的容器) - private List rowlist = new ArrayList(); - - @SuppressWarnings("unused") - private String sheetName; - - private IExcelRowDeal excelRowDeal; - - public void init(IExcelRowDeal excelRowDeal, InputStream inputStream) { - this.excelRowDeal = excelRowDeal; - this.inputStream = inputStream; - } - - /** - * Traverse all the sheets under excel 遍历excel下所有的sheet - * - * @throws IOException - */ - public void process() throws IOException { - this.fs = new POIFSFileSystem(this.inputStream); - MissingRecordAwareHSSFListener listener = new MissingRecordAwareHSSFListener(this); - formatListener = new FormatTrackingHSSFListener(listener); - HSSFEventFactory factory = new HSSFEventFactory(); - HSSFRequest request = new HSSFRequest(); - if (outputFormulaValues) { - request.addListenerForAllRecords(formatListener); - } else { - workbookBuildingListener = new SheetRecordCollectingListener(formatListener); - request.addListenerForAllRecords(workbookBuildingListener); - } - factory.processWorkbookEvents(request, fs); - } - - /** HSSFListener listener method, processing Record HSSFListener 监听方法,处理 Record */ - @Override - @SuppressWarnings("unchecked") - public void processRecord(Record record) { - int thisRow = -1; - int 
thisColumn = -1; - String thisStr = null; - String value = null; - switch (record.getSid()) { - case BoundSheetRecord.sid: - boundSheetRecords.add(record); - break; - case BOFRecord.sid: - BOFRecord br = (BOFRecord) record; - if (br.getType() == BOFRecord.TYPE_WORKSHEET) { - // Create a child workbook if needed(如果有需要,则建立子工作薄) - if (workbookBuildingListener != null && stubWorkbook == null) { - stubWorkbook = workbookBuildingListener.getStubHSSFWorkbook(); - } - - sheetIndex++; - if (orderedBSRs == null) { - orderedBSRs = BoundSheetRecord.orderByBofPosition(boundSheetRecords); - } - sheetName = orderedBSRs[sheetIndex].getSheetname(); - } - break; - - case SSTRecord.sid: - sstRecord = (SSTRecord) record; - break; - - case BlankRecord.sid: - BlankRecord brec = (BlankRecord) record; - thisRow = brec.getRow(); - thisColumn = brec.getColumn(); - thisStr = ""; - rowlist.add(thisColumn, thisStr); - break; - case BoolErrRecord.sid: // Cell is boolean(单元格为布尔类型) - BoolErrRecord berec = (BoolErrRecord) record; - thisRow = berec.getRow(); - thisColumn = berec.getColumn(); - thisStr = berec.getBooleanValue() + ""; - rowlist.add(thisColumn, thisStr); - break; - - case FormulaRecord.sid: // Cell is a formula type(单元格为公式类型) - FormulaRecord frec = (FormulaRecord) record; - thisRow = frec.getRow(); - thisColumn = frec.getColumn(); - if (outputFormulaValues) { - if (Double.isNaN(frec.getValue())) { - // Formula result is a string - // This is stored in the next record - outputNextStringRecord = true; - nextRow = frec.getRow(); - nextColumn = frec.getColumn(); - } else { - thisStr = formatListener.formatNumberDateCell(frec); - } - } else { - thisStr = - '"' - + HSSFFormulaParser.toFormulaString(stubWorkbook, frec.getParsedExpression()) - + '"'; - } - rowlist.add(thisColumn, thisStr); - break; - case StringRecord.sid: // a string of formulas in a cell(单元格中公式的字符串) - if (outputNextStringRecord) { - // String for formula - StringRecord srec = (StringRecord) record; - thisStr = 
srec.getString(); - thisRow = nextRow; - thisColumn = nextColumn; - outputNextStringRecord = false; - } - break; - case LabelRecord.sid: - LabelRecord lrec = (LabelRecord) record; - curRow = thisRow = lrec.getRow(); - thisColumn = lrec.getColumn(); - value = lrec.getValue().trim(); - value = value.equals("") ? " " : value; - this.rowlist.add(thisColumn, value); - break; - case LabelSSTRecord.sid: // Cell is a string type(单元格为字符串类型) - LabelSSTRecord lsrec = (LabelSSTRecord) record; - curRow = thisRow = lsrec.getRow(); - thisColumn = lsrec.getColumn(); - if (sstRecord == null) { - rowlist.add(thisColumn, " "); - } else { - value = sstRecord.getString(lsrec.getSSTIndex()).toString().trim(); - value = value.equals("") ? " " : value; - rowlist.add(thisColumn, value); - } - break; - case NumberRecord.sid: // Cell is a numeric type(单元格为数字类型) - NumberRecord numrec = (NumberRecord) record; - curRow = thisRow = numrec.getRow(); - thisColumn = numrec.getColumn(); - value = formatListener.formatNumberDateCell(numrec).trim(); - value = value.equals("") ? 
"0" : value; - // Add column values to the container(向容器加入列值) - rowlist.add(thisColumn, value); - break; - default: - break; - } - - // Encountered a new line of operations(遇到新行的操作)( - if (thisRow != -1 && thisRow != lastRowNumber) { - lastColumnNumber = -1; - } - - // Null operation(空值的操作) - if (record instanceof MissingCellDummyRecord) { - MissingCellDummyRecord mc = (MissingCellDummyRecord) record; - curRow = thisRow = mc.getRow(); - thisColumn = mc.getColumn(); - rowlist.add(thisColumn, " "); - } - - // Update row and column values(更新行和列的值) - if (thisRow > -1) lastRowNumber = thisRow; - if (thisColumn > -1) lastColumnNumber = thisColumn; - - // End of line operation(行结束时的操作) - if (record instanceof LastCellOfRowDummyRecord) { - if (minColumns > 0) { - // Column value is re-empted(列值重新置空) - if (lastColumnNumber == -1) { - lastColumnNumber = 0; - } - } - lastColumnNumber = -1; - - // At the end of each line, the dealRow() method(每行结束时, dealRow() 方法) - excelRowDeal.dealRow(orderedBSRs, sheetIndex, curRow, rowlist); - // Empty container(清空容器) - rowlist.clear(); - } - } - - public void close() { - try { - if (fs != null) { - fs.close(); - } - } catch (Exception e) { - logger.info("ExcelXlsReader fs closed failed", e); - } - - try { - if (inputStream != null) { - inputStream.close(); - } - } catch (IOException e) { - logger.info("ExcelXlsReader inputStream closed failed", e); - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/FirstRowDeal.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/FirstRowDeal.java deleted file mode 100644 index 6924a3ebb9..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/FirstRowDeal.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.excel; - -import org.apache.poi.hssf.record.BoundSheetRecord; - -import java.util.ArrayList; -import java.util.List; - -class FirstRowDeal implements IExcelRowDeal { - - private List sheetNames = new ArrayList<>(); - private List row; - - public List getSheetNames() { - return sheetNames; - } - - public void setSheetNames(List sheetNames) { - this.sheetNames = sheetNames; - } - - public List getRow() { - return row; - } - - public void setRow(List row) { - this.row = row; - } - - @Override - public void dealRow( - BoundSheetRecord[] orderedBSRs, int sheetIndex, int curRow, List rowlist) { - for (BoundSheetRecord record : orderedBSRs) { - sheetNames.add(record.getSheetname()); - } - row = rowlist; - throw new ExcelAnalysisException("Finished to deal first row"); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/RowToCsvDeal.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/RowToCsvDeal.java deleted file mode 100644 index 7deccfb92a..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/RowToCsvDeal.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor 
license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.excel; - -import org.apache.poi.hssf.record.BoundSheetRecord; - -import java.io.IOException; -import java.io.OutputStream; -import java.util.List; -import java.util.Map; - -class RowToCsvDeal implements IExcelRowDeal { - - private Map params; - private List sheetNames; - private OutputStream outputStream; - private Boolean hasHeader; - private Boolean fisrtRow = true; - - public void init(Boolean hasHeader, List sheetNames, OutputStream outputStream) { - this.hasHeader = hasHeader; - this.sheetNames = sheetNames; - this.outputStream = outputStream; - } - - @Override - public void dealRow( - BoundSheetRecord[] orderedBSRs, int sheetIndex, int curRow, List rowlist) { - String sheetName = orderedBSRs[sheetIndex].getSheetname(); - if (sheetNames == null || sheetNames.isEmpty() || sheetNames.contains(sheetName)) { - if (!(curRow == 0 && hasHeader)) { - try { - if (fisrtRow) { - fisrtRow = false; - } else { - outputStream.write("\n".getBytes()); - } - int len = rowlist.size(); - for (int i = 0; i < len; i++) { - outputStream.write(rowlist.get(i).replaceAll("\n|\t", " ").getBytes("utf-8")); - if (i < len - 1) { - outputStream.write("\t".getBytes()); - } - } - } catch (IOException e) { - throw new RuntimeException(e); - } - } - } 
- } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/StorageExcelWriter.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/StorageExcelWriter.java deleted file mode 100644 index 6b2a98c72b..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/StorageExcelWriter.java +++ /dev/null @@ -1,310 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.excel; - -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.domain.DataType; -import org.apache.linkis.storage.resultset.table.TableMetaData; -import org.apache.linkis.storage.resultset.table.TableRecord; - -import org.apache.commons.io.IOUtils; -import org.apache.poi.ss.usermodel.*; -import org.apache.poi.xssf.streaming.SXSSFSheet; -import org.apache.poi.xssf.streaming.SXSSFWorkbook; - -import java.io.*; -import java.math.BigDecimal; -import java.util.*; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class StorageExcelWriter extends ExcelFsWriter { - - private static Logger logger = LoggerFactory.getLogger(StorageExcelWriter.class); - - private String charset; - private String sheetName; - private String dateFormat; - private OutputStream outputStream; - private boolean autoFormat; - protected SXSSFWorkbook workBook; - protected SXSSFSheet sheet; - private DataFormat format; - protected DataType[] types; - protected int rowPoint; - protected int columnCounter; - protected Map styles = new HashMap<>(); - private boolean isFlush = true; - private ByteArrayOutputStream os = new ByteArrayOutputStream(); - private ByteArrayInputStream is; - - public StorageExcelWriter( - String charset, - String sheetName, - String dateFormat, - OutputStream outputStream, - boolean autoFormat) { - this.charset = charset; - this.sheetName = sheetName; - this.dateFormat = dateFormat; - this.outputStream = outputStream; - this.autoFormat = autoFormat; - } - - public void init() { - workBook = new SXSSFWorkbook(); - sheet = workBook.createSheet(sheetName); - } - - public CellStyle getDefaultHeadStyle() { - Font headerFont = workBook.createFont(); - headerFont.setBold(true); - headerFont.setFontHeightInPoints((short) 14); - headerFont.setColor(IndexedColors.RED.getIndex()); - CellStyle headerCellStyle = 
workBook.createCellStyle(); - headerCellStyle.setFont(headerFont); - return headerCellStyle; - } - - public Workbook getWorkBook() { - // 自适应列宽 - sheet.trackAllColumnsForAutoSizing(); - for (int elem = 0; elem <= columnCounter; elem++) { - sheet.autoSizeColumn(elem); - } - return workBook; - } - - public CellStyle createCellStyle(DataType dataType) { - CellStyle style = workBook.createCellStyle(); - format = workBook.createDataFormat(); - style.setDataFormat(format.getFormat("@")); - - if (autoFormat) { - switch (dataType) { - case StringType: - case CharType: - case VarcharType: - style.setDataFormat(format.getFormat("@")); - break; - case TinyIntType: - case ShortIntType: - case IntType: - style.setDataFormat(format.getFormat("#")); - break; - case LongType: - case BigIntType: - style.setDataFormat(format.getFormat("#.##E+00")); - break; - case FloatType: - style.setDataFormat(format.getFormat("#.0000000000")); - break; - case DoubleType: - style.setDataFormat(format.getFormat("#.0000000000")); - break; - case DateType: - case TimestampType: - style.setDataFormat(format.getFormat("m/d/yy h:mm")); - break; - case DecimalType: - case BigDecimalType: - style.setDataFormat(format.getFormat("#.000000000")); - break; - default: - style.setDataFormat(format.getFormat("@")); - } - } - return style; - } - - public CellStyle getCellStyle(DataType dataType) { - CellStyle style = styles.get(dataType.getTypeName()); - if (style == null) { - CellStyle newStyle = createCellStyle(dataType); - styles.put(dataType.getTypeName(), newStyle); - return newStyle; - } else { - return style; - } - } - - @Override - public void addMetaData(MetaData metaData) throws IOException { - init(); - Row tableHead = sheet.createRow(0); - Column[] columns = ((TableMetaData) metaData).getColumns(); - List columnType = new ArrayList<>(); - for (int i = 0; i < columns.length; i++) { - Cell headCell = tableHead.createCell(columnCounter); - headCell.setCellValue(columns[i].getColumnName()); - 
headCell.setCellStyle(getDefaultHeadStyle()); - columnType.add(columns[i].getDataType()); - columnCounter++; - } - types = columnType.toArray(new DataType[0]); - rowPoint++; - } - - @Override - public void addRecord(Record record) throws IOException { - // TODO: 是否需要替换null值 - Row tableBody = sheet.createRow(rowPoint); - int colunmPoint = 0; - Object[] excelRecord = ((TableRecord) record).row; - for (Object elem : excelRecord) { - Cell cell = tableBody.createCell(colunmPoint); - DataType dataType = types[colunmPoint]; - if (autoFormat) { - setCellTypeValue(dataType, elem, cell); - } else { - cell.setCellValue(DataType.valueToString(elem)); - } - cell.setCellStyle(getCellStyle(dataType)); - colunmPoint++; - } - rowPoint++; - } - - private void setCellTypeValue(DataType dataType, Object elem, Cell cell) { - if (null == elem) return; - - try { - switch (dataType) { - case StringType: - case CharType: - case VarcharType: - cell.setCellValue(DataType.valueToString(elem)); - break; - case TinyIntType: - case ShortIntType: - case IntType: - cell.setCellValue(Integer.valueOf(elem.toString())); - break; - case LongType: - case BigIntType: - cell.setCellValue(Long.valueOf(elem.toString())); - break; - case FloatType: - cell.setCellValue(Float.valueOf(elem.toString())); - break; - case DoubleType: - doubleCheck(elem.toString()); - cell.setCellValue(Double.valueOf(elem.toString())); - break; - case DateType: - case TimestampType: - cell.setCellValue(getDate(elem)); - break; - case DecimalType: - case BigDecimalType: - doubleCheck(DataType.valueToString(elem)); - cell.setCellValue(Double.valueOf(DataType.valueToString(elem))); - break; - default: - cell.setCellValue(DataType.valueToString(elem)); - } - } catch (Exception e) { - cell.setCellValue(DataType.valueToString(elem)); - } - } - - private Date getDate(Object value) { - if (value instanceof Date) { - return (Date) value; - } else { - throw new NumberFormatException( - "Value " - + value - + " with class : " - + 
value.getClass().getName() - + " is not a valid type of Date."); - } - } - - /** - * Check whether the double exceeds the number of digits, which will affect the data accuracy - * - * @param elemValue - */ - private void doubleCheck(String elemValue) { - BigDecimal value = new BigDecimal(elemValue).stripTrailingZeros(); - if ((value.precision() - value.scale()) > 15) { - throw new NumberFormatException( - "Value " + elemValue + " error : This data exceeds 15 significant digits."); - } - } - - @Override - public void flush() { - try { - getWorkBook().write(os); - } catch (IOException e) { - logger.warn("flush fail", e); - } - byte[] content = os.toByteArray(); - is = new ByteArrayInputStream(content); - byte[] buffer = new byte[1024]; - int bytesRead = 0; - while (isFlush) { - try { - bytesRead = is.read(buffer, 0, 1024); - if (bytesRead == -1) { - isFlush = false; - } else { - outputStream.write(buffer, 0, bytesRead); - } - } catch (IOException e) { - throw new RuntimeException(e); - } - } - } - - @Override - public void close() { - if (isFlush) { - flush(); - } - IOUtils.closeQuietly(outputStream); - IOUtils.closeQuietly(is); - IOUtils.closeQuietly(os); - IOUtils.closeQuietly(workBook); - } - - @Override - public String getCharset() { - return this.charset; - } - - @Override - public String getSheetName() { - return this.sheetName; - } - - @Override - public String getDateFormat() { - return this.dateFormat; - } - - @Override - public boolean isAutoFormat() { - return this.autoFormat; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java index 246fb79bc3..39d89c3d96 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java @@ -21,12 +21,17 @@ import 
org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.poi.hssf.usermodel.HSSFWorkbook; +import org.apache.poi.openxml4j.util.ZipSecureFile; +import org.apache.poi.ss.usermodel.Cell; +import org.apache.poi.ss.usermodel.Row; +import org.apache.poi.ss.usermodel.Sheet; +import org.apache.poi.ss.usermodel.Workbook; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.util.ArrayList; -import java.util.List; +import java.util.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -61,7 +66,7 @@ public static String excelToCsv( throws Exception { String hdfsPath = "/tmp/" + StorageUtils.getJvmUser() + "/" + System.currentTimeMillis() + ".csv"; - LOG.info("The excel to csv with hdfsPath:" + hdfsPath); + LOG.info("The excel to csv with hdfs path:" + hdfsPath); ExcelXlsReader xlsReader = new ExcelXlsReader(); RowToCsvDeal rowToCsvDeal = new RowToCsvDeal(); OutputStream out = null; @@ -81,4 +86,44 @@ public static String excelToCsv( } return hdfsPath; } + + public static Map>> getSheetsInfo( + InputStream inputStream, Boolean hasHeader) { + // use xls file + Workbook workbook = null; + try { + // 压缩膨胀比率,处理excel行或者列过多的情况,不能设置再小了,会导致内存过大 + ZipSecureFile.setMinInflateRatio(0.005); + workbook = new HSSFWorkbook(inputStream); + } catch (IOException e) { + throw new RuntimeException(e); + } finally { + // 使用完最后需要还原 + ZipSecureFile.setMinInflateRatio(0.01); + } + Map>> res = new LinkedHashMap<>(workbook.getNumberOfSheets()); + // foreach Sheet + for (int i = 0; i < workbook.getNumberOfSheets(); i++) { + Sheet sheet = workbook.getSheetAt(i); + + List> rowList = new ArrayList<>(); + + // get first row as column name + Row headerRow = sheet.getRow(0); + + // foreach column + for (int j = 0; j < headerRow.getPhysicalNumberOfCells(); j++) { + Map sheetMap = new LinkedHashMap<>(); + Cell cell = headerRow.getCell(j); + if (hasHeader) { + sheetMap.put(cell.getStringCellValue(), "string"); + } else { + 
sheetMap.put("col_" + (j + 1), "string"); + } + rowList.add(sheetMap); + } + res.put(sheet.getSheetName(), rowList); + } + return res; + } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsxUtils.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsxUtils.java index 7cbf579a9d..c2418f4c33 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsxUtils.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsxUtils.java @@ -23,10 +23,9 @@ import org.apache.poi.ss.usermodel.Workbook; import java.io.File; +import java.io.IOException; import java.io.InputStream; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; +import java.util.*; import com.github.pjfanning.xlsx.StreamingReader; @@ -79,4 +78,54 @@ public static List> getBasicInfo(InputStream inputStream, File file } } } + + public static Map>> getAllSheetInfo( + InputStream inputStream, File file, Boolean hasHeader) throws IOException { + try { + Workbook wb = null; + if (inputStream != null) { + wb = + StreamingReader.builder() + // number of rows to keep in memory (defaults to 10) + .rowCacheSize(2) + .open(inputStream); + } else { + wb = + StreamingReader.builder() + // number of rows to keep in memory (defaults to 10) + .rowCacheSize(2) + .open(file); + } + Map>> res = new LinkedHashMap<>(wb.getNumberOfSheets()); + for (Sheet sheet : wb) { + Iterator iterator = sheet.iterator(); + Row row = null; + while (iterator.hasNext() && row == null) { + row = iterator.next(); + } + List> rowList = new ArrayList<>(); + if (row == null) { + res.put(sheet.getSheetName(), rowList); + continue; + } + int cellIdx = 0; + for (Cell cell : row) { + Map item = new LinkedHashMap<>(); + if (hasHeader) { + item.put(cell.getStringCellValue(), "string"); + } else { + item.put("col_" + (cellIdx + 1), "string"); + } + cellIdx++; + rowList.add(item); + } + 
res.put(sheet.getSheetName(), rowList); + } + return res; + } finally { + if (inputStream != null) { + inputStream.close(); + } + } + } } diff --git a/linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/exception/ExecuteException.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColLengthExceedException.java similarity index 65% rename from linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/exception/ExecuteException.java rename to linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColLengthExceedException.java index dac32f0984..955a8e1d60 100644 --- a/linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/exception/ExecuteException.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColLengthExceedException.java @@ -15,12 +15,20 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.engineplugin.python.exception; +package org.apache.linkis.storage.exception; -import org.apache.linkis.common.exception.ErrorException; +public class ColLengthExceedException extends StorageWarnException { -public class ExecuteException extends ErrorException { - public ExecuteException(int errCode, String desc) { + public ColLengthExceedException(int errCode, String desc) { super(errCode, desc); } + + public ColLengthExceedException( + int errCode, String desc, String ip, int port, String serviceKind) { + super(errCode, desc, ip, port, serviceKind); + } + + public ColLengthExceedException(int errCode, String desc, Throwable t) { + super(errCode, desc, t); + } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColumnIndexExceedException.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColumnIndexExceedException.java new file mode 100644 index 0000000000..969b19d20b --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/ColumnIndexExceedException.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.exception; + +public class ColumnIndexExceedException extends StorageWarnException { + + public ColumnIndexExceedException(int errCode, String desc) { + super(errCode, desc); + } + + public ColumnIndexExceedException( + int errCode, String desc, String ip, int port, String serviceKind) { + super(errCode, desc, ip, port, serviceKind); + } + + public ColumnIndexExceedException(int errCode, String desc, Throwable t) { + super(errCode, desc, t); + } +} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java index 7a8452141b..ae66c1cf99 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java @@ -21,10 +21,8 @@ public enum StorageErrorCode { /** */ FS_NOT_INIT(53001, "please init first"), - INCONSISTENT_DATA(53001, "Inconsistent row data read,read %s,need rowLen %s"), FS_OOM(53002, "OOM occurred while reading the file"), - FS_ERROR(53003, "Failed to operation fs"), READ_PARQUET_FAILED(53004, "Failed to read parquet file"), diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildHDFSFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildHDFSFileSystem.java index 8103c6f3de..9f53a6249b 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildHDFSFileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildHDFSFileSystem.java @@ -21,7 +21,7 @@ import org.apache.linkis.storage.factory.BuildFactory; import org.apache.linkis.storage.fs.FileSystem; import org.apache.linkis.storage.fs.impl.HDFSFileSystem; -import 
org.apache.linkis.storage.io.IOMethodInterceptorFactory; +import org.apache.linkis.storage.io.IOMethodInterceptorCreator$; import org.apache.linkis.storage.utils.StorageUtils; import org.springframework.cglib.proxy.Enhancer; @@ -46,7 +46,7 @@ public Fs getFs(String user, String proxyUser) { // TODO Agent user(代理的用户) Enhancer enhancer = new Enhancer(); enhancer.setSuperclass(HDFSFileSystem.class.getSuperclass()); - enhancer.setCallback(IOMethodInterceptorFactory.getIOMethodInterceptor(fsName())); + enhancer.setCallback(IOMethodInterceptorCreator$.MODULE$.getIOMethodInterceptor(fsName())); fs = (FileSystem) enhancer.create(); } fs.setUser(proxyUser); @@ -63,6 +63,6 @@ public Fs getFs(String user, String proxyUser, String label) { @Override public String fsName() { - return StorageUtils.HDFS; + return StorageUtils.HDFS(); } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildLocalFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildLocalFileSystem.java index bcd61c5735..ef88cec36d 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildLocalFileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildLocalFileSystem.java @@ -21,7 +21,7 @@ import org.apache.linkis.storage.factory.BuildFactory; import org.apache.linkis.storage.fs.FileSystem; import org.apache.linkis.storage.fs.impl.LocalFileSystem; -import org.apache.linkis.storage.io.IOMethodInterceptorFactory; +import org.apache.linkis.storage.io.IOMethodInterceptorCreator$; import org.apache.linkis.storage.utils.StorageConfiguration; import org.apache.linkis.storage.utils.StorageUtils; @@ -34,7 +34,7 @@ public Fs getFs(String user, String proxyUser) { FileSystem fs = null; if (StorageUtils.isIOProxy()) { if (user.equals(proxyUser)) { - if ((Boolean) StorageConfiguration.IS_SHARE_NODE.getValue()) { + if ((Boolean) 
StorageConfiguration.IS_SHARE_NODE().getValue()) { fs = new LocalFileSystem(); } else { fs = getProxyFs(); @@ -58,12 +58,12 @@ public Fs getFs(String user, String proxyUser, String label) { private FileSystem getProxyFs() { Enhancer enhancer = new Enhancer(); enhancer.setSuperclass(LocalFileSystem.class.getSuperclass()); - enhancer.setCallback(IOMethodInterceptorFactory.getIOMethodInterceptor(fsName())); + enhancer.setCallback(IOMethodInterceptorCreator$.MODULE$.getIOMethodInterceptor(fsName())); return (FileSystem) enhancer.create(); } @Override public String fsName() { - return StorageUtils.FILE; + return StorageUtils.FILE(); } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildOSSSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildOSSSystem.java deleted file mode 100644 index ba1bd7abef..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildOSSSystem.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.factory.impl; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.storage.factory.BuildFactory; -import org.apache.linkis.storage.fs.impl.OSSFileSystem; -import org.apache.linkis.storage.utils.StorageUtils; - -import java.io.IOException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class BuildOSSSystem implements BuildFactory { - - private static final Logger LOG = LoggerFactory.getLogger(BuildOSSSystem.class); - - /** - * get file system - * - * @param user - * @param proxyUser - * @return - */ - @Override - public Fs getFs(String user, String proxyUser) { - OSSFileSystem fs = new OSSFileSystem(); - try { - fs.init(null); - } catch (IOException e) { - LOG.warn("get file system failed", e); - } - fs.setUser(user); - return fs; - } - - @Override - public Fs getFs(String user, String proxyUser, String label) { - OSSFileSystem fs = new OSSFileSystem(); - try { - fs.init(null); - } catch (IOException e) { - LOG.warn("get file system failed", e); - } - fs.setUser(proxyUser); - fs.setLabel(label); - return fs; - } - - @Override - public String fsName() { - return StorageUtils.OSS; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildS3FileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildS3FileSystem.java deleted file mode 100644 index 44082e5898..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildS3FileSystem.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.factory.impl; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.storage.factory.BuildFactory; -import org.apache.linkis.storage.fs.impl.S3FileSystem; -import org.apache.linkis.storage.utils.StorageUtils; - -import java.io.IOException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class BuildS3FileSystem implements BuildFactory { - private static final Logger LOG = LoggerFactory.getLogger(BuildS3FileSystem.class); - - @Override - public Fs getFs(String user, String proxyUser) { - S3FileSystem fs = new S3FileSystem(); - try { - fs.init(null); - } catch (IOException e) { - LOG.warn("get file system failed", e); - } - fs.setUser(user); - return fs; - } - - @Override - public Fs getFs(String user, String proxyUser, String label) { - S3FileSystem fs = new S3FileSystem(); - try { - fs.init(null); - } catch (IOException e) { - LOG.warn("get file system failed", e); - } - fs.setUser(user); - fs.setLabel(label); - return fs; - } - - @Override - public String fsName() { - return StorageUtils.S3; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java index 3067383b6c..ac828267bf 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java @@ -51,6 +51,10 @@ public String getDefaultFolderPerm() { public 
abstract long getUsableSpace(FsPath dest) throws IOException; + public abstract long getLength(FsPath dest) throws IOException; + + public abstract String checkSum(FsPath dest) throws IOException; + public abstract boolean canExecute(FsPath dest) throws IOException; public abstract boolean setOwner(FsPath dest, String user, String group) throws IOException; @@ -99,7 +103,7 @@ protected FsPath getParentPath(String path) { } else { parentPath = path.substring(0, path.lastIndexOf("/")); } - LOG.info("Get Parent Path:" + parentPath); + LOG.info("Get parent path:" + parentPath); return new FsPath(parentPath); } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/HDFSFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/HDFSFileSystem.java index f40d75c040..c4f4814149 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/HDFSFileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/HDFSFileSystem.java @@ -31,9 +31,7 @@ import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.*; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.security.UserGroupInformation; @@ -82,7 +80,7 @@ public long getUsableSpace(FsPath dest) throws IOException { @Override public boolean canExecute(FsPath dest) throws IOException { - return canAccess(dest, FsAction.EXECUTE); + return canAccess(dest, FsAction.EXECUTE, this.user); } @Override @@ -162,7 +160,8 @@ public FsPathListWithError listPathWithError(FsPath path) throws IOException { List fsPaths = new ArrayList(); for (FileStatus f : stat) { fsPaths.add( - fillStorageFile(new 
FsPath(StorageUtils.HDFS_SCHEMA + f.getPath().toUri().getPath()), f)); + fillStorageFile( + new FsPath(StorageUtils.HDFS_SCHEMA() + f.getPath().toUri().getPath()), f)); } if (fsPaths.isEmpty()) { return null; @@ -174,35 +173,39 @@ public FsPathListWithError listPathWithError(FsPath path) throws IOException { @Override public void init(Map properties) throws IOException { if (MapUtils.isNotEmpty(properties) - && properties.containsKey(StorageConfiguration.PROXY_USER.key())) { - user = StorageConfiguration.PROXY_USER.getValue(properties); + && properties.containsKey(StorageConfiguration.PROXY_USER().key())) { + user = StorageConfiguration.PROXY_USER().getValue(properties); + properties.remove(StorageConfiguration.PROXY_USER().key()); } if (user == null) { throw new IOException("User cannot be empty(用户不能为空)"); } - - if (label == null && (boolean) Configuration.IS_MULTIPLE_YARN_CLUSTER()) { - label = StorageConfiguration.LINKIS_STORAGE_FS_LABEL.getValue(); + if (label == null && Configuration.IS_MULTIPLE_YARN_CLUSTER()) { + label = StorageConfiguration.LINKIS_STORAGE_FS_LABEL().getValue(); } - conf = HDFSUtils.getConfigurationByLabel(user, label); - + /** if properties is null do not to create conf */ if (MapUtils.isNotEmpty(properties)) { - for (String key : properties.keySet()) { - String v = properties.get(key); - if (StringUtils.isNotEmpty(v)) { - conf.set(key, v); + conf = HDFSUtils.getConfigurationByLabel(user, label); + if (MapUtils.isNotEmpty(properties)) { + for (String key : properties.keySet()) { + String v = properties.get(key); + if (StringUtils.isNotEmpty(v)) { + conf.set(key, v); + } } } } - if (StorageConfiguration.FS_CACHE_DISABLE.getValue()) { - conf.set("fs.hdfs.impl.disable.cache", "true"); + if (null != conf) { + fs = HDFSUtils.getHDFSUserFileSystem(user, label, conf); + } else { + fs = HDFSUtils.getHDFSUserFileSystem(user, label); } - fs = HDFSUtils.getHDFSUserFileSystem(user, label, conf); + if (fs == null) { throw new IOException("init HDFS 
FileSystem failed!"); } - if (StorageConfiguration.FS_CHECKSUM_DISBALE.getValue()) { + if (StorageConfiguration.FS_CHECKSUM_DISBALE().getValue()) { fs.setVerifyChecksum(false); fs.setWriteChecksum(false); } @@ -215,7 +218,7 @@ public String fsName() { @Override public String rootUserName() { - return StorageConfiguration.HDFS_ROOT_USER.getValue(); + return StorageConfiguration.HDFS_ROOT_USER().getValue(); } @Override @@ -248,7 +251,6 @@ public OutputStream write(FsPath dest, boolean overwrite) throws IOException { return fs.append(new Path(path)); } else { OutputStream out = fs.create(new Path(path), true); - this.setPermission(dest, this.getDefaultFilePerm()); return out; } } @@ -293,17 +295,16 @@ public List list(FsPath path) throws IOException { @Override public boolean canRead(FsPath dest) throws IOException { - return canAccess(dest, FsAction.READ); + return canAccess(dest, FsAction.READ, this.user); } - @Override - public boolean canRead(FsPath fsPath, String s) throws IOException { - return false; + public boolean canRead(FsPath dest, String user) throws IOException { + return canAccess(dest, FsAction.READ, user); } @Override public boolean canWrite(FsPath dest) throws IOException { - return canAccess(dest, FsAction.WRITE); + return canAccess(dest, FsAction.WRITE, this.user); } @Override @@ -313,10 +314,10 @@ public boolean exists(FsPath dest) throws IOException { } catch (IOException e) { String message = e.getMessage(); String rootCauseMessage = ExceptionUtils.getRootCauseMessage(e); - if ((message != null && message.matches(LinkisStorageConf.HDFS_FILE_SYSTEM_REST_ERRS)) + if ((message != null && message.matches(LinkisStorageConf.HDFS_FILE_SYSTEM_REST_ERRS())) || (rootCauseMessage != null - && rootCauseMessage.matches(LinkisStorageConf.HDFS_FILE_SYSTEM_REST_ERRS))) { - logger.info("Failed to execute exists, retry", e); + && rootCauseMessage.matches(LinkisStorageConf.HDFS_FILE_SYSTEM_REST_ERRS()))) { + logger.info("Failed to execute exists for user {}, 
retry", user, e); resetRootHdfs(); return fs.exists(new Path(checkHDFSPath(dest.getPath()))); } else { @@ -334,8 +335,12 @@ private void resetRootHdfs() { } else { HDFSUtils.closeHDFSFIleSystem(fs, user, label); } - logger.warn(user + "FS reset close."); - fs = HDFSUtils.getHDFSUserFileSystem(user, label, conf); + logger.warn("{} FS reset close.", user); + if (null != conf) { + fs = HDFSUtils.getHDFSUserFileSystem(user, label, conf); + } else { + fs = HDFSUtils.getHDFSUserFileSystem(user, label); + } } } } @@ -362,7 +367,7 @@ public boolean renameTo(FsPath oldDest, FsPath newDest) throws IOException { @Override public void close() throws IOException { if (null != fs) { - HDFSUtils.closeHDFSFIleSystem(fs, user, label); + HDFSUtils.closeHDFSFIleSystem(fs, user); } else { logger.warn("FS was null, cannot close."); } @@ -388,7 +393,7 @@ private FsPath fillStorageFile(FsPath fsPath, FileStatus fileStatus) throws IOEx return fsPath; } - private boolean canAccess(FsPath fsPath, FsAction access) throws IOException { + private boolean canAccess(FsPath fsPath, FsAction access, String user) throws IOException { String path = checkHDFSPath(fsPath.getPath()); if (!exists(fsPath)) { throw new IOException("directory or file not exists: " + path); @@ -396,12 +401,12 @@ private boolean canAccess(FsPath fsPath, FsAction access) throws IOException { FileStatus f = fs.getFileStatus(new Path(path)); FsPermission permission = f.getPermission(); - UserGroupInformation ugi = HDFSUtils.getUserGroupInformation(user, label); + UserGroupInformation ugi = HDFSUtils.getUserGroupInformation(user); String[] groupNames; try { groupNames = ugi.getGroupNames(); } catch (NullPointerException e) { - if ((Boolean) Configuration.IS_TEST_MODE().getValue()) { + if ((Boolean) org.apache.linkis.common.conf.Configuration.IS_TEST_MODE().getValue()) { groupNames = new String[] {"hadoop"}; } else { throw e; @@ -433,9 +438,9 @@ public void setLabel(String label) { private String checkHDFSPath(String path) { try 
{ - boolean checkHdfsPath = (boolean) StorageConfiguration.HDFS_PATH_PREFIX_CHECK_ON.getValue(); + boolean checkHdfsPath = (boolean) StorageConfiguration.HDFS_PATH_PREFIX_CHECK_ON().getValue(); if (checkHdfsPath) { - boolean rmHdfsPrefix = (boolean) StorageConfiguration.HDFS_PATH_PREFIX_REMOVE.getValue(); + boolean rmHdfsPrefix = (boolean) StorageConfiguration.HDFS_PATH_PREFIX_REMOVE().getValue(); if (rmHdfsPrefix) { if (StringUtils.isBlank(path)) { return path; @@ -471,4 +476,21 @@ private String checkHDFSPath(String path) { } return path; } + + @Override + public long getLength(FsPath dest) throws IOException { + FileStatus fileStatus = fs.getFileStatus(new Path(checkHDFSPath(dest.getPath()))); + return fileStatus.getLen(); + } + + @Override + public String checkSum(FsPath dest) throws IOException { + String path = checkHDFSPath(dest.getPath()); + if (!exists(dest)) { + throw new IOException("directory or file not exists: " + path); + } + MD5MD5CRC32FileChecksum fileChecksum = + (MD5MD5CRC32FileChecksum) fs.getFileChecksum(new Path(path)); + return fileChecksum.toString().split(":")[1]; + } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java index 9516e90361..a03a25950e 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java @@ -47,12 +47,7 @@ import java.nio.file.attribute.PosixFilePermissions; import java.nio.file.attribute.UserPrincipal; import java.nio.file.attribute.UserPrincipalLookupService; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.Stack; +import java.util.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -113,7 +108,7 @@ 
public boolean setOwner(FsPath dest, String user, String group) throws IOExcepti if (group != null) { setGroup(dest, group); } - setGroup(dest, StorageConfiguration.STORAGE_USER_GROUP.getValue()); + setGroup(dest, StorageConfiguration.STORAGE_USER_GROUP().getValue()); return true; } @@ -136,7 +131,7 @@ public boolean setOwner(FsPath dest, String user) throws IOException { @Override public boolean setGroup(FsPath dest, String group) throws IOException { - LOG.info("Set group with path:" + dest.getPath() + "and group:" + user); + LOG.info("Set group with path:" + dest.getPath() + " and group:" + group); if (!StorageUtils.isIOProxy()) { LOG.info("io not proxy, setGroup skip"); return true; @@ -262,9 +257,17 @@ public FsPathListWithError listPathWithError(FsPath path) throws IOException { LOG.info("Try to list path:" + path.getPath() + " with error msg"); if (files != null) { List rtn = new ArrayList(); + Set fileNameSet = new HashSet<>(); + fileNameSet.add(path.getPath().trim()); String message = ""; for (File f : files) { try { + if (fileNameSet.contains(f.getPath())) { + LOG.info("File {} is duplicate", f.getPath()); + continue; + } else { + fileNameSet.add(f.getParent().trim()); + } rtn.add(get(f.getPath())); } catch (Throwable e) { LOG.warn("Failed to list path:", e); @@ -289,15 +292,15 @@ public void init(Map properties) throws IOException { if (MapUtils.isNotEmpty(properties)) { this.properties = properties; - if (properties.containsKey(StorageConfiguration.PROXY_USER.key())) { - user = StorageConfiguration.PROXY_USER.getValue(properties); + if (properties.containsKey(StorageConfiguration.PROXY_USER().key())) { + user = StorageConfiguration.PROXY_USER().getValue(properties); } - group = StorageConfiguration.STORAGE_USER_GROUP.getValue(properties); + group = StorageConfiguration.STORAGE_USER_GROUP().getValue(properties); } else { this.properties = new HashMap(); } if (FsPath.WINDOWS) { - group = StorageConfiguration.STORAGE_USER_GROUP.getValue(properties); + 
group = StorageConfiguration.STORAGE_USER_GROUP().getValue(properties); } if (StringUtils.isEmpty(group)) { String groupInfo; @@ -320,7 +323,7 @@ public String fsName() { @Override public String rootUserName() { - return StorageConfiguration.LOCAL_ROOT_USER.getValue(); + return StorageConfiguration.LOCAL_ROOT_USER().getValue(); } @Override @@ -423,7 +426,7 @@ public boolean canRead(FsPath dest) throws IOException { } @Override - public boolean canRead(FsPath fsPath, String s) throws IOException { + public boolean canRead(FsPath dest, String user) throws IOException { return false; } @@ -494,4 +497,16 @@ private String getOwner(String path) throws IOException { PosixFileAttributes attr = Files.readAttributes(Paths.get(path), PosixFileAttributes.class); return attr.owner().getName(); } + + @Override + public long getLength(FsPath dest) throws IOException { + String path = dest.getPath(); + LOG.info("Get file length with path:" + path); + return new File(path).length(); + } + + @Override + public String checkSum(FsPath dest) { + return null; + } } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/OSSFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/OSSFileSystem.java deleted file mode 100644 index 2d52b83049..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/OSSFileSystem.java +++ /dev/null @@ -1,398 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.fs.impl; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.hadoop.common.utils.HDFSUtils; -import org.apache.linkis.storage.conf.LinkisStorageConf; -import org.apache.linkis.storage.domain.FsPathListWithError; -import org.apache.linkis.storage.fs.FileSystem; -import org.apache.linkis.storage.utils.StorageConfiguration; -import org.apache.linkis.storage.utils.StorageUtils; - -import org.apache.commons.collections.MapUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.exception.ExceptionUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.aliyun.oss.AliyunOSSFileSystem; -import org.apache.hadoop.fs.permission.FsAction; -import org.apache.hadoop.fs.permission.FsPermission; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import com.google.common.collect.Maps; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class OSSFileSystem extends FileSystem { - - public static final String OSS_PREFIX = "oss://"; - private org.apache.hadoop.fs.aliyun.oss.AliyunOSSFileSystem fs = null; - private Configuration conf = null; - - private String label = null; - - private static final Logger logger = 
LoggerFactory.getLogger(OSSFileSystem.class); - - /** File System abstract method start */ - @Override - public String listRoot() throws IOException { - return "/"; - } - - @Override - public long getTotalSpace(FsPath dest) throws IOException { - return 0; - } - - @Override - public long getFreeSpace(FsPath dest) throws IOException { - return 0; - } - - @Override - public long getUsableSpace(FsPath dest) throws IOException { - return 0; - } - - @Override - public boolean canExecute(FsPath dest) throws IOException { - return true; - } - - @Override - public boolean setOwner(FsPath dest, String user, String group) throws IOException { - return true; - } - - @Override - public boolean setOwner(FsPath dest, String user) throws IOException { - return true; - } - - @Override - public boolean setGroup(FsPath dest, String group) throws IOException { - return true; - } - - @Override - public boolean mkdir(FsPath dest) throws IOException { - String path = checkOSSPath(dest.getPath()); - if (!canExecute(getParentPath(path))) { - throw new IOException("You have not permission to access path " + path); - } - boolean result = - fs.mkdirs(new Path(path), new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)); - this.setPermission(new FsPath(path), this.getDefaultFolderPerm()); - return result; - } - - @Override - public boolean mkdirs(FsPath dest) throws IOException { - String path = checkOSSPath(dest.getPath()); - FsPath parentPath = getParentPath(path); - while (!exists(parentPath)) { - parentPath = getParentPath(parentPath.getPath()); - } - return fs.mkdirs(new Path(path), new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)); - } - - @Override - public boolean setPermission(FsPath dest, String permission) throws IOException { - return true; - } - - @Override - public FsPathListWithError listPathWithError(FsPath path) throws IOException { - FileStatus[] stat = fs.listStatus(new Path(checkOSSPath(path.getPath()))); - List fsPaths = new ArrayList(); - for (FileStatus 
f : stat) { - fsPaths.add( - fillStorageFile( - new FsPath( - StorageUtils.OSS_SCHEMA - + StorageConfiguration.OSS_ACCESS_BUCKET_NAME.getValue() - + "/" - + f.getPath().toUri().getPath()), - f)); - } - if (fsPaths.isEmpty()) { - return null; - } - return new FsPathListWithError(fsPaths, ""); - } - - /** FS interface method start */ - @Override - public void init(Map properties) throws IOException { - // read origin configs from hadoop conf - if (label == null - && (boolean) org.apache.linkis.common.conf.Configuration.IS_MULTIPLE_YARN_CLUSTER()) { - label = StorageConfiguration.LINKIS_STORAGE_FS_LABEL.getValue(); - } - conf = HDFSUtils.getConfigurationByLabel(user, label); - - // origin configs - Map originProperties = Maps.newHashMap(); - originProperties.put("fs.oss.endpoint", StorageConfiguration.OSS_ENDPOINT.getValue()); - originProperties.put("fs.oss.accessKeyId", StorageConfiguration.OSS_ACCESS_KEY_ID.getValue()); - originProperties.put( - "fs.oss.accessKeySecret", StorageConfiguration.OSS_ACCESS_KEY_SECRET.getValue()); - for (String key : originProperties.keySet()) { - String value = originProperties.get(key); - if (StringUtils.isNotBlank(value)) { - conf.set(key, value); - } - } - - // additional configs - if (MapUtils.isNotEmpty(properties)) { - for (String key : properties.keySet()) { - String v = properties.get(key); - if (StringUtils.isNotBlank(v)) { - conf.set(key, v); - } - } - } - fs = new AliyunOSSFileSystem(); - try { - fs.initialize( - new URI(StorageUtils.OSS_SCHEMA + StorageConfiguration.OSS_ACCESS_BUCKET_NAME.getValue()), - conf); - } catch (URISyntaxException e) { - throw new IOException("init OSS FileSystem failed!"); - } - if (fs == null) { - throw new IOException("init OSS FileSystem failed!"); - } - } - - @Override - public String fsName() { - return StorageUtils.OSS; - } - - @Override - public String rootUserName() { - return null; - } - - @Override - public FsPath get(String dest) throws IOException { - String realPath = 
checkOSSPath(dest); - return fillStorageFile(new FsPath(realPath), fs.getFileStatus(new Path(realPath))); - } - - @Override - public InputStream read(FsPath dest) throws IOException { - if (!canRead(dest)) { - throw new IOException("You have not permission to access path " + dest.getPath()); - } - return fs.open(new Path(dest.getPath()), 128); - } - - @Override - public OutputStream write(FsPath dest, boolean overwrite) throws IOException { - String path = checkOSSPath(dest.getPath()); - if (!exists(dest)) { - if (!canWrite(dest.getParent())) { - throw new IOException("You have not permission to access path " + dest.getParent()); - } - } else { - if (!canWrite(dest)) { - throw new IOException("You have not permission to access path " + path); - } - } - OutputStream out = - fs.create( - new Path(path), - new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL), - overwrite, - 0, - (short) 0, - 0L, - null); - this.setPermission(dest, this.getDefaultFilePerm()); - return out; - } - - @Override - public boolean create(String dest) throws IOException { - if (!canExecute(getParentPath(dest))) { - throw new IOException("You have not permission to access path " + dest); - } - // to do - boolean result = fs.createNewFile(new Path(checkOSSPath(dest))); - this.setPermission(new FsPath(dest), this.getDefaultFilePerm()); - return result; - } - - @Override - public boolean copy(String origin, String dest) throws IOException { - if (!canExecute(getParentPath(dest))) { - throw new IOException("You have not permission to access path " + dest); - } - boolean res = - FileUtil.copy( - fs, - new Path(checkOSSPath(origin)), - fs, - new Path(checkOSSPath(dest)), - false, - true, - fs.getConf()); - this.setPermission(new FsPath(dest), this.getDefaultFilePerm()); - return res; - } - - @Override - public List list(FsPath path) throws IOException { - FileStatus[] stat = fs.listStatus(new Path(checkOSSPath(path.getPath()))); - List fsPaths = new ArrayList(); - for (FileStatus f : stat) { - 
fsPaths.add(fillStorageFile(new FsPath(f.getPath().toUri().toString()), f)); - } - return fsPaths; - } - - @Override - public boolean canRead(FsPath dest) throws IOException { - return true; - } - - @Override - public boolean canRead(FsPath fsPath, String s) throws IOException { - return false; - } - - @Override - public boolean canWrite(FsPath dest) throws IOException { - return true; - } - - @Override - public boolean exists(FsPath dest) throws IOException { - try { - return fs.exists(new Path(checkOSSPath(dest.getPath()))); - } catch (IOException e) { - String message = e.getMessage(); - String rootCauseMessage = ExceptionUtils.getRootCauseMessage(e); - if ((message != null && message.matches(LinkisStorageConf.HDFS_FILE_SYSTEM_REST_ERRS)) - || (rootCauseMessage != null - && rootCauseMessage.matches(LinkisStorageConf.HDFS_FILE_SYSTEM_REST_ERRS))) { - logger.info("Failed to execute exists, retry", e); - resetRootOSS(); - return fs.exists(new Path(checkOSSPath(dest.getPath()))); - } else { - throw e; - } - } - } - - private void resetRootOSS() throws IOException { - if (fs != null) { - synchronized (this) { - if (fs != null) { - fs.close(); - logger.warn(user + " FS reset close."); - init(null); - } - } - } - } - - @Override - public boolean delete(FsPath dest) throws IOException { - String path = checkOSSPath(dest.getPath()); - return fs.delete(new Path(path), true); - } - - @Override - public boolean renameTo(FsPath oldDest, FsPath newDest) throws IOException { - return fs.rename( - new Path(checkOSSPath(oldDest.getPath())), new Path(checkOSSPath(newDest.getPath()))); - } - - @Override - public void close() throws IOException { - if (null != fs) { - fs.close(); - } else { - logger.warn("FS was null, cannot close."); - } - } - - /** Utils method start */ - private FsPath fillStorageFile(FsPath fsPath, FileStatus fileStatus) throws IOException { - fsPath.setAccess_time(fileStatus.getAccessTime()); - fsPath.setModification_time(fileStatus.getModificationTime()); - 
fsPath.setOwner(fileStatus.getOwner()); - fsPath.setGroup(fileStatus.getGroup()); - fsPath.setIsdir(fileStatus.isDirectory()); - return fsPath; - } - - public String getLabel() { - return label; - } - - public void setLabel(String label) { - this.label = label; - } - - private static String checkOSSPath(String path) { - try { - boolean checkOSSPath = (boolean) StorageConfiguration.OSS_PATH_PREFIX_CHECK_ON.getValue(); - if (checkOSSPath) { - boolean rmOSSPrefix = (boolean) StorageConfiguration.OSS_PATH_PREFIX_REMOVE.getValue(); - if (rmOSSPrefix) { - if (StringUtils.isBlank(path)) { - return path; - } - if (path.startsWith(OSS_PREFIX)) { - int remainIndex = OSS_PREFIX.length(); - String[] t1 = path.substring(remainIndex).split("/", 2); - if (t1.length != 2) { - logger.warn("checkOSSPath Invalid path: " + path); - return path; - } - if (logger.isDebugEnabled()) { - logger.debug("checkOSSPath ori path : {}, after path : {}", path, "/" + t1[1]); - } - return "/" + t1[1]; - } else { - return path; - } - } - } - } catch (Exception e) { - logger.warn("checkOSSPath error. msg : " + e.getMessage() + " ", e); - } - return path; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/S3FileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/S3FileSystem.java deleted file mode 100644 index 1e7a90532c..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/S3FileSystem.java +++ /dev/null @@ -1,464 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.fs.impl; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.storage.domain.FsPathListWithError; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.fs.FileSystem; -import org.apache.linkis.storage.utils.StorageConfiguration; -import org.apache.linkis.storage.utils.StorageUtils; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; - -import java.io.*; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import com.amazonaws.auth.AWSStaticCredentialsProvider; -import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.client.builder.AwsClientBuilder; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3ClientBuilder; -import com.amazonaws.services.s3.model.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.TO_BE_UNKNOW; - -public class S3FileSystem extends FileSystem { - private static final Logger logger = LoggerFactory.getLogger(S3FileSystem.class); - private String accessKey; - private String secretKey; - - private String endPoint; - - private String region; - - private String bucket; - - private String label; - - private AmazonS3 s3Client; - - private static final String INIT_FILE_NAME = ".s3_dir_init"; - - @Override - public void init(Map properties) throws IOException { - accessKey = 
StorageConfiguration.S3_ACCESS_KEY.getValue(properties); - secretKey = StorageConfiguration.S3_SECRET_KEY.getValue(properties); - endPoint = StorageConfiguration.S3_ENDPOINT.getValue(properties); - bucket = StorageConfiguration.S3_BUCKET.getValue(properties); - region = StorageConfiguration.S3_REGION.getValue(properties); - - AwsClientBuilder.EndpointConfiguration endpointConfiguration = - new AwsClientBuilder.EndpointConfiguration(endPoint, region); - - BasicAWSCredentials basicAWSCredentials = new BasicAWSCredentials(accessKey, secretKey); - - AWSStaticCredentialsProvider StaticCredentials = - new AWSStaticCredentialsProvider(basicAWSCredentials); - - s3Client = - AmazonS3ClientBuilder.standard() - .withEndpointConfiguration(endpointConfiguration) - .withPathStyleAccessEnabled(true) - .withCredentials(StaticCredentials) - .build(); - } - - @Override - public String fsName() { - return StorageUtils.S3; - } - - @Override - public String rootUserName() { - return null; - } - - @Override - public FsPath get(String dest) throws IOException { - FsPath ret = new FsPath(dest); - if (exists(ret)) { - return ret; - } else { - logger.warn("File or folder does not exist or file name is garbled(文件或者文件夹不存在或者文件名乱码)"); - throw new StorageWarnException( - TO_BE_UNKNOW.getErrorCode(), - "File or folder does not exist or file name is garbled(文件或者文件夹不存在或者文件名乱码)"); - } - } - - @Override - public InputStream read(FsPath dest) throws IOException { - try { - return s3Client.getObject(bucket, buildPrefix(dest.getPath(), false)).getObjectContent(); - } catch (AmazonS3Exception e) { - throw new IOException("You have not permission to access path " + dest.getPath()); - } - } - - @Override - public OutputStream write(FsPath dest, boolean overwrite) throws IOException { - try (InputStream inputStream = read(dest); - OutputStream outputStream = - new S3OutputStream(s3Client, bucket, buildPrefix(dest.getPath(), false))) { - if (!overwrite) { - IOUtils.copy(inputStream, outputStream); - } - 
return outputStream; - } - } - - @Override - public boolean create(String dest) throws IOException { - if (exists(new FsPath(dest))) { - return false; - } - s3Client.putObject(bucket, dest, ""); - return true; - } - - @Override - public List list(FsPath path) throws IOException { - try { - if (!StringUtils.isEmpty(path.getPath())) { - ListObjectsV2Result listObjectsV2Result = s3Client.listObjectsV2(bucket, path.getPath()); - List s3ObjectSummaries = listObjectsV2Result.getObjectSummaries(); - return s3ObjectSummaries.stream() - .filter(summary -> !isInitFile(summary)) - .map( - summary -> { - FsPath newPath = new FsPath(buildPath(summary.getKey())); - return fillStorageFile(newPath, summary); - }) - .collect(Collectors.toList()); - } - } catch (AmazonS3Exception e) { - throw new IOException("You have not permission to access path " + path.getPath()); - } - - return new ArrayList<>(); - } - - @Override - public FsPathListWithError listPathWithError(FsPath path) throws IOException { - return listPathWithError(path, true); - } - - public FsPathListWithError listPathWithError(FsPath path, boolean ignoreInitFile) - throws IOException { - List rtn = new ArrayList<>(); - try { - if (!StringUtils.isEmpty(path.getPath())) { - ListObjectsV2Request listObjectsV2Request = - new ListObjectsV2Request() - .withBucketName(bucket) - .withPrefix(buildPrefix(path.getPath())) - .withDelimiter("/"); - ListObjectsV2Result dirResult = s3Client.listObjectsV2(listObjectsV2Request); - List s3ObjectSummaries = dirResult.getObjectSummaries(); - List commonPrefixes = dirResult.getCommonPrefixes(); - if (s3ObjectSummaries != null) { - for (S3ObjectSummary summary : s3ObjectSummaries) { - if (isInitFile(summary) && ignoreInitFile) continue; - FsPath newPath = new FsPath(buildPath(summary.getKey())); - rtn.add(fillStorageFile(newPath, summary)); - } - } - if (commonPrefixes != null) { - for (String dir : commonPrefixes) { - FsPath newPath = new FsPath(buildPath(dir)); - newPath.setIsdir(true); - 
rtn.add(newPath); - } - } - return new FsPathListWithError(rtn, ""); - } - } catch (AmazonS3Exception e) { - throw new IOException("You have not permission to access path " + path.getPath()); - } - - return null; - } - - @Override - public boolean exists(FsPath dest) throws IOException { - try { - if (new File(dest.getPath()).getName().contains(".")) { - return existsFile(dest); - } - ListObjectsV2Request listObjectsV2Request = new ListObjectsV2Request(); - listObjectsV2Request - .withBucketName(bucket) - .withPrefix(buildPrefix(dest.getPath())) - .withDelimiter("/"); - return s3Client.listObjectsV2(listObjectsV2Request).getObjectSummaries().size() - + s3Client.listObjectsV2(listObjectsV2Request).getCommonPrefixes().size() - > 0; - } catch (AmazonS3Exception e) { - return false; - } - } - - public boolean existsFile(FsPath dest) { - try { - return s3Client.doesObjectExist(bucket, buildPrefix(dest.getPath(), false)); - } catch (AmazonS3Exception e) { - return false; - } - } - - @Override - public boolean delete(FsPath dest) throws IOException { - try { - ListObjectsV2Request listObjectsV2Request = new ListObjectsV2Request(); - listObjectsV2Request.withBucketName(bucket).withPrefix(buildPrefix(dest.getPath(), false)); - ListObjectsV2Result result = s3Client.listObjectsV2(listObjectsV2Request); - String[] keyList = - result.getObjectSummaries().stream().map(S3ObjectSummary::getKey).toArray(String[]::new); - DeleteObjectsRequest deleteObjectsRequest = - new DeleteObjectsRequest("test").withKeys(keyList); - s3Client.deleteObjects(deleteObjectsRequest); - return true; - } catch (AmazonS3Exception e) { - throw new IOException("You have not permission to access path " + dest.getPath()); - } - } - - @Override - public boolean renameTo(FsPath oldDest, FsPath newDest) throws IOException { - try { - String newOriginPath = buildPrefix(oldDest.getPath(), false); - String newDestPath = buildPrefix(newDest.getPath(), false); - ListObjectsV2Request listObjectsV2Request = new 
ListObjectsV2Request(); - listObjectsV2Request.withBucketName(bucket).withPrefix(newOriginPath); - ListObjectsV2Result result = s3Client.listObjectsV2(listObjectsV2Request); - List keyList = - result.getObjectSummaries().stream() - .map(S3ObjectSummary::getKey) - .collect(Collectors.toList()); - List newKeyList = - keyList.stream() - .map(key -> key.replaceFirst(newOriginPath, newDestPath)) - .collect(Collectors.toList()); - for (int i = 0; i < keyList.size(); i++) { - String key = keyList.get(i); - String newKey = newKeyList.get(i); - s3Client.copyObject(bucket, key, bucket, newKey); - s3Client.deleteObject(bucket, key); - } - return true; - } catch (AmazonS3Exception e) { - s3Client.deleteObject(bucket, newDest.getPath()); - throw new IOException( - "You have not permission to access path " - + oldDest.getPath() - + " or " - + newDest.getPath()); - } - } - - @Override - public boolean copy(String origin, String dest) throws IOException { - try { - String newOrigin = buildPrefix(origin, false); - String newDest = buildPrefix(dest, false); - ListObjectsV2Request listObjectsV2Request = new ListObjectsV2Request(); - listObjectsV2Request.withBucketName(bucket).withPrefix(newOrigin); - ListObjectsV2Result result = s3Client.listObjectsV2(listObjectsV2Request); - List keyList = - result.getObjectSummaries().stream() - .map(S3ObjectSummary::getKey) - .collect(Collectors.toList()); - List newKeyList = - keyList.stream() - .map(key -> key.replaceFirst(newOrigin, newDest)) - .collect(Collectors.toList()); - for (int i = 0; i < keyList.size(); i++) { - String key = keyList.get(i); - String newKey = newKeyList.get(i); - s3Client.copyObject(bucket, key, bucket, newKey); - } - return true; - } catch (AmazonS3Exception e) { - throw new IOException("You have not permission to access path " + origin + " or " + dest); - } - } - - private boolean isDir(S3ObjectSummary s3ObjectSummary, String prefix) { - return s3ObjectSummary.getKey().substring(prefix.length()).contains("/"); - } - - 
private boolean isInitFile(S3ObjectSummary s3ObjectSummary) { - return s3ObjectSummary.getKey().contains(INIT_FILE_NAME); - } - - @Override - public String listRoot() { - return "/"; - } - - @Override - public boolean mkdir(FsPath dest) throws IOException { - String path = new File(dest.getPath(), INIT_FILE_NAME).getPath(); - if (exists(new FsPath(path))) { - return false; - } - return create(path); - } - - @Override - public boolean mkdirs(FsPath dest) throws IOException { - return mkdir(dest); - } - - private FsPath fillStorageFile(FsPath fsPath, S3ObjectSummary s3ObjectSummary) { - fsPath.setModification_time(s3ObjectSummary.getLastModified().getTime()); - Owner owner = s3ObjectSummary.getOwner(); - if (owner != null) { - fsPath.setOwner(owner.getDisplayName()); - } - try { - fsPath.setIsdir(isDir(s3ObjectSummary, fsPath.getParent().getPath())); - } catch (Throwable e) { - logger.warn("Failed to fill storage file:" + fsPath.getPath(), e); - } - - if (fsPath.isdir()) { - fsPath.setLength(0); - } else { - fsPath.setLength(s3ObjectSummary.getSize()); - } - return fsPath; - } - - @Override - public boolean canRead(FsPath dest) { - return true; - } - - @Override - public boolean canRead(FsPath fsPath, String s) throws IOException { - return false; - } - - @Override - public boolean canWrite(FsPath dest) { - return true; - } - - @Override - public long getTotalSpace(FsPath dest) { - return 0; - } - - @Override - public long getFreeSpace(FsPath dest) { - return 0; - } - - @Override - public long getUsableSpace(FsPath dest) { - return 0; - } - - @Override - public boolean canExecute(FsPath dest) { - return true; - } - - @Override - public boolean setOwner(FsPath dest, String user, String group) { - return false; - } - - @Override - public boolean setOwner(FsPath dest, String user) { - return false; - } - - @Override - public boolean setGroup(FsPath dest, String group) { - return false; - } - - @Override - public boolean setPermission(FsPath dest, String permission) { - 
return false; - } - - @Override - public void close() throws IOException {} - - public String getLabel() { - return label; - } - - public void setLabel(String label) { - this.label = label; - } - - public String buildPath(String path) { - if (path == null || "".equals(path)) return ""; - if (path.startsWith("/")) { - return StorageUtils.S3_SCHEMA + path; - } - return StorageUtils.S3_SCHEMA + "/" + path; - } - - public String buildPrefix(String path, boolean addTail) { - String res = path; - if (path == null || "".equals(path)) return ""; - if (path.startsWith("/")) { - res = path.replaceFirst("/", ""); - } - if (!path.endsWith("/") && addTail) { - res = res + "/"; - } - return res; - } - - public String buildPrefix(String path) { - return buildPrefix(path, true); - } -} - -class S3OutputStream extends ByteArrayOutputStream { - private AmazonS3 s3Client; - private String bucket; - private String path; - - public S3OutputStream(AmazonS3 s3Client, String bucket, String path) { - this.s3Client = s3Client; - this.bucket = bucket; - this.path = path; - } - - @Override - public void close() throws IOException { - byte[] buffer = this.toByteArray(); - try (InputStream in = new ByteArrayInputStream(buffer)) { - s3Client.putObject(bucket, path, in, new ObjectMetadata()); - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOMethodInterceptorFactory.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOMethodInterceptorFactory.java deleted file mode 100644 index 8f0c4016d1..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOMethodInterceptorFactory.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.io; - -import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary; -import org.apache.linkis.storage.exception.StorageWarnException; - -import org.springframework.cglib.proxy.MethodInterceptor; - -public class IOMethodInterceptorFactory { - - private static IOMethodInterceptorCreator interceptorCreator = null; - - private IOMethodInterceptorFactory() {} - - /** - * This method is called when ioClient is initialized. 
ioClient初始化时会调用该方法 - * - * @param interceptorCreator - */ - public static void register(IOMethodInterceptorCreator interceptorCreator) { - IOMethodInterceptorFactory.interceptorCreator = interceptorCreator; - } - - public static MethodInterceptor getIOMethodInterceptor(String fsName) - throws StorageWarnException { - if (IOMethodInterceptorFactory.interceptorCreator == null) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.MUST_REGISTER_TOM.getErrorCode(), - LinkisStorageErrorCodeSummary.MUST_REGISTER_TOM.getErrorDesc()); - } - return IOMethodInterceptorFactory.interceptorCreator.createIOMethodInterceptor(fsName); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/DefaultResultSetFactory.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/DefaultResultSetFactory.java deleted file mode 100644 index da51a2f13d..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/DefaultResultSetFactory.java +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.storage.FSFactory; -import org.apache.linkis.storage.conf.LinkisStorageConf; -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.utils.StorageConfiguration; -import org.apache.linkis.storage.utils.StorageUtils; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; - -import java.io.IOException; -import java.io.InputStream; -import java.text.MessageFormat; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Locale; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.THE_FILE_IS_EMPTY; - -public class DefaultResultSetFactory implements ResultSetFactory { - - private static final Logger logger = LoggerFactory.getLogger(DefaultResultSetFactory.class); - - private final Map>> resultClasses; - - private final String[] resultTypes; - - public DefaultResultSetFactory() { - resultClasses = - StorageUtils.loadClasses( - StorageConfiguration.STORAGE_RESULT_SET_CLASSES.getValue(), - StorageConfiguration.STORAGE_RESULT_SET_PACKAGE.getValue(), - t -> { - try { - return t.newInstance().resultSetType().toLowerCase(Locale.getDefault()); - } catch (InstantiationException e) { - logger.warn("DefaultResultSetFactory init failed", e); - } catch (IllegalAccessException e) { - logger.warn("DefaultResultSetFactory init failed", e); - } - return null; - }); - resultTypes = ResultSetFactory.resultSetType.keySet().toArray(new String[0]); - } - - 
@Override - public ResultSet getResultSetByType(String resultSetType) { - if (!resultClasses.containsKey(resultSetType)) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.UNSUPPORTED_RESULT.getErrorCode(), - MessageFormat.format( - LinkisStorageErrorCodeSummary.UNSUPPORTED_RESULT.getErrorDesc(), resultSetType)); - } - try { - return resultClasses.get(resultSetType).newInstance(); - } catch (InstantiationException | IllegalAccessException e) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.UNSUPPORTED_RESULT.getErrorCode(), - MessageFormat.format( - LinkisStorageErrorCodeSummary.UNSUPPORTED_RESULT.getErrorDesc(), resultSetType), - e); - } - } - - @Override - public ResultSet getResultSetByPath(FsPath fsPath) - throws StorageWarnException { - return getResultSetByPath(fsPath, StorageUtils.getJvmUser()); - } - - @Override - public ResultSet getResultSetByContent(String content) { - return getResultSetByType(Dolphin.getType(content)); - } - - @Override - public boolean exists(String resultSetType) { - return resultClasses.containsKey(resultSetType); - } - - @Override - public boolean isResultSetPath(String path) { - return path.endsWith(Dolphin.DOLPHIN_FILE_SUFFIX) - || path.endsWith(LinkisStorageConf.PARQUET_FILE_SUFFIX) - || path.endsWith(LinkisStorageConf.ORC_FILE_SUFFIX); - } - - @Override - public boolean isResultSet(String content) { - try { - return resultClasses.containsKey(Dolphin.getType(content)); - } catch (Exception e) { - logger.info("Wrong result Set: " + e.getMessage()); - return false; - } - } - - @Override - public ResultSet getResultSet(String output) - throws StorageWarnException { - return getResultSet(output, StorageUtils.getJvmUser()); - } - - @Override - public String[] getResultSetType() { - return Arrays.copyOf(resultTypes, resultTypes.length); - } - - @Override - public ResultSet getResultSetByPath(FsPath fsPath, Fs fs) { - ResultSet resultSet = null; - try (InputStream inputStream = fs.read(fsPath)) { - 
if (fsPath.getPath().endsWith(Dolphin.DOLPHIN_FILE_SUFFIX)) { - String resultSetType = Dolphin.getType(inputStream); - if (StringUtils.isEmpty(resultSetType)) { - throw new StorageWarnException( - THE_FILE_IS_EMPTY.getErrorCode(), - MessageFormat.format(THE_FILE_IS_EMPTY.getErrorDesc(), fsPath.getPath())); - } - // Utils.tryQuietly(fs::close); - resultSet = getResultSetByType(resultSetType); - } else if (fsPath.getPath().endsWith(LinkisStorageConf.PARQUET_FILE_SUFFIX) - || fsPath.getPath().endsWith(LinkisStorageConf.ORC_FILE_SUFFIX)) { - resultSet = getResultSetByType(ResultSetFactory.TABLE_TYPE); - } - return resultSet; - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - @Override - public ResultSet getResultSetByPath( - FsPath fsPath, String proxyUser) { - if (fsPath == null) { - return null; - } - logger.info("Get Result Set By Path:" + fsPath.getPath()); - try (Fs fs = FSFactory.getFsByProxyUser(fsPath, proxyUser)) { - fs.init(new HashMap<>()); - try (InputStream inputStream = fs.read(fsPath)) { - String resultSetType = Dolphin.getType(inputStream); - if (StringUtils.isEmpty(resultSetType)) { - throw new StorageWarnException( - THE_FILE_IS_EMPTY.getErrorCode(), - MessageFormat.format(THE_FILE_IS_EMPTY.getErrorDesc(), fsPath.getPath())); - } - IOUtils.closeQuietly(inputStream); - return getResultSetByType(resultSetType); - } catch (IOException e) { - throw new RuntimeException(e); - } - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - @Override - public ResultSet getResultSet( - String output, String proxyUser) { - if (isResultSetPath(output)) { - return getResultSetByPath(new FsPath(output), proxyUser); - } else if (isResultSet(output)) { - return getResultSetByContent(output); - } else { - return null; - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/OrcResultSetReader.java 
b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/OrcResultSetReader.java deleted file mode 100644 index 249e326cde..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/OrcResultSetReader.java +++ /dev/null @@ -1,212 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetReader; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.domain.DataType; -import org.apache.linkis.storage.resultset.table.TableMetaData; -import org.apache.linkis.storage.resultset.table.TableRecord; - -import org.apache.commons.io.IOUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.orc.OrcFile; -import org.apache.orc.Reader; -import org.apache.orc.RecordReader; -import org.apache.orc.TypeDescription; -import org.apache.orc.storage.ql.exec.vector.*; - -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class OrcResultSetReader - extends ResultSetReader { - - private static final Logger logger = LoggerFactory.getLogger(OrcResultSetReader.class); - - private FsPath fsPath; - - private final ResultSet resultSet; - - private final InputStream inputStream; - - private MetaData metaData; - - private Record row; - - private RecordReader rows; - - private Reader reader; - - public OrcResultSetReader(ResultSet resultSet, InputStream inputStream, FsPath fsPath) - throws IOException { - super(resultSet, inputStream); - this.resultSet = resultSet; - this.inputStream = inputStream; - this.fsPath = fsPath; - this.reader = - OrcFile.createReader( - new Path(fsPath.getPath()), OrcFile.readerOptions(new Configuration())); - this.rows = reader.rows(); - } - - @Override - public MetaData getMetaData() { - if (metaData == null) { - try { - List fieldNames = reader.getSchema().getFieldNames(); - List typeDescriptions = reader.getSchema().getChildren(); - List columnList = 
new ArrayList<>(); - for (int i = 0; i < fieldNames.size(); i++) { - Column column = - new Column( - fieldNames.get(i), - DataType.toDataType(typeDescriptions.get(i).getCategory().getName()), - ""); - columnList.add(column); - } - - metaData = new TableMetaData(columnList.toArray(new Column[0])); - } catch (Exception e) { - throw new RuntimeException("Failed to read parquet schema", e); - } - } - return metaData; - } - - @Override - public int skip(int recordNum) throws IOException { - if (recordNum < 0) return -1; - - for (int i = recordNum; i > 0; i--) { - try { - hasNext(); - } catch (Throwable t) { - return recordNum - i; - } - } - return recordNum; - } - - @Override - public long getPosition() throws IOException { - throw new UnsupportedOperationException("Storeage Unsupported type: getPosition"); - } - - @Override - public long available() throws IOException { - throw new UnsupportedOperationException("Storeage Unsupported type: available"); - } - - @Override - public boolean hasNext() throws IOException { - if (metaData == null) getMetaData(); - if (rows == null) return false; - VectorizedRowBatch batch = - reader.getSchema().createRowBatch(TypeDescription.RowBatchVersion.ORIGINAL, 1); - TableMetaData tableMetaData = (TableMetaData) metaData; - - if (rows.nextBatch(batch)) { - int rowNum = 0; - Object[] rowData = new Object[tableMetaData.getColumns().length]; - for (int i = 0; i < batch.numCols; i++) { - ColumnVector columnVector = batch.cols[i]; - if (columnVector instanceof BytesColumnVector) { - BytesColumnVector vector = (BytesColumnVector) columnVector; - rowData[i] = vector.toString(rowNum); - } else if (columnVector instanceof Decimal64ColumnVector) { - Decimal64ColumnVector vector = (Decimal64ColumnVector) columnVector; - rowData[i] = vector.vector[rowNum]; - } else if (columnVector instanceof DecimalColumnVector) { - DecimalColumnVector vector = (DecimalColumnVector) columnVector; - rowData[i] = vector.vector[rowNum]; - } else if (columnVector 
instanceof DoubleColumnVector) { - DoubleColumnVector vector = (DoubleColumnVector) columnVector; - rowData[i] = vector.vector[rowNum]; - } else if (columnVector instanceof ListColumnVector) { - ListColumnVector vector = (ListColumnVector) columnVector; - StringBuilder builder = new StringBuilder(); - vector.stringifyValue(builder, rowNum); - rowData[i] = builder.toString(); - } else if (columnVector instanceof IntervalDayTimeColumnVector) { - IntervalDayTimeColumnVector vector = (IntervalDayTimeColumnVector) columnVector; - StringBuilder builder = new StringBuilder(); - vector.stringifyValue(builder, rowNum); - rowData[i] = builder.toString(); - } else if (columnVector instanceof LongColumnVector) { - LongColumnVector vector = (LongColumnVector) columnVector; - rowData[i] = vector.vector[rowNum]; - } else if (columnVector instanceof MapColumnVector) { - MapColumnVector vector = (MapColumnVector) columnVector; - StringBuilder builder = new StringBuilder(); - vector.stringifyValue(builder, rowNum); - rowData[i] = builder.toString(); - } else if (columnVector instanceof MultiValuedColumnVector) { - MultiValuedColumnVector vector = (MultiValuedColumnVector) columnVector; - StringBuilder builder = new StringBuilder(); - vector.stringifyValue(builder, rowNum); - rowData[i] = builder.toString(); - } else if (columnVector instanceof StructColumnVector) { - StructColumnVector vector = (StructColumnVector) columnVector; - StringBuilder builder = new StringBuilder(); - vector.stringifyValue(builder, rowNum); - rowData[i] = builder.toString(); - } else if (columnVector instanceof TimestampColumnVector) { - TimestampColumnVector vector = (TimestampColumnVector) columnVector; - rowData[i] = vector.time[rowNum]; - } else if (columnVector instanceof UnionColumnVector) { - UnionColumnVector vector = (UnionColumnVector) columnVector; - StringBuilder builder = new StringBuilder(); - vector.stringifyValue(builder, rowNum); - rowData[i] = builder.toString(); - } - } - row = new 
TableRecord(rowData); - } else { - return false; - } - return row != null; - } - - @Override - public Record getRecord() { - if (metaData == null) throw new RuntimeException("Must read metadata first(必须先读取metadata)"); - if (row == null) { - throw new RuntimeException( - "Can't get the value of the field, maybe the IO stream has been read or has been closed!(拿不到字段的值,也许IO流已读取完毕或已被关闭!)"); - } - return row; - } - - @Override - public void close() throws IOException { - IOUtils.closeQuietly(inputStream); - rows.close(); - reader.close(); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/OrcResultSetWriter.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/OrcResultSetWriter.java deleted file mode 100644 index c4809f6499..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/OrcResultSetWriter.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetWriter; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.resultset.table.TableMetaData; -import org.apache.linkis.storage.resultset.table.TableRecord; -import org.apache.linkis.storage.utils.OrcUtils; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.orc.CompressionKind; -import org.apache.orc.OrcFile; -import org.apache.orc.TypeDescription; -import org.apache.orc.Writer; -import org.apache.orc.storage.ql.exec.vector.VectorizedRowBatch; - -import java.io.IOException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class OrcResultSetWriter - extends ResultSetWriter { - private static final Logger logger = LoggerFactory.getLogger(OrcResultSetWriter.class); - - private TypeDescription schema; - - private Writer writer; - - private boolean moveToWriteRow = false; - - private MetaData metaData = null; - - private final FsPath storePath; - - private final long maxCacheSize; - - private final ResultSet resultSet; - - public OrcResultSetWriter(ResultSet resultSet, long maxCacheSize, FsPath storePath) { - super(resultSet, maxCacheSize, storePath); - this.resultSet = resultSet; - this.maxCacheSize = maxCacheSize; - this.storePath = storePath; - } - - @Override - public void addMetaData(MetaData metaData) throws IOException { - if (!moveToWriteRow) { - this.metaData = metaData; - if (this.schema == null) { - this.schema = TypeDescription.createStruct(); - } - TableMetaData tableMetaData = (TableMetaData) this.metaData; - for (Column column : tableMetaData.columns) { - schema.addField(column.getColumnName(), OrcUtils.dataTypeToOrcType(column.getDataType())); - } - 
moveToWriteRow = true; - if (writer == null) { - writer = - OrcFile.createWriter( - new Path(storePath.getPath()), - OrcFile.writerOptions(new Configuration()) - .setSchema(schema) - .compress(CompressionKind.ZLIB) - .version(OrcFile.Version.V_0_12)); - } - } - } - - @Override - public void addRecord(Record record) { - if (moveToWriteRow) { - TableRecord tableRecord = (TableRecord) record; - TableMetaData tableMetaData = (TableMetaData) metaData; - try { - Object[] row = tableRecord.row; - VectorizedRowBatch batch = schema.createRowBatch(); - int rowCount = batch.size++; - - for (int i = 0; i < row.length; i++) { - OrcUtils.setColumn( - rowCount, batch.cols[i], tableMetaData.columns[i].getDataType(), row[i]); - if (batch.size == batch.getMaxSize()) { - writer.addRowBatch(batch); - batch.reset(); - } - } - writer.addRowBatch(batch); - - } catch (IOException e) { - logger.warn("addMetaDataAndRecordString failed", e); - } - } - } - - @Override - public FsPath toFSPath() { - return storePath; - } - - @Override - public String toString() { - return storePath.getSchemaPath(); - } - - @Override - public void addMetaDataAndRecordString(String content) {} - - @Override - public void addRecordString(String content) {} - - @Override - public void close() throws IOException { - writer.close(); - } - - @Override - public void flush() throws IOException {} -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ParquetResultSetReader.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ParquetResultSetReader.java deleted file mode 100644 index c09804294d..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ParquetResultSetReader.java +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetReader; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.domain.DataType; -import org.apache.linkis.storage.resultset.table.TableMetaData; -import org.apache.linkis.storage.resultset.table.TableRecord; - -import org.apache.avro.Schema; -import org.apache.avro.generic.GenericRecord; -import org.apache.commons.io.IOUtils; -import org.apache.hadoop.fs.Path; -import org.apache.parquet.avro.AvroParquetReader; -import org.apache.parquet.hadoop.ParquetReader; - -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.List; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ParquetResultSetReader - extends ResultSetReader { - - private static final Logger logger = LoggerFactory.getLogger(ParquetResultSetReader.class); - - private FsPath fsPath; - - private final ResultSet resultSet; - - private final InputStream inputStream; - - private MetaData 
metaData; - - private Record row; - - private ParquetReader parquetReader; - - private GenericRecord record; - - public ParquetResultSetReader(ResultSet resultSet, InputStream inputStream, FsPath fsPath) - throws IOException { - super(resultSet, inputStream); - this.resultSet = resultSet; - this.inputStream = inputStream; - this.fsPath = fsPath; - this.parquetReader = - AvroParquetReader.builder(new Path(fsPath.getPath())).build(); - this.record = parquetReader.read(); - } - - @Override - public MetaData getMetaData() { - if (metaData == null) { - try { - List fields = record.getSchema().getFields(); - List columnList = - fields.stream() - .map( - field -> - new Column( - field.name(), - DataType.toDataType(field.schema().getType().getName()), - "")) - .collect(Collectors.toList()); - - metaData = new TableMetaData(columnList.toArray(new Column[0])); - } catch (Exception e) { - throw new RuntimeException("Failed to read parquet schema", e); - } - } - return metaData; - } - - @Override - public int skip(int recordNum) throws IOException { - if (recordNum < 0) return -1; - - for (int i = recordNum; i > 0; i--) { - try { - this.record = parquetReader.read(); - } catch (Throwable t) { - return recordNum - i; - } - } - return recordNum; - } - - @Override - public long getPosition() throws IOException { - throw new UnsupportedOperationException("Storeage Unsupported type: getPosition"); - } - - @Override - public long available() throws IOException { - throw new UnsupportedOperationException("Storeage Unsupported type: available"); - } - - @Override - public boolean hasNext() throws IOException { - if (metaData == null) getMetaData(); - if (record == null) return false; - ArrayList resultList = new ArrayList<>(); - TableMetaData tableMetaData = (TableMetaData) metaData; - int length = tableMetaData.getColumns().length; - for (int i = 0; i < length; i++) { - resultList.add(record.get(i)); - } - row = new TableRecord(resultList.toArray(new Object[0])); - if (row == null) 
return false; - return record != null; - } - - @Override - public Record getRecord() { - if (metaData == null) throw new RuntimeException("Must read metadata first(必须先读取metadata)"); - if (row == null) { - throw new RuntimeException( - "Can't get the value of the field, maybe the IO stream has been read or has been closed!(拿不到字段的值,也许IO流已读取完毕或已被关闭!)"); - } - try { - this.record = parquetReader.read(); - } catch (IOException e) { - throw new RuntimeException("Failed to read parquet record", e); - } - return row; - } - - @Override - public void close() throws IOException { - IOUtils.closeQuietly(inputStream); - parquetReader.close(); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ParquetResultSetWriter.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ParquetResultSetWriter.java deleted file mode 100644 index 6fbac3c8cb..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ParquetResultSetWriter.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetWriter; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.resultset.table.TableMetaData; -import org.apache.linkis.storage.resultset.table.TableRecord; - -import org.apache.avro.Schema; -import org.apache.avro.SchemaBuilder; -import org.apache.avro.generic.GenericData; -import org.apache.avro.generic.GenericRecord; -import org.apache.hadoop.fs.Path; -import org.apache.parquet.avro.AvroParquetWriter; -import org.apache.parquet.hadoop.ParquetFileWriter; -import org.apache.parquet.hadoop.ParquetWriter; -import org.apache.parquet.hadoop.metadata.CompressionCodecName; - -import java.io.IOException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ParquetResultSetWriter - extends ResultSetWriter { - private static final Logger logger = LoggerFactory.getLogger(ParquetResultSetWriter.class); - - private Schema schema; - - private ParquetWriter parquetWriter; - - private boolean moveToWriteRow = false; - - private MetaData metaData = null; - - private final FsPath storePath; - - private final long maxCacheSize; - - private final ResultSet resultSet; - - public ParquetResultSetWriter(ResultSet resultSet, long maxCacheSize, FsPath storePath) { - super(resultSet, maxCacheSize, storePath); - this.resultSet = resultSet; - this.maxCacheSize = maxCacheSize; - this.storePath = storePath; - } - - @Override - public void addMetaData(MetaData metaData) throws IOException { - if (!moveToWriteRow) { - this.metaData = metaData; - SchemaBuilder.FieldAssembler fieldAssembler = SchemaBuilder.record("linkis").fields(); - TableMetaData tableMetaData = (TableMetaData) this.metaData; - for (Column column : tableMetaData.columns) { - 
fieldAssembler - .name(column.getColumnName().replaceAll("\\.", "_").replaceAll("[^a-zA-Z0-9_]", "")) - .doc(column.getComment()) - .type(column.getDataType().getTypeName().toLowerCase()) - .noDefault(); - } - schema = fieldAssembler.endRecord(); - moveToWriteRow = true; - if (parquetWriter == null) { - parquetWriter = - AvroParquetWriter.builder(new Path(storePath.getPath())) - .withSchema(schema) - .withCompressionCodec(CompressionCodecName.SNAPPY) - .withWriteMode(ParquetFileWriter.Mode.OVERWRITE) - .build(); - } - } - } - - @Override - public void addRecord(Record record) { - if (moveToWriteRow) { - TableRecord tableRecord = (TableRecord) record; - try { - Object[] row = tableRecord.row; - GenericRecord genericRecord = new GenericData.Record(schema); - for (int i = 0; i < row.length; i++) { - genericRecord.put(schema.getFields().get(i).name(), row[i]); - } - parquetWriter.write(genericRecord); - } catch (IOException e) { - logger.warn("addMetaDataAndRecordString failed", e); - } - } - } - - @Override - public FsPath toFSPath() { - return storePath; - } - - @Override - public String toString() { - return storePath.getSchemaPath(); - } - - @Override - public void addMetaDataAndRecordString(String content) {} - - @Override - public void addRecordString(String content) {} - - @Override - public void close() throws IOException { - parquetWriter.close(); - } - - @Override - public void flush() throws IOException {} -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetFactory.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetFactory.java deleted file mode 100644 index ed65cea16c..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetFactory.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.*; -import org.apache.linkis.common.io.resultset.ResultSet; - -import java.util.LinkedHashMap; -import java.util.Map; - -public interface ResultSetFactory { - - String TEXT_TYPE = "1"; - String TABLE_TYPE = "2"; - String IO_TYPE = "3"; - String PICTURE_TYPE = "4"; - String HTML_TYPE = "5"; - - /** TODO 修改为注册形式,并修改ResultSet的getResultType逻辑 Result set corresponding type record(结果集对应类型记录) */ - Map resultSetType = - new LinkedHashMap() { - { - put(TEXT_TYPE, "TEXT"); - put(TABLE_TYPE, "TABLE"); - put(IO_TYPE, "IO"); - put(PICTURE_TYPE, "PICTURE"); - put(HTML_TYPE, "HTML"); - } - }; - - DefaultResultSetFactory factory = new DefaultResultSetFactory(); - - static ResultSetFactory getInstance() { - return factory; - } - - ResultSet getResultSetByType(String resultSetType); - - ResultSet getResultSetByPath(FsPath fsPath); - - ResultSet getResultSetByPath(FsPath fsPath, Fs fs); - - ResultSet getResultSetByContent(String content); - - boolean exists(String resultSetType); - - boolean isResultSetPath(String path); - - boolean isResultSet(String content); - - ResultSet getResultSet(String output); - - ResultSet getResultSetByPath( - FsPath fsPath, String proxyUser); - - ResultSet getResultSet(String 
output, String proxyUser); - - String[] getResultSetType(); -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetReaderFactory.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetReaderFactory.java deleted file mode 100644 index bf46f49f9b..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetReaderFactory.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetReader; -import org.apache.linkis.storage.FSFactory; -import org.apache.linkis.storage.conf.LinkisStorageConf; -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary; -import org.apache.linkis.storage.exception.StorageErrorCode; -import org.apache.linkis.storage.exception.StorageReadException; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.resultset.table.TableMetaData; -import org.apache.linkis.storage.resultset.table.TableRecord; -import org.apache.linkis.storage.resultset.table.TableResultSet; - -import java.io.IOException; -import java.io.InputStream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ResultSetReaderFactory { - private static final Logger logger = LoggerFactory.getLogger(ResultSetReaderFactory.class); - - public static ResultSetReader getResultSetReader( - ResultSet resultSet, InputStream inputStream, FsPath fsPath) { - ResultSetReader resultSetReader = null; - if (fsPath.getPath().endsWith(Dolphin.DOLPHIN_FILE_SUFFIX)) { - resultSetReader = new StorageResultSetReader<>(resultSet, inputStream); - } else if (fsPath.getPath().endsWith(LinkisStorageConf.PARQUET_FILE_SUFFIX)) { - try { - resultSetReader = new ParquetResultSetReader<>(resultSet, inputStream, fsPath); - } catch (IOException e) { - throw new StorageReadException( - StorageErrorCode.READ_PARQUET_FAILED.getCode(), - StorageErrorCode.READ_PARQUET_FAILED.getMessage(), - e); - } - } else if (fsPath.getPath().endsWith(LinkisStorageConf.ORC_FILE_SUFFIX)) { - try { - resultSetReader = new 
OrcResultSetReader<>(resultSet, inputStream, fsPath); - } catch (IOException e) { - throw new StorageReadException( - StorageErrorCode.READ_ORC_FAILED.getCode(), - StorageErrorCode.READ_ORC_FAILED.getMessage(), - e); - } - } - return resultSetReader; - } - - public static ResultSetReader getResultSetReader( - ResultSet resultSet, String value) { - return new StorageResultSetReader<>(resultSet, value); - } - - public static ResultSetReader getResultSetReader(String res) throws IOException { - ResultSetFactory rsFactory = ResultSetFactory.getInstance(); - if (rsFactory.isResultSet(res)) { - ResultSet resultSet = rsFactory.getResultSet(res); - return ResultSetReaderFactory.getResultSetReader(resultSet, res); - } else { - FsPath resPath = new FsPath(res); - ResultSet resultSet = - rsFactory.getResultSetByPath(resPath); - Fs fs = FSFactory.getFs(resPath); - fs.init(null); - ResultSetReader reader = - ResultSetReaderFactory.getResultSetReader(resultSet, fs.read(resPath), resPath); - if (reader instanceof StorageResultSetReader) { - ((StorageResultSetReader) reader).setFs(fs); - } - return (StorageResultSetReader) reader; - } - } - - public static ResultSetReader getTableResultReader(String res) { - ResultSetFactory rsFactory = ResultSetFactory.getInstance(); - if (rsFactory.isResultSet(res)) { - ResultSet resultSet = rsFactory.getResultSet(res); - if (!ResultSetFactory.TABLE_TYPE.equals(resultSet.resultSetType())) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED.getErrorCode(), - LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED.getErrorDesc()); - } - return ResultSetReaderFactory.getResultSetReader( - (TableResultSet) resultSet, res); - } else { - FsPath resPath = new FsPath(res); - ResultSet resultSet = rsFactory.getResultSetByPath(resPath); - if (!ResultSetFactory.TABLE_TYPE.equals(resultSet.resultSetType())) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED.getErrorCode(), - 
LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED.getErrorDesc()); - } - - Fs fs = FSFactory.getFs(resPath); - logger.info("Try to init Fs with path:{}", resPath.getPath()); - try { - fs.init(null); - InputStream read = fs.read(resPath); - - return ResultSetReaderFactory.getResultSetReader( - (TableResultSet) resultSet, read, resPath); - } catch (IOException e) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED.getErrorCode(), - LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED.getErrorDesc()); - } - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetWriterFactory.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetWriterFactory.java deleted file mode 100644 index 9cb1999367..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetWriterFactory.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetReader; -import org.apache.linkis.common.io.resultset.ResultSetWriter; -import org.apache.linkis.storage.conf.LinkisStorageConf; -import org.apache.linkis.storage.resultset.table.TableResultSet; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ResultSetWriterFactory { - private static final Logger logger = LoggerFactory.getLogger(ResultSetWriterFactory.class); - - public static ResultSetWriter getResultSetWriter( - ResultSet resultSet, long maxCacheSize, FsPath storePath) { - String engineResultType = LinkisStorageConf.ENGINE_RESULT_TYPE; - ResultSetWriter writer = null; - if (engineResultType.equals(LinkisStorageConf.PARQUET) && resultSet instanceof TableResultSet) { - writer = new ParquetResultSetWriter<>(resultSet, maxCacheSize, storePath); - } else if (engineResultType.equals(LinkisStorageConf.ORC) - && resultSet instanceof TableResultSet) { - writer = new OrcResultSetWriter<>(resultSet, maxCacheSize, storePath); - } else { - writer = new StorageResultSetWriter<>(resultSet, maxCacheSize, storePath); - } - return writer; - } - - public static ResultSetWriter getResultSetWriter( - ResultSet resultSet, long maxCacheSize, FsPath storePath, String proxyUser) { - String engineResultType = LinkisStorageConf.ENGINE_RESULT_TYPE; - ResultSetWriter writer = null; - if (engineResultType.equals(LinkisStorageConf.PARQUET) && resultSet instanceof TableResultSet) { - writer = new ParquetResultSetWriter<>(resultSet, maxCacheSize, storePath); - } else if (engineResultType.equals(LinkisStorageConf.ORC) - && resultSet instanceof TableResultSet) { - writer = new 
OrcResultSetWriter<>(resultSet, maxCacheSize, storePath); - } else { - writer = new StorageResultSetWriter<>(resultSet, maxCacheSize, storePath); - StorageResultSetWriter storageResultSetWriter = (StorageResultSetWriter) writer; - storageResultSetWriter.setProxyUser(proxyUser); - } - return writer; - } - - public static Record[] getRecordByWriter( - ResultSetWriter writer, long limit) throws IOException { - String res = writer.toString(); - return getRecordByRes(res, limit); - } - - public static Record[] getRecordByRes(String res, long limit) throws IOException { - ResultSetReader reader = ResultSetReaderFactory.getResultSetReader(res); - int count = 0; - List records = new ArrayList<>(); - reader.getMetaData(); - while (reader.hasNext() && count < limit) { - records.add(reader.getRecord()); - count++; - } - return records.toArray(new Record[0]); - } - - public static Record getLastRecordByRes(String res) throws IOException { - ResultSetReader reader = ResultSetReaderFactory.getResultSetReader(res); - Record record = null; - try { - reader.getMetaData(); - while (reader.hasNext()) { - record = reader.getRecord(); - } - } catch (IOException e) { - logger.warn("ResultSetWriter getLastRecordByRes failed", e); - } - return record; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSet.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSet.java deleted file mode 100644 index c708f5faf6..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSet.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.storage.conf.LinkisStorageConf; -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.resultset.table.TableResultSet; -import org.apache.linkis.storage.utils.StorageConfiguration; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public abstract class StorageResultSet - implements ResultSet { - - private static final Logger logger = LoggerFactory.getLogger(StorageResultSet.class); - - private byte[] resultHeaderBytes = null; - - { - byte[] arr2 = Dolphin.getIntBytes(Integer.parseInt(resultSetType())); - byte[] mergedArray = new byte[Dolphin.MAGIC_BYTES.length + arr2.length]; - System.arraycopy(Dolphin.MAGIC_BYTES, 0, mergedArray, 0, Dolphin.MAGIC_BYTES.length); - System.arraycopy(arr2, 0, mergedArray, Dolphin.MAGIC_BYTES.length, arr2.length); - resultHeaderBytes = mergedArray; - } - - @Override - public String charset() { - return StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue(); - } - - @Override - public FsPath getResultSetPath(FsPath parentDir, String fileName) { - String engineResultType = LinkisStorageConf.ENGINE_RESULT_TYPE; - String fileSuffix = 
Dolphin.DOLPHIN_FILE_SUFFIX; - if (engineResultType.equals(LinkisStorageConf.PARQUET) && this instanceof TableResultSet) { - fileSuffix = LinkisStorageConf.PARQUET_FILE_SUFFIX; - } else if (engineResultType.equals(LinkisStorageConf.ORC) && this instanceof TableResultSet) { - fileSuffix = LinkisStorageConf.ORC_FILE_SUFFIX; - } - final String path = - parentDir.getPath().endsWith("/") - ? parentDir.getUriString() + fileName + fileSuffix - : parentDir.getUriString() + "/" + fileName + fileSuffix; - logger.info("Get result set path: {}", path); - return new FsPath(path); - } - - @Override - public byte[] getResultSetHeader() { - return resultHeaderBytes; - } - - @Override - public boolean belongToPath(String path) { - return path.endsWith(Dolphin.DOLPHIN_FILE_SUFFIX); - } - - @Override - public boolean belongToResultSet(String content) { - try { - return Dolphin.getType(content).equals(resultSetType()); - } catch (Exception e) { - logger.info("Wrong result set: ", e); - return false; - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetReader.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetReader.java deleted file mode 100644 index c0222cc848..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetReader.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetReader; -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.utils.StorageUtils; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class StorageResultSetReader - extends ResultSetReader { - - private static final Logger logger = LoggerFactory.getLogger(StorageResultSetReader.class); - - private final ResultSet resultSet; - private final InputStream inputStream; - private final ResultDeserializer deserializer; - private K metaData; - private Record row; - private int colCount = 0; - private int rowCount = 0; - private Fs fs; - - private final int READ_CACHE = 1024; - - public StorageResultSetReader(ResultSet resultSet, InputStream inputStream) { - super(resultSet, inputStream); - this.resultSet = resultSet; - this.inputStream = inputStream; - this.deserializer = resultSet.createResultSetDeserializer(); - } - - public StorageResultSetReader(ResultSet resultSet, String value) { - this(resultSet, new 
ByteArrayInputStream(value.getBytes(Dolphin.CHAR_SET))); - } - - public void init() throws IOException { - String resType = Dolphin.getType(inputStream); - if (!StringUtils.equals(resultSet.resultSetType(), resType)) { - throw new RuntimeException( - "File type does not match(文件类型不匹配): " - + ResultSetFactory.resultSetType.getOrDefault(resType, "TABLE")); - } - } - - public byte[] readLine() { - int rowLen = 0; - try { - rowLen = Dolphin.readInt(inputStream); - } catch (StorageWarnException | IOException e) { - logger.info("Read finished(读取完毕)"); - return null; - } - - int len = 0; - byte[] rowBuffer = null; - try { - rowBuffer = new byte[rowLen]; - len = StorageUtils.readBytes(inputStream, rowBuffer, rowLen); - } catch (OutOfMemoryError error) { - logger.error("Result set read oom, read size {} Byte", rowLen); - throw new RuntimeException(error); - } - if (len != rowLen) { - throw new RuntimeException( - "Can't get the value of the field, maybe the IO stream has been read or has been closed!(拿不到字段的值,也许IO流已读取完毕或已被关闭!)"); - } - rowCount++; - return rowBuffer; - } - - @Override - public Record getRecord() { - if (metaData == null) throw new RuntimeException("Must read metadata first(必须先读取metadata)"); - if (row == null) { - throw new RuntimeException( - "Can't get the value of the field, maybe the IO stream has been read or has been closed!(拿不到字段的值,也许IO流已读取完毕或已被关闭!)"); - } - return row; - } - - public void setFs(Fs fs) { - this.fs = fs; - } - - public Fs getFs() { - return fs; - } - - @Override - public MetaData getMetaData() { - if (metaData == null) { - try { - init(); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - metaData = deserializer.createMetaData(readLine()); - return metaData; - } - - @Override - public int skip(int recordNum) throws IOException { - if (recordNum < 0) return -1; - - if (metaData == null) getMetaData(); - for (int i = recordNum; i > 0; i--) { - try { - inputStream.skip(Dolphin.readInt(inputStream)); - } catch (Throwable 
t) { - return recordNum - i; - } - } - return recordNum; - } - - @Override - public long getPosition() throws IOException { - return rowCount; - } - - @Override - public boolean hasNext() throws IOException { - if (metaData == null) getMetaData(); - byte[] line = readLine(); - if (line == null) return false; - row = deserializer.createRecord(line); - if (row == null) return false; - return true; - } - - @Override - public long available() throws IOException { - return inputStream.available(); - } - - @Override - public void close() throws IOException { - IOUtils.closeQuietly(inputStream); - if (this.fs != null) { - this.fs.close(); - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetWriter.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetWriter.java deleted file mode 100644 index 230c68301c..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetWriter.java +++ /dev/null @@ -1,268 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetWriter; -import org.apache.linkis.storage.FSFactory; -import org.apache.linkis.storage.conf.LinkisStorageConf; -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.exception.StorageErrorException; -import org.apache.linkis.storage.utils.FileSystemUtils; -import org.apache.linkis.storage.utils.StorageUtils; - -import org.apache.commons.io.IOUtils; -import org.apache.hadoop.hdfs.client.HdfsDataOutputStream; - -import java.io.IOException; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.storage.exception.StorageErrorCode.FS_ERROR; - -public class StorageResultSetWriter - extends ResultSetWriter { - private static final Logger logger = LoggerFactory.getLogger(StorageResultSetWriter.class); - - private final ResultSet resultSet; - private final long maxCacheSize; - private final FsPath storePath; - - private final ResultSerializer serializer; - private boolean moveToWriteRow = false; - private OutputStream outputStream = null; - private int rowCount = 0; - private final List buffer = new ArrayList(); - private Fs fs = null; - private MetaData rMetaData = null; - private String proxyUser = StorageUtils.getJvmUser(); - private boolean fileCreated = false; - private boolean closed = false; - private final Object WRITER_LOCK_CREATE = new Object(); - private final Object WRITER_LOCK_CLOSE = new Object(); - - public StorageResultSetWriter(ResultSet resultSet, long maxCacheSize, FsPath storePath) { - super(resultSet, 
maxCacheSize, storePath); - this.resultSet = resultSet; - this.maxCacheSize = maxCacheSize; - this.storePath = storePath; - - this.serializer = resultSet.createResultSetSerializer(); - } - - public MetaData getMetaData() { - return rMetaData; - } - - public void setProxyUser(String proxyUser) { - this.proxyUser = proxyUser; - } - - public boolean isEmpty() { - return rMetaData == null && buffer.size() <= Dolphin.FILE_EMPTY; - } - - public void init() { - try { - writeLine(resultSet.getResultSetHeader(), true); - } catch (IOException e) { - logger.warn("StorageResultSetWriter init failed", e); - } - } - - public void createNewFile() { - if (!fileCreated) { - synchronized (WRITER_LOCK_CREATE) { - if (!fileCreated) { - if (storePath != null && outputStream == null) { - logger.info("Try to create a new file:{}, with proxy user:{}", storePath, proxyUser); - fs = FSFactory.getFsByProxyUser(storePath, proxyUser); - try { - fs.init(null); - FileSystemUtils.createNewFile(storePath, proxyUser, true); - outputStream = fs.write(storePath, true); - } catch (Exception e) { - throw new StorageErrorException( - FS_ERROR.getCode(), "StorageResultSetWriter createNewFile failed", e); - } - logger.info("Succeed to create a new file:{}", storePath); - fileCreated = true; - } - } - } - } else if (storePath != null && outputStream == null) { - logger.warn("outputStream had been set null, but createNewFile() was called again."); - } - } - - public void writeLine(byte[] bytes, boolean cache) throws IOException { - if (closed) { - logger.warn("the writer had been closed, but writeLine() was still called."); - return; - } - if (bytes.length > LinkisStorageConf.ROW_BYTE_MAX_LEN) { - throw new IOException( - String.format( - "A single row of data cannot exceed %s", LinkisStorageConf.ROW_BYTE_MAX_LEN_STR)); - } - if (buffer.size() > maxCacheSize && !cache) { - if (outputStream == null) { - createNewFile(); - } - flush(); - outputStream.write(bytes); - } else { - for (byte b : bytes) { - 
buffer.add(b); - } - } - } - - @Override - public String toString() { - if (outputStream == null) { - if (isEmpty()) { - return ""; - } - - byte[] byteArray = getBytes(); - return new String(byteArray, Dolphin.CHAR_SET); - } - return storePath.getSchemaPath(); - } - - private byte[] getBytes() { - byte[] byteArray = new byte[buffer.size()]; - for (int i = 0; i < buffer.size(); i++) { - byteArray[i] = buffer.get(i); - } - return byteArray; - } - - @Override - public FsPath toFSPath() { - return storePath; - } - - @Override - public void addMetaDataAndRecordString(String content) { - if (!moveToWriteRow) { - byte[] bytes = content.getBytes(Dolphin.CHAR_SET); - try { - writeLine(bytes, false); - } catch (IOException e) { - logger.warn("addMetaDataAndRecordString failed", e); - } - } - moveToWriteRow = true; - } - - @Override - public void addRecordString(String content) {} - - @Override - public void addMetaData(MetaData metaData) throws IOException { - if (!moveToWriteRow) { - rMetaData = metaData; - init(); - if (metaData == null) { - writeLine(serializer.metaDataToBytes(metaData), true); - } else { - writeLine(serializer.metaDataToBytes(metaData), false); - } - moveToWriteRow = true; - } - } - - @Override - public void addRecord(Record record) { - if (moveToWriteRow) { - rowCount++; - try { - writeLine(serializer.recordToBytes(record), false); - } catch (IOException e) { - logger.warn("addMetaDataAndRecordString failed", e); - } - } - } - - public void closeFs() { - if (fs != null) { - IOUtils.closeQuietly(fs); - fs = null; - } - } - - @Override - public void close() { - if (closed) { - logger.warn("the writer had been closed, but close() was still called."); - return; - } - synchronized (WRITER_LOCK_CLOSE) { - if (!closed) { - closed = true; - } else { - return; - } - } - try { - if (outputStream != null) { - flush(); - } - } finally { - if (outputStream != null) { - IOUtils.closeQuietly(outputStream); - outputStream = null; - } - closeFs(); - } - } - - @Override 
- public void flush() { - createNewFile(); - if (outputStream != null) { - try { - if (!buffer.isEmpty()) { - outputStream.write(getBytes()); - buffer.clear(); - } - if (outputStream instanceof HdfsDataOutputStream) { - ((HdfsDataOutputStream) outputStream).hflush(); - } else { - outputStream.flush(); - } - } catch (IOException e) { - logger.warn("Error encountered when flush result set", e); - } - } - if (closed && logger.isDebugEnabled()) { - logger.debug("the writer had been closed, but flush() was still called."); - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/html/HtmlResultSet.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/html/HtmlResultSet.java deleted file mode 100644 index 00c0e7b2a7..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/html/HtmlResultSet.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset.html; - -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.resultset.ResultSetFactory; -import org.apache.linkis.storage.resultset.StorageResultSet; -import org.apache.linkis.storage.resultset.txt.TextResultDeserializer; -import org.apache.linkis.storage.resultset.txt.TextResultSerializer; - -import java.io.Serializable; - -public class HtmlResultSet extends StorageResultSet - implements Serializable { - - @Override - public String resultSetType() { - return ResultSetFactory.HTML_TYPE; - } - - @Override - public ResultSerializer createResultSetSerializer() { - return new TextResultSerializer(); - } - - @Override - public ResultDeserializer createResultSetDeserializer() { - return new TextResultDeserializer(); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultDeserializer.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultDeserializer.java deleted file mode 100644 index 02d83aa7bb..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultDeserializer.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.io; - -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.storage.domain.Dolphin; - -import org.apache.commons.codec.binary.Base64; - -public class IOResultDeserializer extends ResultDeserializer { - - @Override - public IOMetaData createMetaData(byte[] bytes) { - String[] values = Dolphin.getString(bytes, 0, bytes.length).split(Dolphin.COL_SPLIT); - return new IOMetaData(Integer.parseInt(values[0]), Integer.parseInt(values[1])); - } - - @Override - public IORecord createRecord(byte[] bytes) { - return new IORecord(Base64.decodeBase64(Dolphin.getString(bytes, 0, bytes.length))); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSerializer.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSerializer.java deleted file mode 100644 index 2401e361a5..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSerializer.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.io; - -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.utils.StorageUtils; - -import org.apache.commons.codec.binary.Base64; - -public class IOResultSerializer extends ResultSerializer { - - @Override - public byte[] metaDataToBytes(MetaData metaData) { - IOMetaData ioMetaData = (IOMetaData) metaData; - return lineToBytes(ioMetaData.off + Dolphin.COL_SPLIT + ioMetaData.len); - } - - @Override - public byte[] recordToBytes(Record record) { - IORecord ioRecord = (IORecord) record; - return lineToBytes(Base64.encodeBase64String(ioRecord.value)); - } - - private byte[] lineToBytes(String value) { - byte[] bytes = value == null ? Dolphin.NULL_BYTES : Dolphin.getBytes(value); - byte[] intBytes = Dolphin.getIntBytes(bytes.length); - return StorageUtils.mergeByteArrays(intBytes, bytes); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSet.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSet.java deleted file mode 100644 index 67f8e76904..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/io/IOResultSet.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.io; - -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.storage.resultset.ResultSetFactory; -import org.apache.linkis.storage.resultset.StorageResultSet; - -import java.io.Serializable; - -public class IOResultSet extends StorageResultSet implements Serializable { - - @Override - public String resultSetType() { - return ResultSetFactory.IO_TYPE; - } - - @Override - public ResultSerializer createResultSetSerializer() { - return new IOResultSerializer(); - } - - @Override - public ResultDeserializer createResultSetDeserializer() { - return new IOResultDeserializer(); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/picture/PictureResultSet.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/picture/PictureResultSet.java deleted file mode 100644 index 5e73592a7b..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/picture/PictureResultSet.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.picture; - -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.resultset.ResultSetFactory; -import org.apache.linkis.storage.resultset.StorageResultSet; -import org.apache.linkis.storage.resultset.txt.TextResultDeserializer; -import org.apache.linkis.storage.resultset.txt.TextResultSerializer; - -import java.io.Serializable; - -public class PictureResultSet extends StorageResultSet - implements Serializable { - - @Override - public String resultSetType() { - return ResultSetFactory.PICTURE_TYPE; - } - - @Override - public ResultSerializer createResultSetSerializer() { - return new TextResultSerializer(); - } - - @Override - public ResultDeserializer createResultSetDeserializer() { - return new TextResultDeserializer(); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultDeserializer.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultDeserializer.java deleted file mode 100644 index 7e1d6c35fe..0000000000 --- 
a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultDeserializer.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.table; - -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.domain.DataType; -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.exception.StorageWarnException; - -import java.util.ArrayList; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.PARSING_METADATA_FAILED; - -public class TableResultDeserializer extends ResultDeserializer { - private static final Logger logger = LoggerFactory.getLogger(TableResultDeserializer.class); - - private TableMetaData metaData; - - @Override - public TableMetaData createMetaData(byte[] bytes) { - int colByteLen = Integer.parseInt(Dolphin.getString(bytes, 0, Dolphin.INT_LEN)); - String colString = Dolphin.getString(bytes, Dolphin.INT_LEN, colByteLen); - String[] colArray = - 
colString.endsWith(Dolphin.COL_SPLIT) - ? colString.substring(0, colString.length() - 1).split(Dolphin.COL_SPLIT) - : colString.split(Dolphin.COL_SPLIT); - int index = Dolphin.INT_LEN + colByteLen; - if (colArray.length % 3 != 0) { - throw new StorageWarnException( - PARSING_METADATA_FAILED.getErrorCode(), PARSING_METADATA_FAILED.getErrorDesc()); - } - List columns = new ArrayList<>(); - for (int i = 0; i < colArray.length; i += 3) { - int len = Integer.parseInt(colArray[i]); - String colName = Dolphin.toStringValue(Dolphin.getString(bytes, index, len)); - index += len; - len = Integer.parseInt(colArray[i + 1]); - String colType = Dolphin.toStringValue(Dolphin.getString(bytes, index, len)); - index += len; - len = Integer.parseInt(colArray[i + 2]); - String colComment = Dolphin.toStringValue(Dolphin.getString(bytes, index, len)); - index += len; - columns.add(new Column(colName, DataType.toDataType(colType), colComment)); - } - metaData = new TableMetaData(columns.toArray(new Column[0])); - return metaData; - } - - /** - * colByteLen:All column fields are long(所有列字段长 记录的长度) colString:Obtain column - * length(获得列长):10,20,21 colArray:Column length array(列长数组) Get data by column length(通过列长获得数据) - * - * @param bytes - * @return - */ - @Override - public TableRecord createRecord(byte[] bytes) { - int colByteLen = Integer.parseInt(Dolphin.getString(bytes, 0, Dolphin.INT_LEN)); - String colString = Dolphin.getString(bytes, Dolphin.INT_LEN, colByteLen); - String[] colArray; - if (colString.endsWith(Dolphin.COL_SPLIT)) { - colArray = colString.substring(0, colString.length() - 1).split(Dolphin.COL_SPLIT); - } else { - colArray = colString.split(Dolphin.COL_SPLIT); - } - int index = Dolphin.INT_LEN + colByteLen; - Object[] data = new Object[colArray.length]; - for (int i = 0; i < colArray.length; i++) { - int len = Integer.parseInt(colArray[i]); - String res = Dolphin.getString(bytes, index, len); - index += len; - if (i >= metaData.columns.length) { - data[i] = res; - } 
else { - data[i] = DataType.toValue(metaData.columns[i].getDataType(), res); - } - } - return new TableRecord(data); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultSerializer.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultSerializer.java deleted file mode 100644 index 5f40aa33f3..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultSerializer.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset.table; - -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.domain.Dolphin; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -public class TableResultSerializer extends ResultSerializer { - - @Override - public byte[] metaDataToBytes(MetaData metaData) { - TableMetaData tableMetaData = (TableMetaData) metaData; - Object[] objects = - Arrays.stream(tableMetaData.columns).map(Column::toArray).flatMap(Arrays::stream).toArray(); - return lineToBytes(objects); - } - - @Override - public byte[] recordToBytes(Record record) { - TableRecord tableRecord = (TableRecord) record; - return lineToBytes(tableRecord.row); - } - - /** - * Convert a row of data to an array of Bytes Convert the data to byte and get the corresponding - * total byte length to write to the file Data write format: line length (fixed length) column - * length (fixed length) field index comma segmentation real data For example: - * 000000004900000000116,10,3,4,5,peace1johnnwang1101true11.51 The length of the line does not - * include its own length 将一行数据转换为Bytes的数组 对数据转换为byte,并获取相应的总byte长度写入文件 数据写入格式:行长(固定长度) 列长(固定长度) - * 字段索引逗号分割 真实数据 如:000000004900000000116,10,3,4,5,peace1johnnwang1101true11.51 其中行长不包括自身长度 - * - * @param line - */ - private byte[] lineToBytes(Object[] line) { - // Data cache(数据缓存) - List dataBytes = new ArrayList<>(); - // Column cache(列缓存) - List colIndex = new ArrayList<>(); - int colByteLen = 0; - int length = 0; - for (Object data : line) { - byte[] bytes = data == null ? 
Dolphin.LINKIS_NULL_BYTES : Dolphin.getBytes(data); - dataBytes.add(bytes); - byte[] colBytes = Dolphin.getBytes(bytes.length); - colIndex.add(colBytes); - colIndex.add(Dolphin.COL_SPLIT_BYTES); - colByteLen += colBytes.length + Dolphin.COL_SPLIT_LEN; - length += bytes.length; - } - length += colByteLen + Dolphin.INT_LEN; - return toByteArray(length, colByteLen, colIndex, dataBytes); - } - - /** - * Splice a row of data into a byte array(将一行的数据拼接成byte数组) - * - * @param length The total length of the line data byte, excluding its own - * length(行数据byte总长度,不包括自身的长度) - * @param colByteLen Record field index byte column length(记录字段索引byte的列长) - * @param colIndex Field index, including separator comma(字段索引,包括分割符逗号) - * @param dataBytes Byte of real data(真实数据的byte) - * @return - */ - public static byte[] toByteArray( - int length, int colByteLen, List colIndex, List dataBytes) { - List row = new ArrayList<>(); - colIndex.addAll(dataBytes); - - for (byte intByte : Dolphin.getIntBytes(length)) { - row.add(intByte); - } - - for (byte colByte : Dolphin.getIntBytes(colByteLen)) { - row.add(colByte); - } - - colIndex.forEach( - bytes -> { - for (byte b : bytes) { - row.add(b); - } - }); - byte[] result = new byte[row.size()]; - for (int i = 0; i < result.length; i++) { - result[i] = row.get(i); - } - return result; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultSet.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultSet.java deleted file mode 100644 index 5f356af0ca..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/table/TableResultSet.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.table; - -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.storage.resultset.ResultSetFactory; -import org.apache.linkis.storage.resultset.StorageResultSet; - -import java.io.Serializable; - -public class TableResultSet extends StorageResultSet - implements Serializable { - - @Override - public String resultSetType() { - return ResultSetFactory.TABLE_TYPE; - } - - @Override - public ResultSerializer createResultSetSerializer() { - return new TableResultSerializer(); - } - - @Override - public ResultDeserializer createResultSetDeserializer() { - return new TableResultDeserializer(); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.java deleted file mode 100644 index 3165e5af86..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.txt; - -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.domain.Dolphin; - -public class TextResultDeserializer extends ResultDeserializer { - - @Override - public LineMetaData createMetaData(byte[] bytes) { - return new LineMetaData(Dolphin.getString(bytes, 0, bytes.length)); - } - - @Override - public LineRecord createRecord(byte[] bytes) { - return new LineRecord(Dolphin.getString(bytes, 0, bytes.length)); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSerializer.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSerializer.java deleted file mode 100644 index 5555ad9eba..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSerializer.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset.txt; - -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.utils.StorageUtils; - -public class TextResultSerializer extends ResultSerializer { - - @Override - public byte[] metaDataToBytes(MetaData metaData) { - if (metaData == null) { - return lineToBytes(null); - } else { - LineMetaData textMetaData = (LineMetaData) metaData; - return lineToBytes(textMetaData.getMetaData()); - } - } - - @Override - public byte[] recordToBytes(Record record) { - LineRecord textRecord = (LineRecord) record; - return lineToBytes(textRecord.getLine()); - } - - private byte[] lineToBytes(String value) { - byte[] bytes = (value == null) ? 
Dolphin.NULL_BYTES : Dolphin.getBytes(value); - return StorageUtils.mergeByteArrays(Dolphin.getIntBytes(bytes.length), bytes); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSet.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSet.java deleted file mode 100644 index 19fd8f9dbc..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/txt/TextResultSet.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.resultset.txt; - -import org.apache.linkis.common.io.resultset.ResultDeserializer; -import org.apache.linkis.common.io.resultset.ResultSerializer; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.resultset.ResultSetFactory; -import org.apache.linkis.storage.resultset.StorageResultSet; - -import java.io.Serializable; - -public class TextResultSet extends StorageResultSet - implements Serializable { - - @Override - public String resultSetType() { - return ResultSetFactory.TEXT_TYPE; - } - - @Override - public ResultSerializer createResultSetSerializer() { - return new TextResultSerializer(); - } - - @Override - public ResultDeserializer createResultSetDeserializer() { - return new TextResultDeserializer(); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/Compaction.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/Compaction.java deleted file mode 100644 index abfbae9c99..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/Compaction.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.script; - -import org.apache.linkis.storage.script.compaction.PYScriptCompaction; -import org.apache.linkis.storage.script.compaction.QLScriptCompaction; -import org.apache.linkis.storage.script.compaction.ScalaScriptCompaction; -import org.apache.linkis.storage.script.compaction.ShellScriptCompaction; - -public interface Compaction { - String prefixConf(); - - String prefix(); - - boolean belongTo(String suffix); - - String compact(Variable variable); - - public static Compaction[] listCompactions() { - return new Compaction[] { - new PYScriptCompaction(), - new QLScriptCompaction(), - new ScalaScriptCompaction(), - new ShellScriptCompaction() - }; - } - - String getAnnotationSymbol(); -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ParserFactory.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ParserFactory.java deleted file mode 100644 index 58edf3dfe9..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ParserFactory.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.script; - -import org.apache.linkis.storage.script.parser.PYScriptParser; -import org.apache.linkis.storage.script.parser.QLScriptParser; -import org.apache.linkis.storage.script.parser.ScalaScriptParser; -import org.apache.linkis.storage.script.parser.ShellScriptParser; - -public class ParserFactory { - public static Parser[] listParsers() { - return new Parser[] { - new PYScriptParser(), new QLScriptParser(), new ScalaScriptParser(), new ShellScriptParser() - }; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptFsWriter.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptFsWriter.java deleted file mode 100644 index 835e1bfb21..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptFsWriter.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.FsWriter; -import org.apache.linkis.storage.script.writer.StorageScriptFsWriter; - -import java.io.InputStream; -import java.io.OutputStream; - -public abstract class ScriptFsWriter extends FsWriter { - FsPath path; - String charset; - - public abstract InputStream getInputStream(); - - public static ScriptFsWriter getScriptFsWriter( - FsPath path, String charset, OutputStream outputStream) { - return new StorageScriptFsWriter(path, charset, outputStream); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/Variable.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/Variable.java deleted file mode 100644 index 8c726671ba..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/Variable.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script; - -public class Variable { - protected String sortParent; - protected String sort; - protected String key; - protected String value; - - public Variable(String sortParent, String sort, String key, String value) { - this.sortParent = sortParent; - this.sort = sort; - this.key = key; - this.value = value; - } - - public String getSortParent() { - return sortParent; - } - - public String getSort() { - return sort; - } - - public String getKey() { - return key; - } - - public String getValue() { - return value; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/VariableParser.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/VariableParser.java deleted file mode 100644 index cd10746457..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/VariableParser.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script; - -import java.util.*; - -public class VariableParser { - - public static final String CONFIGURATION = "configuration"; - public static final String VARIABLE = "variable"; - public static final String RUNTIME = "runtime"; - public static final String STARTUP = "startup"; - public static final String SPECIAL = "special"; - - public static Variable[] getVariables(Map params) { - List variables = new ArrayList<>(); - Map variableMap = - (Map) params.getOrDefault(VARIABLE, new HashMap()); - for (Map.Entry entry : variableMap.entrySet()) { - variables.add(new Variable(VARIABLE, null, entry.getKey(), entry.getValue().toString())); - } - - Map configurationMap = - (Map) params.getOrDefault(CONFIGURATION, new HashMap()); - for (Map.Entry entry : configurationMap.entrySet()) { - Map subMap = (Map) entry.getValue(); - for (Map.Entry subEntry : subMap.entrySet()) { - if (!isContextIDINFO(subEntry.getKey())) { - Object value = subEntry.getValue(); - if (value instanceof Map) { - Map innerMap = (Map) value; - for (Map.Entry innerEntry : innerMap.entrySet()) { - if (!isContextIDINFO(innerEntry.getKey())) { - variables.add( - new Variable( - entry.getKey(), - subEntry.getKey(), - innerEntry.getKey(), - innerEntry.getValue().toString())); - } - } - } else { - if (value == null) { - variables.add(new Variable(CONFIGURATION, entry.getKey(), subEntry.getKey(), "")); - } else { - variables.add( - new Variable(CONFIGURATION, entry.getKey(), subEntry.getKey(), value.toString())); - } - } - } - } - } - return variables.toArray(new Variable[variables.size()]); - } - - private static boolean isContextIDINFO(String key) { - return "contextID".equalsIgnoreCase(key) || "nodeName".equalsIgnoreCase(key); - } - - public static Map getMap(Variable[] variables) { - Map variableKey2Value = new HashMap<>(); - Map confs = new HashMap<>(); - - Arrays.stream(variables) - .filter(variable -> variable.sort == null) - .forEach(v -> 
variableKey2Value.put(v.key, v.value)); - - Arrays.stream(variables) - .filter(variable -> variable.sort != null) - .forEach( - v -> { - switch (v.getSort()) { - case STARTUP: - case RUNTIME: - case SPECIAL: - if (!confs.containsKey(v.getSort())) { - confs.put(v.getSort(), createMap(v)); - } else { - Map subMap = (Map) confs.get(v.getSort()); - subMap.put(v.getKey(), v.getValue()); - } - break; - default: - if (!confs.containsKey(v.getSortParent())) { - Map subMap = new HashMap<>(); - subMap.put(v.getSort(), createMap(v)); - confs.put(v.getSortParent(), subMap); - } else { - Map subMap = (Map) confs.get(v.getSortParent()); - if (!subMap.containsKey(v.getSort())) { - subMap.put(v.getSort(), createMap(v)); - } else { - Map innerMap = (Map) subMap.get(v.getSort()); - innerMap.put(v.getKey(), v.getValue()); - } - } - break; - } - }); - - Map params = new HashMap<>(); - if (!variableKey2Value.isEmpty()) { - params.put(VARIABLE, variableKey2Value); - } - if (!confs.isEmpty()) { - params.put(CONFIGURATION, confs); - } - return params; - } - - private static Map createMap(Variable variable) { - Map map = new HashMap<>(); - map.put(variable.getKey(), variable.getValue()); - return map; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/PYScriptCompaction.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/PYScriptCompaction.java deleted file mode 100644 index 4022e7d46d..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/PYScriptCompaction.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.script.compaction; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class PYScriptCompaction extends CommonScriptCompaction { - - private static final PYScriptCompaction pYScriptCompaction = new PYScriptCompaction(); - - public static CommonScriptCompaction apply() { - return pYScriptCompaction; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_PYTHON()); - } - - @Override - public String prefix() { - return "#@set"; - } - - @Override - public String prefixConf() { - return "#conf@set"; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/QLScriptCompaction.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/QLScriptCompaction.java deleted file mode 100644 index 97fc29efa0..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/QLScriptCompaction.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.script.compaction; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class QLScriptCompaction extends CommonScriptCompaction { - - private static final QLScriptCompaction qLScriptCompaction = new QLScriptCompaction(); - - public static CommonScriptCompaction apply() { - return qLScriptCompaction; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_SQL()); - } - - @Override - public String prefix() { - return "--@set"; - } - - @Override - public String prefixConf() { - return "--conf@set"; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.java deleted file mode 100644 index 18d5d2f531..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.script.compaction; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class ScalaScriptCompaction extends CommonScriptCompaction { - - private static final ScalaScriptCompaction compaction = new ScalaScriptCompaction(); - - public static CommonScriptCompaction apply() { - return compaction; - } - - @Override - public String prefix() { - return "//@set"; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_SCALA()); - } - - @Override - public String prefixConf() { - return "//conf@set"; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.java deleted file mode 100644 index 085815b559..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.script.compaction; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class ShellScriptCompaction extends CommonScriptCompaction { - - private static final ShellScriptCompaction shellScriptCompaction = new ShellScriptCompaction(); - - public static CommonScriptCompaction apply() { - return shellScriptCompaction; - } - - @Override - public String prefix() { - return "#@set"; - } - - @Override - public String prefixConf() { - return "#conf@set"; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_SHELL()); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/CommonScriptParser.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/CommonScriptParser.java deleted file mode 100644 index 0d7e9f58e1..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/CommonScriptParser.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.script.parser; - -import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.script.Parser; -import org.apache.linkis.storage.script.Variable; -import org.apache.linkis.storage.script.VariableParser; - -import java.util.Arrays; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public abstract class CommonScriptParser implements Parser { - - @Override - public Variable parse(String line) { - String variableReg = "\\s*" + prefix() + "\\s*(.+)\\s*" + "=" + "\\s*(.+)\\s*"; - Pattern pattern = Pattern.compile(variableReg); - Matcher matcher = pattern.matcher(line); - if (matcher.matches()) { - String key = matcher.group(1).trim(); - String value = matcher.group(2).trim(); - return new Variable(VariableParser.VARIABLE, null, key, value); - - } else { - String[] splitLine = line.split("="); - if (splitLine.length != 2) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.INVALID_CUSTOM_PARAMETER.getErrorCode(), - LinkisStorageErrorCodeSummary.INVALID_CUSTOM_PARAMETER.getErrorDesc()); - } - String[] subSplit = - Arrays.stream(splitLine[0].split(" ")) - .filter(str -> !"".equals(str)) - .toArray(String[]::new); - if (subSplit.length != 4) { - throw new StorageWarnException( - 
LinkisStorageErrorCodeSummary.INVALID_CUSTOM_PARAMETER.getErrorCode(), - LinkisStorageErrorCodeSummary.INVALID_CUSTOM_PARAMETER.getErrorDesc()); - } - if (!subSplit[0].trim().equals(prefixConf())) { - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.INVALID_CUSTOM_PARAMETER.getErrorCode(), - LinkisStorageErrorCodeSummary.INVALID_CUSTOM_PARAMETER.getErrorDesc()); - } - String sortParent = subSplit[1].trim(); - String sort = subSplit[2].trim(); - String key = subSplit[3].trim(); - String value = splitLine[1].trim(); - return new Variable(sortParent, sort, key, value); - } - } - - @Override - public String getAnnotationSymbol() { - return prefix().split("@")[0]; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/PYScriptParser.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/PYScriptParser.java deleted file mode 100644 index 3d5edcfac7..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/PYScriptParser.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script.parser; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class PYScriptParser extends CommonScriptParser { - - private static final PYScriptParser pYScriptParser = new PYScriptParser(); - - public static CommonScriptParser apply() { - return pYScriptParser; - } - - @Override - public String prefix() { - return "#@set"; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_PYTHON()); - } - - @Override - public String prefixConf() { - return "#conf@set"; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/QLScriptParser.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/QLScriptParser.java deleted file mode 100644 index 806a84760f..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/QLScriptParser.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script.parser; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class QLScriptParser extends CommonScriptParser { - - private static final QLScriptParser qLScriptParser = new QLScriptParser(); - - public static CommonScriptParser apply() { - return qLScriptParser; - } - - @Override - public String prefix() { - return "--@set"; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_SQL()); - } - - @Override - public String prefixConf() { - return "--conf@set"; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ScalaScriptParser.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ScalaScriptParser.java deleted file mode 100644 index 982538d8e8..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ScalaScriptParser.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script.parser; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class ScalaScriptParser extends CommonScriptParser { - - private static final ScalaScriptParser scalaScriptParser = new ScalaScriptParser(); - - public static CommonScriptParser apply() { - return scalaScriptParser; - } - - @Override - public String prefix() { - return "//@set"; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_SCALA()); - } - - @Override - public String prefixConf() { - return "//conf@set"; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ShellScriptParser.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ShellScriptParser.java deleted file mode 100644 index e1eebe3e58..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/parser/ShellScriptParser.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script.parser; - -import org.apache.linkis.common.utils.CodeAndRunTypeUtils; - -public class ShellScriptParser extends CommonScriptParser { - - private static final ShellScriptParser shellScriptParser = new ShellScriptParser(); - - public static CommonScriptParser create() { - return shellScriptParser; - } - - @Override - public String prefix() { - return "#@set"; - } - - @Override - public boolean belongTo(String suffix) { - return CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( - suffix, CodeAndRunTypeUtils.LANGUAGE_TYPE_SHELL()); - } - - @Override - public String prefixConf() { - return "#conf@set"; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/reader/StorageScriptFsReader.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/reader/StorageScriptFsReader.java deleted file mode 100644 index 1f862fa6b1..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/reader/StorageScriptFsReader.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script.reader; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.storage.script.*; -import org.apache.linkis.storage.utils.StorageUtils; - -import org.apache.commons.io.IOUtils; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.*; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -public class StorageScriptFsReader extends ScriptFsReader { - private final FsPath path; - private final String charset; - private final InputStream inputStream; - - private InputStreamReader inputStreamReader; - private BufferedReader bufferedReader; - - private ScriptMetaData metadata; - - private List variables = new ArrayList<>(); - private String lineText; - - public StorageScriptFsReader(FsPath path, String charset, InputStream inputStream) { - super(path, charset); - this.path = path; - this.charset = charset; - this.inputStream = inputStream; - } - - @Override - public Record getRecord() throws IOException { - if (metadata == null) throw new IOException("Must read metadata first(必须先读取metadata)"); - ScriptRecord record = new ScriptRecord(lineText); - lineText = bufferedReader.readLine(); - return record; - } - - @Override - public MetaData getMetaData() throws IOException { - if (metadata == null) init(); - Parser parser = getScriptParser(); - lineText = bufferedReader.readLine(); - while (hasNext() - && Objects.nonNull(parser) - && isMetadata(lineText, parser.prefix(), parser.prefixConf())) { - variables.add(parser.parse(lineText)); - lineText = bufferedReader.readLine(); - } - metadata = new ScriptMetaData(variables.toArray(new Variable[0])); - return metadata; - } - - public void init() { - inputStreamReader = new InputStreamReader(inputStream); - bufferedReader = new BufferedReader(inputStreamReader); - } - - 
@Override - public int skip(int recordNum) throws IOException { - if (recordNum < 0) return -1; - if (metadata == null) getMetaData(); - try { - return (int) bufferedReader.skip(recordNum); - } catch (Throwable t) { - return recordNum; - } - } - - @Override - public long getPosition() throws IOException { - return -1L; - } - - @Override - public boolean hasNext() throws IOException { - return lineText != null; - } - - @Override - public long available() throws IOException { - return inputStream != null ? inputStream.available() : 0L; - } - - @Override - public void close() throws IOException { - IOUtils.closeQuietly(bufferedReader); - IOUtils.closeQuietly(inputStreamReader); - IOUtils.closeQuietly(inputStream); - } - - /** - * Determine if the read line is metadata(判断读的行是否是metadata) - * - * @param line - * @return - */ - public boolean isMetadata(String line, String prefix, String prefixConf) { - String regex = "\\s*" + prefix + "\\s*(.+)\\s*=\\s*(.+)\\s*"; - if (line.matches(regex)) { - return true; - } else { - String[] split = line.split("="); - if (split.length != 2) { - return false; - } - if (Stream.of(split[0].split(" ")).filter(str -> !"".equals(str)).count() != 4) { - return false; - } - - Optional optional = - Stream.of(split[0].split(" ")).filter(str -> !"".equals(str)).findFirst(); - if (optional.isPresent() && !optional.get().equals(prefixConf)) { - return false; - } - return true; - } - } - - /** - * get the script parser according to the path(根据文件路径 获取对应的script parser ) - * - * @return Scripts Parser - */ - public Parser getScriptParser() { - List parsers = - Arrays.stream(ParserFactory.listParsers()) - .filter(p -> p.belongTo(StorageUtils.pathToSuffix(path.getPath()))) - .collect(Collectors.toList()); - if (parsers.size() > 0) { - return parsers.get(0); - } else { - return null; - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriter.java 
b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriter.java deleted file mode 100644 index 84dd6abb83..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriter.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.script.writer; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.script.Compaction; -import org.apache.linkis.storage.script.ScriptFsWriter; -import org.apache.linkis.storage.script.ScriptMetaData; -import org.apache.linkis.storage.script.Variable; -import org.apache.linkis.storage.utils.StorageConfiguration; -import org.apache.linkis.storage.utils.StorageUtils; - -import org.apache.commons.io.IOUtils; -import org.apache.hadoop.hdfs.client.HdfsDataOutputStream; - -import java.io.*; -import java.util.ArrayList; -import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class StorageScriptFsWriter extends ScriptFsWriter { - private static final Logger log = LoggerFactory.getLogger(StorageScriptFsWriter.class); - - private final FsPath path; - private final String charset; - private final OutputStream outputStream; - private final StringBuilder stringBuilder = new StringBuilder(); - - public StorageScriptFsWriter(FsPath path, String charset, OutputStream outputStream) { - this.path = path; - this.charset = charset; - this.outputStream = outputStream; - } - - @Override - public void addMetaData(MetaData metaData) throws IOException { - String suffix = StorageUtils.pathToSuffix(path.getPath()); - List compactions = - Stream.of(Compaction.listCompactions()) - .filter(compaction -> compaction.belongTo(suffix)) - .collect(Collectors.toList()); - List metadataLine = new ArrayList<>(); - if (!compactions.isEmpty()) { - Variable[] metaData1 = ((ScriptMetaData) metaData).getMetaData(); - Stream.of(metaData1).map(compactions.get(0)::compact).forEach(metadataLine::add); - - // add annotition symbol - if (metadataLine.size() > 0) { - 
metadataLine.add(compactions.get(0).getAnnotationSymbol()); - } - if (outputStream != null) { - IOUtils.writeLines(metadataLine, "\n", outputStream, charset); - } else { - metadataLine.forEach(m -> stringBuilder.append(m).append("\n")); - } - } - } - - @Override - public void addRecord(Record record) throws IOException { - LineRecord scriptRecord = (LineRecord) record; - if (outputStream != null) { - IOUtils.write(scriptRecord.getLine(), outputStream, charset); - } else { - stringBuilder.append(scriptRecord.getLine()); - } - } - - @Override - public void close() { - IOUtils.closeQuietly(outputStream); - } - - @Override - public void flush() { - if (outputStream instanceof HdfsDataOutputStream) { - try { - ((HdfsDataOutputStream) outputStream).hflush(); - } catch (IOException t) { - log.warn("Error encountered when flush script", t); - } - } else if (outputStream != null) { - try { - outputStream.flush(); - } catch (IOException t) { - log.warn("Error encountered when flush script", t); - } - } - } - - @Override - public InputStream getInputStream() { - byte[] bytes = null; - try { - bytes = - stringBuilder.toString().getBytes(StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue()); - } catch (UnsupportedEncodingException e) { - log.warn("StorageScriptFsWriter getInputStream failed", e); - } - return new ByteArrayInputStream(bytes); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/AbstractFileSource.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/AbstractFileSource.java deleted file mode 100644 index fc4e615b36..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/AbstractFileSource.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.source; - -import org.apache.linkis.common.io.FsWriter; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.math3.util.Pair; - -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.function.Function; -import java.util.stream.Collectors; - -public abstract class AbstractFileSource implements FileSource { - - private FileSplit[] fileSplits; - - public AbstractFileSource(FileSplit[] fileSplits) { - this.fileSplits = fileSplits; - } - - @Override - public FileSource shuffle(Function function) { - Arrays.stream(fileSplits).forEach(fileSplit -> fileSplit.shuffler = function); - return this; - } - - @Override - public FileSource page(int page, int pageSize) { - Arrays.stream(fileSplits).forEach(fileSplit -> fileSplit.page(page, pageSize)); - return this; - } - - @Override - public FileSource addParams(Map params) { - Arrays.stream(fileSplits).forEach(fileSplit -> fileSplit.addParams(params)); - return this; - } - - @Override - public FileSource addParams(String key, String value) { - Arrays.stream(fileSplits).forEach(fileSplit -> fileSplit.addParams(key, value)); - return this; - } - - @Override - public FileSplit[] getFileSplits() { - return this.fileSplits; - } - - @Override - public Map getParams() { - return 
Arrays.stream(fileSplits) - .map(FileSplit::getParams) - .flatMap(map -> map.entrySet().stream()) - .collect( - Collectors.toMap( - Map.Entry::getKey, Map.Entry::getValue, (existingValue, newValue) -> newValue)); - } - - @Override - public void write(FsWriter fsWriter) { - Arrays.stream(fileSplits).forEach(fileSplit -> fileSplit.write(fsWriter)); - } - - @Override - public void close() { - Arrays.stream(fileSplits).forEach(IOUtils::closeQuietly); - } - - @Override - public Pair>[] collect() { - return Arrays.stream(fileSplits).map(FileSplit::collect).toArray(Pair[]::new); - } - - @Override - public int getTotalLine() { - return Arrays.stream(fileSplits).mapToInt(FileSplit::getTotalLine).sum(); - } - - @Override - public String[] getTypes() { - return Arrays.stream(fileSplits).map(FileSplit::getType).toArray(String[]::new); - } - - @Override - public Pair[] getFileInfo(int needToCountRowNumber) { - return Arrays.stream(fileSplits) - .map(fileSplit -> fileSplit.getFileInfo(needToCountRowNumber)) - .toArray(Pair[]::new); - } - - @Override - public FileSource limitBytes(Long limitBytes) { - Arrays.stream(fileSplits).forEach(fileSplit -> fileSplit.setLimitBytes(limitBytes)); - return this; - } - - @Override - public FileSource limitColumnLength(int limitColumnLength) { - Arrays.stream(fileSplits) - .forEach(fileSplit -> fileSplit.setLimitColumnLength(limitColumnLength)); - return this; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSource.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSource.java deleted file mode 100644 index f7a5d3e512..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSource.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.source; - -import org.apache.linkis.common.io.*; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetReader; -import org.apache.linkis.storage.conf.LinkisStorageConf; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.resultset.ResultSetFactory; -import org.apache.linkis.storage.resultset.ResultSetReaderFactory; -import org.apache.linkis.storage.script.ScriptFsReader; -import org.apache.linkis.storage.utils.StorageConfiguration; - -import org.apache.commons.math3.util.Pair; - -import java.io.Closeable; -import java.io.IOException; -import java.io.InputStream; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.function.BiFunction; -import java.util.function.Function; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE; - -public interface FileSource extends Closeable { - Logger logger = LoggerFactory.getLogger(FileSource.class); - - FileSource shuffle(Function s); - - FileSource page(int page, int pageSize); - - Pair>[] collect(); - - Pair[] getFileInfo(int needToCountRowNumber); 
- - void write(FsWriter fsWriter); - - FileSource addParams(Map params); - - FileSource addParams(String key, String value); - - Map getParams(); - - int getTotalLine(); - - String[] getTypes(); - - FileSplit[] getFileSplits(); - - String[] fileType = LinkisStorageConf.getFileTypeArr(); - BiFunction suffixPredicate = - (path, suffix) -> path.endsWith("." + suffix); - - static boolean isResultSet(String path) { - return suffixPredicate.apply(path, LinkisStorageConf.DOLPHIN) - || suffixPredicate.apply(path, LinkisStorageConf.PARQUET) - || suffixPredicate.apply(path, LinkisStorageConf.ORC); - } - - static boolean isResultSet(FsPath fsPath) { - return isResultSet(fsPath.getPath()); - } - - FileSource limitBytes(Long limitBytes); - - FileSource limitColumnLength(int limitColumnLength); - - /** - * Currently only supports table multi-result sets - * - * @param fsPaths - * @param fs - * @return - */ - static FileSource create(FsPath[] fsPaths, Fs fs) { - // Filter non-table result sets - FileSplit[] fileSplits = - Arrays.stream(fsPaths) - .map(fsPath -> createResultSetFileSplit(fsPath, fs)) - .filter(FileSource::isTableResultSet) - .toArray(FileSplit[]::new); - return new ResultsetFileSource(fileSplits); - } - - static boolean isTableResultSet(FileSplit fileSplit) { - return fileSplit.type.equals(ResultSetFactory.TABLE_TYPE); - } - - static boolean isTableResultSet(FileSource fileSource) { - // Return true only if all splits are table result sets - return Arrays.stream(fileSource.getFileSplits()).allMatch(FileSource::isTableResultSet); - } - - static FileSource create(FsPath fsPath, Fs fs) { - if (!canRead(fsPath.getPath())) { - throw new StorageWarnException( - UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode(), UNSUPPORTED_OPEN_FILE_TYPE.getErrorDesc()); - } - if (isResultSet(fsPath)) { - return new ResultsetFileSource(new FileSplit[] {createResultSetFileSplit(fsPath, fs)}); - } else { - return new TextFileSource(new FileSplit[] {createTextFileSplit(fsPath, fs)}); - } - } - - 
static FileSource create(FsPath fsPath, InputStream is) { - if (!canRead(fsPath.getPath())) { - throw new StorageWarnException( - UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode(), UNSUPPORTED_OPEN_FILE_TYPE.getErrorDesc()); - } - if (isResultSet(fsPath)) { - return new ResultsetFileSource(new FileSplit[] {createResultSetFileSplit(fsPath, is)}); - } else { - return new TextFileSource(new FileSplit[] {createTextFileSplit(fsPath, is)}); - } - } - - static FileSplit createResultSetFileSplit(FsPath fsPath, InputStream is) { - logger.info("try create result set file split with path:{}", fsPath.getPath()); - ResultSet resultset = ResultSetFactory.getInstance().getResultSetByPath(fsPath); - ResultSetReader resultsetReader = - ResultSetReaderFactory.getResultSetReader(resultset, is, fsPath); - return new FileSplit(resultsetReader, resultset.resultSetType()); - } - - static FileSplit createResultSetFileSplit(FsPath fsPath, Fs fs) { - ResultSet resultset = ResultSetFactory.getInstance().getResultSetByPath(fsPath, fs); - ResultSetReader resultsetReader = null; - try { - resultsetReader = - ResultSetReaderFactory.getResultSetReader(resultset, fs.read(fsPath), fsPath); - } catch (IOException e) { - logger.warn("FileSource createResultSetFileSplit failed", e); - } - return new FileSplit(resultsetReader, resultset.resultSetType()); - } - - static FileSplit createTextFileSplit(FsPath fsPath, InputStream is) { - ScriptFsReader scriptFsReader = - ScriptFsReader.getScriptFsReader( - fsPath, StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue(), is); - return new FileSplit(scriptFsReader); - } - - static FileSplit createTextFileSplit(FsPath fsPath, Fs fs) { - ScriptFsReader scriptFsReader = null; - try { - scriptFsReader = - ScriptFsReader.getScriptFsReader( - fsPath, StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue(), fs.read(fsPath)); - } catch (IOException e) { - logger.warn("FileSource createTextFileSplit failed", e); - } - return new FileSplit(scriptFsReader); - } - - static boolean 
canRead(String path) { - return Arrays.stream(fileType).anyMatch(suffix -> path.endsWith("." + suffix)); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSplit.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSplit.java deleted file mode 100644 index 3a6c05a54a..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/FileSplit.java +++ /dev/null @@ -1,324 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.source; - -import org.apache.linkis.common.io.FsReader; -import org.apache.linkis.common.io.FsWriter; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.domain.DataType; -import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.resultset.table.TableMetaData; -import org.apache.linkis.storage.resultset.table.TableRecord; -import org.apache.linkis.storage.script.Parser; -import org.apache.linkis.storage.script.ScriptMetaData; -import org.apache.linkis.storage.script.Variable; -import org.apache.linkis.storage.script.VariableParser; -import org.apache.linkis.storage.script.reader.StorageScriptFsReader; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.math3.util.Pair; - -import java.io.Closeable; -import java.io.IOException; -import java.util.*; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import java.util.function.Consumer; -import java.util.function.Function; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class FileSplit implements Closeable { - private static final Logger logger = LoggerFactory.getLogger(FileSplit.class); - - private FsReader fsReader; - protected String type = "script/text"; - private int start = 0; - private int end = -1; - private int count = 0; - private int totalLine = 0; - protected Function shuffler; - private boolean pageTrigger = false; - protected Map params = new HashMap<>(); - private long limitBytes = 0L; - private int limitColumnLength = 0; - - public FileSplit(FsReader fsReader) { - this.fsReader = fsReader; - } - - public 
FileSplit(FsReader fsReader, String type) { - this.fsReader = fsReader; - this.type = type; - } - - public void page(int page, int pageSize) { - if (!pageTrigger) { - start = (page - 1) * pageSize; - end = pageSize * page - 1; - pageTrigger = true; - } - } - - public String getType() { - return type; - } - - public void addParams(Map params) { - this.params.putAll(params); - } - - public void addParams(String key, String value) { - this.params.put(key, value); - } - - public Map getParams() { - return params; - } - - public int getTotalLine() { - return totalLine; - } - - public void setLimitBytes(long limitBytes) { - this.limitBytes = limitBytes; - } - - public void setLimitColumnLength(int limitColumnLength) { - this.limitColumnLength = limitColumnLength; - } - - public M whileLoop(Function metaDataFunction, Consumer recordConsumer) { - M m = null; - try { - MetaData metaData = fsReader.getMetaData(); - m = metaDataFunction.apply(metaData); - if (pageTrigger) { - fsReader.skip(start); - } - count = start; - boolean hasRemovedFlag = false; - while (fsReader.hasNext() && ifContinueRead()) { - Record record = fsReader.getRecord(); - boolean needRemoveFlag = false; - if (!hasRemovedFlag && fsReader instanceof StorageScriptFsReader) { - Parser parser = ((StorageScriptFsReader) fsReader).getScriptParser(); - Variable[] meta = ((ScriptMetaData) metaData).getMetaData(); - if (meta != null - && meta.length > 0 - && parser != null - && parser.getAnnotationSymbol().equals(record.toString())) { - needRemoveFlag = true; - hasRemovedFlag = true; - } - } - if (!needRemoveFlag) { - recordConsumer.accept(shuffler.apply(record)); - totalLine++; - count++; - } - } - } catch (IOException e) { - logger.warn("FileSplit forEach failed", e); - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode(), - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorMessage()); - } - return m; - } - - public void biConsumerWhileLoop( - 
Consumer metaDataFunction, Consumer recordConsumer) { - try { - MetaData metaData = fsReader.getMetaData(); - metaDataFunction.accept(metaData); - if (pageTrigger) { - fsReader.skip(start); - } - count = start; - boolean hasRemovedFlag = false; - while (fsReader.hasNext() && ifContinueRead()) { - Record record = fsReader.getRecord(); - boolean needRemoveFlag = false; - if (!hasRemovedFlag && fsReader instanceof StorageScriptFsReader) { - Parser parser = ((StorageScriptFsReader) fsReader).getScriptParser(); - Variable[] meta = ((ScriptMetaData) metaData).getMetaData(); - if (meta != null - && meta.length > 0 - && parser != null - && parser.getAnnotationSymbol().equals(record.toString())) { - needRemoveFlag = true; - hasRemovedFlag = true; - } - } - if (!needRemoveFlag) { - recordConsumer.accept(shuffler.apply(record)); - totalLine++; - count++; - } - } - } catch (IOException e) { - logger.warn("FileSplit forEach failed", e); - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode(), - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorMessage()); - } - } - - public Pair getFileInfo(int needToCountRowNumber) { - int colNumber = 0; - int rowNumber = 0; - MetaData metaData = null; - try { - metaData = fsReader.getMetaData(); - colNumber = - metaData instanceof TableMetaData ? ((TableMetaData) metaData).getColumns().length : 1; - rowNumber = - needToCountRowNumber == -1 - ? 
fsReader.skip(Integer.MAX_VALUE) - : fsReader.skip(needToCountRowNumber); - } catch (IOException e) { - logger.warn("FileSplit getFileInfo failed", e); - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode(), - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorMessage()); - } - return new Pair<>(colNumber, rowNumber); - } - - public void write(FsWriter fsWriter) { - biConsumerWhileLoop( - metaData -> { - try { - fsWriter.addMetaData(metaData); - } catch (IOException e) { - logger.warn("FileSplit addMetaData failed", e); - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode(), - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorMessage()); - } - }, - record -> { - try { - fsWriter.addRecord(record); - } catch (IOException e) { - logger.warn("FileSplit addRecord failed", e); - throw new StorageWarnException( - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode(), - LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE.getErrorMessage()); - } - }); - } - - public Pair> collect() { - List recordList = new ArrayList<>(); - final AtomicLong tmpBytes = new AtomicLong(0L); - final AtomicBoolean overFlag = new AtomicBoolean(false); - Object metaData = - whileLoop( - collectMetaData -> collectMetaData(collectMetaData), - r -> { - if (!overFlag.get()) { - String[] arr = collectRecord(r); - if (limitBytes > 0) { - for (int i = 0; i < arr.length; i++) { - tmpBytes.addAndGet(arr[i].getBytes().length); - if (overFlag.get() || tmpBytes.get() > limitBytes) { - overFlag.set(true); - arr[i] = ""; - } - } - recordList.add(arr); - } else { - recordList.add(arr); - } - } - }); - return new Pair<>(metaData, recordList); - } - - public String[] collectRecord(Record record) { - if (record instanceof TableRecord) { - TableRecord tableRecord = (TableRecord) record; - if (limitColumnLength > 0) { - return Arrays.stream(tableRecord.row) - 
.map( - obj -> { - String col = DataType.valueToString(obj); - if (col.length() > limitColumnLength) { - return col.substring(0, limitColumnLength); - } else { - return col; - } - }) - .toArray(String[]::new); - } - return Arrays.stream(tableRecord.row).map(DataType::valueToString).toArray(String[]::new); - } else if (record instanceof LineRecord) { - LineRecord lineRecord = (LineRecord) record; - return new String[] {lineRecord.getLine()}; - } else { - throw new IllegalArgumentException("Unknown record type"); - } - } - - public Object collectMetaData(MetaData metaData) { - if (metaData instanceof ScriptMetaData) { - ScriptMetaData scriptMetaData = (ScriptMetaData) metaData; - return VariableParser.getMap(scriptMetaData.getMetaData()); - } else if (metaData instanceof LineMetaData) { - LineMetaData lineMetaData = (LineMetaData) metaData; - return lineMetaData.getMetaData(); - } else if (metaData instanceof TableMetaData) { - TableMetaData tableMetaData = (TableMetaData) metaData; - return Arrays.stream(tableMetaData.getColumns()) - .map(this::columnToMap) - .collect(Collectors.toList()); - } else { - throw new IllegalArgumentException("Unknown metadata type"); - } - } - - private Map columnToMap(Column column) { - Map stringMap = new HashMap<>(); - stringMap.put("columnName", column.getColumnName()); - stringMap.put("comment", column.getComment()); - stringMap.put("dataType", column.getDataType().getTypeName()); - return stringMap; - } - - public boolean ifContinueRead() { - return !pageTrigger || count <= end; - } - - public boolean ifStartRead() { - return !pageTrigger || count >= start; - } - - @Override - public void close() { - IOUtils.closeQuietly(fsReader); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/ResultsetFileSource.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/ResultsetFileSource.java deleted file mode 100644 index 54fd64daad..0000000000 --- 
a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/ResultsetFileSource.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.source; - -import org.apache.linkis.storage.domain.Dolphin; -import org.apache.linkis.storage.resultset.table.TableRecord; -import org.apache.linkis.storage.utils.StorageUtils; - -import java.util.Arrays; - -public class ResultsetFileSource extends AbstractFileSource { - - public ResultsetFileSource(FileSplit[] fileSplits) { - super(fileSplits); - shuffle( - record -> { - if (record instanceof TableRecord) { - TableRecord tableRecord = (TableRecord) record; - String nullValue = getParams().getOrDefault("nullValue", "NULL"); - return new TableRecord( - Arrays.stream(tableRecord.row) - .map( - r -> { - if (r == null || r.equals("NULL")) { - if (nullValue.equals(Dolphin.LINKIS_NULL)) { - return r; - } else { - return nullValue; - } - } else if (r.equals("")) { - String emptyValue = getParams().getOrDefault("nullValue", ""); - if (emptyValue.equals(Dolphin.LINKIS_NULL)) { - return ""; - } else { - return emptyValue; - } - } else if (r instanceof Double) { - return StorageUtils.doubleToString((Double) r); 
- } else { - return r; - } - }) - .toArray()); - } else { - return record; - } - }); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/TextFileSource.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/TextFileSource.java deleted file mode 100644 index 7e5396bf74..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/TextFileSource.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.storage.source; - -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.script.ScriptRecord; - -import org.apache.commons.math3.util.Pair; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.stream.Collectors; - -class TextFileSource extends AbstractFileSource { - public static final String[] LINE_BREAKER = new String[] {"\n"}; - - public TextFileSource(FileSplit[] fileSplits) { - super(fileSplits); - shuffle( - record -> { - if (record instanceof ScriptRecord && "".equals(((ScriptRecord) record).getLine())) { - return new LineRecord("\n"); - } else { - return record; - } - }); - } - - @Override - public Pair>[] collect() { - Pair>[] collects = super.collect(); - if (!getParams().getOrDefault("ifMerge", "true").equals("true")) { - return collects; - } - ArrayList> snds = - Arrays.stream(collects) - .map(Pair::getSecond) - .collect(Collectors.toCollection(ArrayList::new)); - snds.forEach( - snd -> { - StringBuilder str = new StringBuilder(); - snd.forEach( - arr -> { - if (Arrays.equals(arr, LINE_BREAKER)) { - str.append("\n"); - } else { - str.append(arr[0]).append("\n"); - } - }); - snd.clear(); - snd.add(new String[] {str.toString()}); - }); - return collects; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java deleted file mode 100644 index 4c50479637..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.utils; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.storage.FSFactory; -import org.apache.linkis.storage.fs.FileSystem; -import org.apache.linkis.storage.fs.impl.LocalFileSystem; - -import org.apache.commons.io.IOUtils; - -import java.io.IOException; -import java.util.Objects; -import java.util.Stack; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class FileSystemUtils { - private static final Logger logger = LoggerFactory.getLogger(FileSystemUtils.class); - - public static void copyFile(FsPath filePath, FsPath origin, String user) throws Exception { - FileSystem fileSystem = (FileSystem) FSFactory.getFsByProxyUser(filePath, user); - try { - fileSystem.init(null); - if (!fileSystem.exists(filePath)) { - if (!fileSystem.exists(filePath.getParent())) { - fileSystem.mkdirs(filePath.getParent()); - } - fileSystem.createNewFile(filePath); - } - fileSystem.copyFile(origin, filePath); - } finally { - IOUtils.closeQuietly(fileSystem); - } - } - - /** - * Create a new file - * - * @param filePath - * @param createParentWhenNotExists Whether to recursively create a directory - */ - public static void createNewFile(FsPath filePath, boolean createParentWhenNotExists) - throws Exception { - createNewFile(filePath, StorageUtils.getJvmUser(), createParentWhenNotExists); - } - - public static void 
createNewFile(FsPath filePath, String user, boolean createParentWhenNotExists) - throws Exception { - FileSystem fileSystem = (FileSystem) FSFactory.getFsByProxyUser(filePath, user); - try { - fileSystem.init(null); - createNewFileWithFileSystem(fileSystem, filePath, user, createParentWhenNotExists); - } finally { - IOUtils.closeQuietly(fileSystem); - } - } - - public static void createNewFileWithFileSystem( - FileSystem fileSystem, FsPath filePath, String user, boolean createParentWhenNotExists) - throws Exception { - if (!fileSystem.exists(filePath)) { - if (!fileSystem.exists(filePath.getParent())) { - if (!createParentWhenNotExists) { - throw new IOException( - "parent dir " + filePath.getParent().getPath() + " dose not exists."); - } - mkdirs(fileSystem, filePath.getParent(), user); - } - fileSystem.createNewFile(filePath); - if (fileSystem instanceof LocalFileSystem) { - fileSystem.setOwner(filePath, user); - } else { - logger.info("doesn't need to call setOwner"); - } - } - } - - /** - * create new file and set file owner by FileSystem - * - * @param fileSystem - * @param filePath - * @param user - * @param createParentWhenNotExists - */ - public static void createNewFileAndSetOwnerWithFileSystem( - FileSystem fileSystem, FsPath filePath, String user, boolean createParentWhenNotExists) - throws Exception { - if (!fileSystem.exists(filePath)) { - if (!fileSystem.exists(filePath.getParent())) { - if (!createParentWhenNotExists) { - throw new IOException( - "parent dir " + filePath.getParent().getPath() + " dose not exists."); - } - mkdirs(fileSystem, filePath.getParent(), user); - } - fileSystem.createNewFile(filePath); - fileSystem.setOwner(filePath, user); - } - } - - /** - * Recursively create a directory - * - * @param fileSystem - * @param dest - * @param user - * @throws IOException - * @return - */ - public static boolean mkdirs(FileSystem fileSystem, FsPath dest, String user) throws IOException { - FsPath parentPath = dest.getParent(); - Stack 
dirsToMake = new Stack<>(); - dirsToMake.push(dest); - while (!fileSystem.exists(parentPath)) { - dirsToMake.push(parentPath); - - if (Objects.isNull(parentPath.getParent())) { - // parent path of root is null - break; - } - - parentPath = parentPath.getParent(); - } - if (!fileSystem.canExecute(parentPath)) { - throw new IOException("You have not permission to access path " + dest.getPath()); - } - while (!dirsToMake.empty()) { - FsPath path = dirsToMake.pop(); - fileSystem.mkdir(path); - if (fileSystem instanceof LocalFileSystem) { - fileSystem.setOwner(path, user); - } else { - logger.info("doesn't need to call setOwner"); - } - } - return true; - } - - /** - * Recursively create a directory(递归创建目录) add owner info - * - * @param fileSystem - * @param dest - * @param user - * @throws IOException - * @return - */ - public static boolean mkdirsAndSetOwner(FileSystem fileSystem, FsPath dest, String user) - throws IOException { - FsPath parentPath = dest.getParent(); - Stack dirsToMake = new Stack<>(); - dirsToMake.push(dest); - while (!fileSystem.exists(parentPath)) { - dirsToMake.push(parentPath); - - if (Objects.isNull(parentPath.getParent())) { - // parent path of root is null - break; - } - - parentPath = parentPath.getParent(); - } - if (!fileSystem.canExecute(parentPath)) { - throw new IOException("You have not permission to access path " + dest.getPath()); - } - while (!dirsToMake.empty()) { - FsPath path = dirsToMake.pop(); - fileSystem.mkdir(path); - fileSystem.setOwner(path, user); - } - return true; - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/OrcUtils.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/OrcUtils.java deleted file mode 100644 index 72c2f8c355..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/OrcUtils.java +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * 
contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.utils; - -import org.apache.linkis.storage.domain.DataType; - -import org.apache.orc.TypeDescription; -import org.apache.orc.storage.common.type.HiveDecimal; -import org.apache.orc.storage.ql.exec.vector.*; - -import java.math.BigDecimal; -import java.sql.Timestamp; -import java.text.ParseException; -import java.text.SimpleDateFormat; - -/** - * Inspired by: - * https://github.com/apache/flink/blob/master/flink-formats/flink-orc/src/main/java/org/apache/flink/orc/OrcSplitReaderUtil.java - */ -public class OrcUtils { - - public static TypeDescription dataTypeToOrcType(DataType type) { - switch (type) { - case CharType: - return TypeDescription.createChar().withMaxLength(1024); - case StringType: - return TypeDescription.createString(); - case LongType: - return TypeDescription.createLong(); - case VarcharType: - return TypeDescription.createVarchar().withMaxLength(1024); - case BooleanType: - return TypeDescription.createBoolean(); - case BinaryType: - return TypeDescription.createBinary(); - case DecimalType: - return TypeDescription.createDecimal().withScale(10).withPrecision(38); - case TinyIntType: - return TypeDescription.createByte(); - case ShortIntType: - return TypeDescription.createShort(); - case 
IntType: - return TypeDescription.createInt(); - case BigIntType: - return TypeDescription.createLong(); - case FloatType: - return TypeDescription.createFloat(); - case DoubleType: - return TypeDescription.createDouble(); - case DateType: - return TypeDescription.createDate(); - case TimestampType: - return TypeDescription.createTimestamp(); - case ArrayType: - return TypeDescription.createList(dataTypeToOrcType(DataType.VarcharType)); - case MapType: - return TypeDescription.createMap( - dataTypeToOrcType(DataType.VarcharType), dataTypeToOrcType(DataType.VarcharType)); - default: - throw new UnsupportedOperationException("Unsupported type: " + type); - } - } - - public static void setColumn(int columnId, ColumnVector column, DataType type, Object value) { - switch (type) { - case CharType: - case VarcharType: - case BinaryType: - case StringType: - { - BytesColumnVector vector = (BytesColumnVector) column; - vector.setVal(columnId, String.valueOf(value).getBytes()); - break; - } - case BooleanType: - { - LongColumnVector vector = (LongColumnVector) column; - vector.vector[columnId] = Boolean.valueOf(value.toString()) ? 
1 : 0; - break; - } - case DecimalType: - { - DecimalColumnVector vector = (DecimalColumnVector) column; - vector.set(columnId, HiveDecimal.create(new BigDecimal(value.toString()))); - break; - } - case TinyIntType: - { - LongColumnVector vector = (LongColumnVector) column; - vector.vector[columnId] = (byte) value; - break; - } - case DateType: - case IntType: - { - LongColumnVector vector = (LongColumnVector) column; - vector.vector[columnId] = Integer.valueOf(value.toString()); - break; - } - case BigIntType: - { - LongColumnVector vector = (LongColumnVector) column; - vector.vector[columnId] = Long.valueOf(value.toString()); - break; - } - case FloatType: - { - DoubleColumnVector vector = (DoubleColumnVector) column; - vector.vector[columnId] = Float.valueOf(value.toString()); - break; - } - case DoubleType: - { - DoubleColumnVector vector = (DoubleColumnVector) column; - vector.vector[columnId] = Double.valueOf(value.toString()); - break; - } - case TimestampType: - { - TimestampColumnVector vector = (TimestampColumnVector) column; - try { - vector.set( - columnId, - new Timestamp( - new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ") - .parse(value.toString()) - .getTime())); - } catch (ParseException e) { - vector.set(columnId, new Timestamp(System.currentTimeMillis())); - } - break; - } - default: - throw new UnsupportedOperationException("Unsupported type: " + type); - } - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageConfiguration.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageConfiguration.java deleted file mode 100644 index 70a3839b62..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageConfiguration.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.utils; - -import org.apache.linkis.common.conf.ByteType; -import org.apache.linkis.common.conf.CommonVars; - -import java.util.List; - -import com.google.common.collect.Lists; - -public class StorageConfiguration { - - public static CommonVars PROXY_USER = - new CommonVars<>("wds.linkis.storage.proxy.user", "${UM}", null, null); - - public static CommonVars STORAGE_ROOT_USER = - new CommonVars<>("wds.linkis.storage.root.user", "hadoop", null, null); - - public static CommonVars HDFS_ROOT_USER = - new CommonVars<>("wds.linkis.storage.hdfs.root.user", "hadoop", null, null); - - public static CommonVars LOCAL_ROOT_USER = - new CommonVars<>("wds.linkis.storage.local.root.user", "root", null, null); - - public static CommonVars STORAGE_USER_GROUP = - new CommonVars<>("wds.linkis.storage.fileSystem.group", "bdap", null, null); - - public static CommonVars STORAGE_RS_FILE_TYPE = - new CommonVars<>("wds.linkis.storage.rs.file.type", "utf-8", null, null); - - public static CommonVars STORAGE_RS_FILE_SUFFIX = - new CommonVars<>("wds.linkis.storage.rs.file.suffix", ".dolphin", null, null); - - public static CommonVars LINKIS_STORAGE_FS_LABEL = - new CommonVars<>("linkis.storage.default.fs.label", "linkis-storage", null, null); - - public static List 
ResultTypes = - Lists.newArrayList("%TEXT", "%TABLE", "%HTML", "%IMG", "%ANGULAR", "%SVG"); - - public static CommonVars STORAGE_RESULT_SET_PACKAGE = - new CommonVars<>( - "wds.linkis.storage.result.set.package", - "org.apache.linkis.storage.resultset", - null, - null); - - public static CommonVars STORAGE_RESULT_SET_CLASSES = - new CommonVars<>( - "wds.linkis.storage.result.set.classes", - "txt.TextResultSet,table.TableResultSet,io.IOResultSet,html.HtmlResultSet,picture.PictureResultSet", - null, - null); - - public static CommonVars STORAGE_BUILD_FS_CLASSES = - new CommonVars<>( - "wds.linkis.storage.build.fs.classes", - "org.apache.linkis.storage.factory.impl.BuildHDFSFileSystem,org.apache.linkis.storage.factory.impl.BuildLocalFileSystem," - + "org.apache.linkis.storage.factory.impl.BuildOSSSystem,org.apache.linkis.storage.factory.impl.BuildS3FileSystem", - null, - null); - - public static CommonVars IS_SHARE_NODE = - new CommonVars<>("wds.linkis.storage.is.share.node", true, null, null); - - public static CommonVars ENABLE_IO_PROXY = - new CommonVars<>("wds.linkis.storage.enable.io.proxy", false, null, null); - - public static CommonVars IO_USER = - new CommonVars<>("wds.linkis.storage.io.user", "root", null, null); - public static CommonVars IO_FS_EXPIRE_TIME = - new CommonVars<>("wds.linkis.storage.io.fs.num", 1000 * 60 * 10, null, null); - - public static CommonVars IO_PROXY_READ_FETCH_SIZE = - new CommonVars<>("wds.linkis.storage.io.read.fetch.size", new ByteType("100k"), null, null); - - public static CommonVars IO_PROXY_WRITE_CACHE_SIZE = - new CommonVars<>("wds.linkis.storage.io.write.cache.size", new ByteType("64k"), null, null); - - public static CommonVars IO_DEFAULT_CREATOR = - new CommonVars<>("wds.linkis.storage.io.default.creator", "IDE", null, null); - public static CommonVars IO_FS_RE_INIT = - new CommonVars<>("wds.linkis.storage.io.fs.re.init", "re-init", null, null); - - public static CommonVars IO_INIT_RETRY_LIMIT = - new 
CommonVars<>("wds.linkis.storage.io.init.retry.limit", 10, null, null); - - public static CommonVars STORAGE_HDFS_GROUP = - new CommonVars<>("wds.linkis.storage.fileSystem.hdfs.group", "hadoop", null, null); - - public static CommonVars DOUBLE_FRACTION_LEN = - new CommonVars<>("wds.linkis.double.fraction.length", 30, null, null); - - public static CommonVars HDFS_PATH_PREFIX_CHECK_ON = - new CommonVars<>("wds.linkis.storage.hdfs.prefix_check.enable", true, null, null); - - public static CommonVars HDFS_PATH_PREFIX_REMOVE = - new CommonVars<>("wds.linkis.storage.hdfs.prefxi.remove", true, null, null); - - public static CommonVars FS_CACHE_DISABLE = - new CommonVars<>("wds.linkis.fs.hdfs.impl.disable.cache", false, null, null); - - public static CommonVars FS_CHECKSUM_DISBALE = - new CommonVars<>("linkis.fs.hdfs.impl.disable.checksum", false, null, null); - - /** - * more arguments please refer to: - * https://hadoop.apache.org/docs/stable/hadoop-aliyun/tools/hadoop-aliyun/index.html Aliyun OSS - * endpoint to connect to. 
eg: https://oss-cn-hangzhou.aliyuncs.com - */ - public static CommonVars OSS_ENDPOINT = - new CommonVars("wds.linkis.fs.oss.endpoint", "", null, null); - - /** Aliyun bucket name eg: benchmark2 */ - public static CommonVars OSS_ACCESS_BUCKET_NAME = - new CommonVars("wds.linkis.fs.oss.bucket.name", "", null, null); - - /** Aliyun access key ID */ - public static CommonVars OSS_ACCESS_KEY_ID = - new CommonVars("wds.linkis.fs.oss.accessKeyId", "", null, null); - - /** Aliyun access key secret */ - public static CommonVars OSS_ACCESS_KEY_SECRET = - new CommonVars("wds.linkis.fs.oss.accessKeySecret", "", null, null); - - public static CommonVars OSS_PATH_PREFIX_CHECK_ON = - new CommonVars("wds.linkis.storage.oss.prefix_check.enable", false, null, null); - - public static CommonVars OSS_PATH_PREFIX_REMOVE = - new CommonVars("wds.linkis.storage.oss.prefix.remove", true, null, null); - - public static CommonVars S3_ACCESS_KEY = - new CommonVars("linkis.storage.s3.access.key", "", null, null); - - public static CommonVars S3_SECRET_KEY = - new CommonVars("linkis.storage.s3.secret.key", "", null, null); - - public static CommonVars S3_ENDPOINT = - new CommonVars("linkis.storage.s3.endpoint", "", null, null); - - public static CommonVars S3_REGION = - new CommonVars("linkis.storage.s3.region", "", null, null); - - public static CommonVars S3_BUCKET = - new CommonVars("linkis.storage.s3.bucket", "", null, null); -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageHelper.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageHelper.java deleted file mode 100644 index 491c3d7af4..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageHelper.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.utils; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetReader; -import org.apache.linkis.storage.FSFactory; -import org.apache.linkis.storage.resultset.ResultSetFactory; -import org.apache.linkis.storage.resultset.ResultSetReaderFactory; -import org.apache.linkis.storage.resultset.table.TableMetaData; -import org.apache.linkis.storage.resultset.table.TableRecord; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import java.io.IOException; -import java.util.Arrays; - -/** - * 工具类,用于做storage jar包打出来做测试用 Tool class, which is used to print the storage jar package for testing - */ -public class StorageHelper { - private static final Log logger = LogFactory.getLog(StorageHelper.class); - - public static void main(String[] args) { - if (args.length < 2) logger.info("Usage method params eg:getTableResLines path"); - String method = args[0]; - String[] params = Arrays.copyOfRange(args, 1, args.length); - try { - Thread.sleep(10000L); - } catch (InterruptedException e) { - 
} - - switch (method) { - case "getTableResLines": - getTableResLines(params); - break; - case "getTableRes": - getTableRes(params); - break; - case "createNewFile": - createNewFile(params); - break; - default: - logger.info("There is no such method"); - } - } - - /** - * Get the number of table result set file lines(获得表格结果集文件行数) - * - * @param args - */ - public static void getTableResLines(String[] args) { - ResultSetReader resultSetReader = null; - try { - FsPath resPath = StorageUtils.getFsPath(args[0]); - ResultSetFactory resultSetFactory = ResultSetFactory.getInstance(); - - ResultSet resultSet = - resultSetFactory.getResultSetByType(ResultSetFactory.TABLE_TYPE); - Fs fs = FSFactory.getFs(resPath); - fs.init(null); - resultSetReader = - ResultSetReaderFactory.getResultSetReader(resultSet, fs.read(resPath), resPath); - TableMetaData metaData = (TableMetaData) resultSetReader.getMetaData(); - Arrays.stream(metaData.getColumns()).forEach(column -> logger.info(column.toString())); - int num = 0; - Thread.sleep(10000L); - while (resultSetReader.hasNext()) { - resultSetReader.getRecord(); - num++; - } - logger.info(Integer.toString(num)); - } catch (Exception e) { - logger.error("getTableResLines error:", e); - } finally { - if (resultSetReader != null) { - try { - resultSetReader.close(); - } catch (IOException e) { - logger.error("Failed to close ResultSetReader", e); - } - } - } - } - - public static void getTableRes(String[] args) { - try { - int len = Integer.parseInt(args[1]); - int max = len + 10; - FsPath resPath = StorageUtils.getFsPath(args[0]); - ResultSetFactory resultSetFactory = ResultSetFactory.getInstance(); - ResultSet resultSet = - resultSetFactory.getResultSetByType(ResultSetFactory.TABLE_TYPE); - Fs fs = FSFactory.getFs(resPath); - - fs.init(null); - - ResultSetReader reader = - ResultSetReaderFactory.getResultSetReader(resultSet, fs.read(resPath), resPath); - MetaData rmetaData = reader.getMetaData(); - Arrays.stream(((TableMetaData) 
rmetaData).getColumns()) - .forEach(column -> logger.info(column.toString())); - Arrays.stream(((TableMetaData) rmetaData).getColumns()) - .map(column -> column.getColumnName() + ",") - .forEach(column -> logger.info(column)); - int num = 0; - while (reader.hasNext()) { - num++; - if (num > max) return; - if (num > len) { - Record record = reader.getRecord(); - Arrays.stream(((TableRecord) record).row) - .forEach( - value -> { - logger.info(value.toString()); - logger.info(","); - }); - logger.info("\n"); - } - } - } catch (IOException e) { - logger.warn("StorageHelper getTableRes failed", e); - } - } - - public static void createNewFile(String[] args) { - FsPath resPath = StorageUtils.getFsPath(args[0]); - String proxyUser = StorageUtils.getJvmUser(); - try { - FileSystemUtils.createNewFile(resPath, proxyUser, true); - } catch (Exception e) { - logger.warn("StorageHelper createNewFile failed", e); - } - logger.info("success"); - } -} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageUtils.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageUtils.java deleted file mode 100644 index 07bc0510bc..0000000000 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageUtils.java +++ /dev/null @@ -1,279 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.utils; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.common.io.resultset.ResultSetReader; -import org.apache.linkis.common.io.resultset.ResultSetWriter; -import org.apache.linkis.common.utils.Utils; -import org.apache.linkis.hadoop.common.conf.HadoopConf; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.exception.StorageWarnException; -import org.apache.linkis.storage.resultset.ResultSetFactory; -import org.apache.linkis.storage.resultset.ResultSetReaderFactory; -import org.apache.linkis.storage.resultset.ResultSetWriterFactory; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.curator.utils.CloseableUtils; - -import java.io.*; -import java.lang.reflect.Method; -import java.text.NumberFormat; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.function.Function; -import java.util.stream.Stream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.CONFIGURATION_NOT_READ; - -public class StorageUtils { - private static final Logger logger = LoggerFactory.getLogger(StorageUtils.class); - - public static final String HDFS = "hdfs"; - public static final String FILE = "file"; - public static final String OSS = "oss"; - public static final String S3 = 
"s3"; - - public static final String FILE_SCHEMA = "file://"; - public static final String HDFS_SCHEMA = "hdfs://"; - public static final String OSS_SCHEMA = "oss://"; - public static final String S3_SCHEMA = "s3://"; - - private static final NumberFormat nf = NumberFormat.getInstance(); - - static { - nf.setGroupingUsed(false); - nf.setMaximumFractionDigits((int) StorageConfiguration.DOUBLE_FRACTION_LEN.getValue()); - } - - public static String doubleToString(double value) { - return nf.format(value); - } - - public static Map loadClass(String classStr, Function op) { - String[] _classes = classStr.split(","); - LinkedHashMap classes = new LinkedHashMap<>(); - for (String clazz : _classes) { - try { - T obj = Utils.getClassInstance(clazz.trim()); - classes.put(op.apply(obj), obj); - } catch (Exception e) { - logger.warn("StorageUtils loadClass failed", e); - } - } - return classes; - } - - public static Map> loadClasses( - String classStr, String pge, Function, String> op) { - String[] _classes = - StringUtils.isEmpty(pge) - ? classStr.split(",") - : Stream.of(StringUtils.split(classStr, ',')) - .map(value -> pge + "." 
+ value) - .toArray(String[]::new); - Map> classes = new LinkedHashMap<>(); - for (String clazz : _classes) { - try { - Class _class = - (Class) Thread.currentThread().getContextClassLoader().loadClass(clazz.trim()); - classes.put(op.apply(_class), _class); - } catch (Exception e) { - logger.warn("StorageUtils loadClasses failed", e); - } - } - return classes; - } - - public static String pathToSuffix(String path) { - String fileName = new File(path).getName(); - if (fileName.length() > 0) { - int dot = fileName.lastIndexOf('.'); - if (dot > -1 && dot < fileName.length() - 1) { - return fileName.substring(dot + 1); - } - } - return fileName; - } - - public static Object invoke(Object obj, Method method, Object[] args) throws Exception { - return method.invoke(obj, args); - } - - /** - * Serialized string is a result set of type Text(序列化字符串为Text类型的结果集) - * - * @param value - * @return - */ - public static String serializerStringToResult(String value) throws IOException { - ResultSet resultSet = - ResultSetFactory.getInstance().getResultSetByType(ResultSetFactory.TEXT_TYPE); - ResultSetWriter writer = - ResultSetWriterFactory.getResultSetWriter(resultSet, Long.MAX_VALUE, null); - LineMetaData metaData = new LineMetaData(null); - LineRecord record = new LineRecord(value); - writer.addMetaData(metaData); - writer.addRecord(record); - String res = writer.toString(); - IOUtils.closeQuietly(writer); - return res; - } - - /** - * The result set of serialized text is a string(序列化text的结果集为字符串) - * - * @param result - * @return - */ - public static String deserializerResultToString(String result) throws IOException { - ResultSet resultSet = - ResultSetFactory.getInstance().getResultSetByType(ResultSetFactory.TEXT_TYPE); - ResultSetReader reader = ResultSetReaderFactory.getResultSetReader(resultSet, result); - StringBuilder sb = new StringBuilder(); - while (reader.hasNext()) { - LineRecord record = (LineRecord) reader.getRecord(); - sb.append(record.getLine()); - } - 
reader.close(); - return sb.toString(); - } - - public static void close(OutputStream outputStream) { - close(outputStream, null, null); - } - - public static void close(InputStream inputStream) { - close(null, inputStream, null); - } - - public static void close(Fs fs) { - close(null, null, fs); - } - - public static void close(OutputStream outputStream, InputStream inputStream, Fs fs) { - try { - if (outputStream != null) outputStream.close(); - } catch (IOException e) { - // ignore exception - } - try { - if (inputStream != null) inputStream.close(); - } catch (IOException e) { - // ignore exception - } - try { - if (fs != null) fs.close(); - } catch (IOException e) { - // ignore exception - } - } - - public static void close(Closeable closeable) { - CloseableUtils.closeQuietly(closeable); - } - - public static String getJvmUser() { - return System.getProperty("user.name"); - } - - public static boolean isHDFSNode() { - File confPath = new File(HadoopConf.hadoopConfDir()); - // TODO IO-client mode need return false - if (!confPath.exists() || confPath.isFile()) { - throw new StorageWarnException( - CONFIGURATION_NOT_READ.getErrorCode(), CONFIGURATION_NOT_READ.getErrorDesc()); - } else return true; - } - - /** - * Returns the FsPath by determining whether the path is a schema. By default, the FsPath of the - * file is returned. 
- * - * @param path - * @return - */ - public static FsPath getFsPath(String path) { - if (path.startsWith(FILE_SCHEMA) || path.startsWith(HDFS_SCHEMA)) { - return new FsPath(path); - } else { - return new FsPath(FILE_SCHEMA + path); - } - } - - public static int readBytes(InputStream inputStream, byte[] bytes, int len) { - int readLen = 0; - try { - int count = 0; - // 当使用s3存储结果文件时时,com.amazonaws.services.s3.model.S3InputStream无法正确读取.dolphin文件。需要在循环条件添加: - // readLen >= 0 - // To resolve the issue when using S3 to store result files and - // com.amazonaws.services.s3.model.S3InputStream to read .dolphin files, you need to add the - // condition readLen >= 0 in the loop. - while (readLen < len && readLen >= 0) { - count = inputStream.read(bytes, readLen, len - readLen); - - if (count == -1 && inputStream.available() < 1) { - return readLen; - } - readLen += count; - } - } catch (IOException e) { - logger.warn("FileSystemUtils readBytes failed", e); - } - return readLen; - } - - public static String colToString(Object col, String nullValue) { - if (col == null) { - return nullValue; - } else if (col instanceof Double) { - return doubleToString((Double) col); - } else if ("NULL".equals(col) || "".equals(col)) { - return nullValue; - } else { - return col.toString(); - } - } - - public static String colToString(Object col) { - return colToString(col, "NULL"); - } - - public static boolean isIOProxy() { - return (boolean) StorageConfiguration.ENABLE_IO_PROXY.getValue(); - } - - public static byte[] mergeByteArrays(byte[] arr1, byte[] arr2) { - byte[] mergedArray = new byte[arr1.length + arr2.length]; - System.arraycopy(arr1, 0, mergedArray, 0, arr1.length); - System.arraycopy(arr2, 0, mergedArray, arr1.length, arr2.length); - return mergedArray; - } - - public static boolean isHDFSPath(FsPath fsPath) { - return HDFS.equalsIgnoreCase(fsPath.getFsType()); - } -} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/FSFactory.scala 
/**
 * Entry point for obtaining [[Fs]] instances for the storage module. Factories are discovered
 * from configuration and cached by fs name (e.g. "file", "hdfs").
 */
object FSFactory extends Logging {

  /** Registered build factories, discovered once at class load, keyed by fs name. */
  private val buildClasses: Map[String, BuildFactory] = StorageUtils.loadClass[BuildFactory](
    StorageConfiguration.STORAGE_BUILD_FS_CLASSES.getValue,
    factory => factory.fsName()
  )

  /** Looks up the factory registered for `fsName`, failing fast for unsupported types. */
  def getBuildFactory(fsName: String): BuildFactory =
    buildClasses.getOrElse(
      fsName,
      throw new StorageFatalException(
        UNSUPPORTED_FILE.getErrorCode,
        MessageFormat.format(UNSUPPORTED_FILE.getErrorDesc, fsName)
      )
    )

  /** Builds an `fsType` file system owned by the JVM process user, acting as `proxyUser`. */
  def getFs(fsType: String, proxyUser: String): Fs =
    getBuildFactory(fsType).getFs(StorageUtils.getJvmUser, proxyUser)

  /** Builds an `fsType` file system for the JVM process user itself. */
  def getFs(fsType: String): Fs = {
    val jvmUser = StorageUtils.getJvmUser
    getBuildFactory(fsType).getFs(jvmUser, jvmUser)
  }

  /**
   * 1. If this machine has shared storage, the file:// type FS obtained here is the FS of the
   * process user. 2, if this machine does not have shared storage, then the file:// type FS
   * obtained is the proxy to the Remote (shared storage machine root) FS 3. If it is HDFS, it
   * returns the FS of the process user.
   *
   * @param fsPath path whose fs type selects the factory
   * @return the file system for the process user
   */
  def getFs(fsPath: FsPath): Fs = getFs(fsPath.getFsType())

  /**
   * 1. If the process user is passed and the proxy user and the process user are consistent, the
   * file:// type FS is the FS of the process user (the shared storage exists) 2, if the process
   * user is passed and the proxy user and the process user are consistent and there is no shared
   * storage, the file:// type FS is the proxy to the remote (shared storage machine root) FS 3.
   * If the passed proxy user and process user are consistent, the hdfs type is the FS of the
   * process user. 4. If the proxy user and the process user are inconsistent, the hdfs type is
   * the FS after the proxy.
   *
   * @param fsPath path whose fs type selects the factory
   * @param proxyUser user to act as
   * @return the (possibly proxied) file system
   */
  def getFsByProxyUser(fsPath: FsPath, proxyUser: String): Fs =
    getFs(fsPath.getFsType(), proxyUser)

  /** Builds an `fs`-type file system for the JVM user, scoped to the given label. */
  def getFSByLabel(fs: String, label: String): Fs = {
    val jvmUser = StorageUtils.getJvmUser
    getBuildFactory(fs).getFs(jvmUser, jvmUser, label)
  }

  /** Builds an `fs`-type file system proxying `proxy`, scoped to the given label. */
  def getFSByLabelAndUser(fs: String, label: String, proxy: String): Fs =
    getBuildFactory(fs).getFs(StorageUtils.getJvmUser, proxy, label)

}
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/LineMetaData.scala @@ -15,22 +15,18 @@ * limitations under the License. */ -package org.apache.linkis.filesystem +package org.apache.linkis.storage -import org.junit.jupiter.api.{Assertions, DisplayName, Test} +import org.apache.linkis.common.io.MetaData +import org.apache.linkis.storage.resultset.ResultMetaData -class WorkspaceClientFactoryTest { +class LineMetaData(private var metaData: String = null) extends ResultMetaData { - @Test - @DisplayName("getClientTest") - def getClientTest(): Unit = { + def getMetaData: String = metaData - val user = "hadoop" - val token = "abc" - val gatewayAddress = "127.0.0.1:9001" - - val client = WorkspaceClientFactory.getClient(user, token, gatewayAddress) - Assertions.assertNotNull(client) + def setMetaData(metaData: String): Unit = { + this.metaData = metaData } + override def cloneMeta(): MetaData = new LineMetaData(metaData) } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/operator/OperatorFactory.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/LineRecord.scala similarity index 67% rename from linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/operator/OperatorFactory.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/LineRecord.scala index e4d02f945b..d6e3220cf2 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/operator/OperatorFactory.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/LineRecord.scala @@ -15,19 +15,20 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.common.operator; +package org.apache.linkis.storage -import java.util.Map; +import org.apache.linkis.common.io.Record +import org.apache.linkis.storage.resultset.ResultRecord -public interface OperatorFactory { +class LineRecord(private var line: String) extends ResultRecord { - String getOperatorName(Map parameters); + def getLine: String = line - Operator getOperatorRequest(Map parameters); + def setLine(line: String): Unit = { + this.line = line + } - OperatorFactory operatorFactory = new OperatorFactoryImpl(); + override def cloneRecord(): Record = new LineRecord(line) - static OperatorFactory apply() { - return operatorFactory; - } + override def toString: String = line } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala new file mode 100644 index 0000000000..5330983dd6 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.conf + +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.utils.ByteTimeUtils + +import org.apache.commons.lang3.StringUtils + +object LinkisStorageConf { + private val CONF_LOCK = new Object() + + val enableLimitThreadLocal: ThreadLocal[String] = new ThreadLocal[String] + + val columnIndicesThreadLocal: ThreadLocal[Array[Int]] = new ThreadLocal[Array[Int]] + + val HDFS_FILE_SYSTEM_REST_ERRS: String = + CommonVars + .apply( + "wds.linkis.hdfs.rest.errs", + ".*Filesystem closed.*|.*Failed to find any Kerberos tgt.*" + ) + .getValue + + val ROW_BYTE_MAX_LEN_STR = CommonVars("wds.linkis.resultset.row.max.str", "2m").getValue + + val ROW_BYTE_MAX_LEN = ByteTimeUtils.byteStringAsBytes(ROW_BYTE_MAX_LEN_STR) + + val FILE_TYPE = CommonVars( + "wds.linkis.storage.file.type", + "dolphin,sql,scala,py,hql,python,out,log,text,sh,jdbc,ngql,psql,fql,tsql,txt" + ).getValue + + private var fileTypeArr: Array[String] = null + + private def fileTypeArrParser(fileType: String): Array[String] = { + if (StringUtils.isBlank(fileType)) Array() + else fileType.split(",") + } + + def getFileTypeArr: Array[String] = { + if (fileTypeArr == null) { + CONF_LOCK.synchronized { + if (fileTypeArr == null) { + fileTypeArr = fileTypeArrParser(FILE_TYPE) + } + } + } + fileTypeArr + } + + val LINKIS_RESULT_ENABLE_NULL = CommonVars("linkis.resultset.enable.null.replace", false).getValue + + val LINKIS_RESULT_COLUMN_SIZE = + CommonVars("linkis.resultset.column.size.max", Int.MaxValue).getValue + + val LINKIS_RESULT_COL_LENGTH = + CommonVars("linkis.resultset.col.length.max", Int.MaxValue).getValue + + val LINKIS__READ_RESULT_ROW_MAX_LEN_STR = + CommonVars("linkis.resultset.read.row.max.str", "20m").getValue + + val LINKIS_READ_ROW_BYTE_MAX_LEN = + ByteTimeUtils.byteStringAsBytes(LINKIS__READ_RESULT_ROW_MAX_LEN_STR) + +} diff --git 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/entity/ResourceOperationType.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsReader.scala similarity index 85% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/entity/ResourceOperationType.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsReader.scala index c1a4f211b4..d40d041a3e 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/entity/ResourceOperationType.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsReader.scala @@ -15,11 +15,8 @@ * limitations under the License. */ -package org.apache.linkis.manager.rm.entity; +package org.apache.linkis.storage.csv -public enum ResourceOperationType { - LOCK, - USED, - USED_RELEASE, - LOCKER_RELEASE -} +import org.apache.linkis.common.io.FsReader + +abstract class CSVFsReader extends FsReader {} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsWriter.scala new file mode 100644 index 0000000000..93610a7db2 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/CSVFsWriter.scala @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.csv + +import org.apache.linkis.common.io.FsWriter + +import java.io.OutputStream + +abstract class CSVFsWriter extends FsWriter { + val charset: String + val separator: String + val quoteRetouchEnable: Boolean +} + +object CSVFsWriter { + + def getCSVFSWriter( + charset: String, + separator: String, + quoteRetouchEnable: Boolean, + outputStream: OutputStream + ): CSVFsWriter = new StorageCSVWriter(charset, separator, quoteRetouchEnable, outputStream) + + def getCSVFSWriter( + charset: String, + separator: String, + quoteRetouchEnable: Boolean, + outputStream: OutputStream, + keepNewline: Boolean + ): CSVFsWriter = + new StorageCSVWriter(charset, separator, quoteRetouchEnable, outputStream, keepNewline) + +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/AlertUtils.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVReader.scala similarity index 91% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/AlertUtils.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVReader.scala index a06dfaab18..6fc91c836b 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/AlertUtils.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVReader.scala @@ 
-15,6 +15,6 @@ * limitations under the License. */ -package org.apache.linkis.manager.rm.utils; +package org.apache.linkis.storage.csv -public class AlertUtils {} +class StorageCSVReader {} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala new file mode 100644 index 0000000000..95d98669b8 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Streams a table result set to an [[java.io.OutputStream]] as CSV text.
 *
 * Rows are buffered in memory and flushed to the stream once the buffer approaches capacity,
 * on explicit `flush()`, and on `close()`.
 *
 * @param charset encoding used when converting buffered text to bytes
 * @param separator field separator; the literal "t" is accepted as shorthand for TAB
 * @param quoteRetouchEnable when true, values are wrapped in double quotes (embedded quotes
 *   are stripped first)
 * @param outputStream destination stream; closed by `close()`
 */
class StorageCSVWriter(
    val charset: String,
    val separator: String,
    val quoteRetouchEnable: Boolean,
    val outputStream: OutputStream
) extends CSVFsWriter
    with Logging {

  // When false (the default), newlines inside values are replaced with spaces so each CSV
  // record stays on one physical line.
  var keepNewline: Boolean = false

  def this(
      charset: String,
      separator: String,
      quoteRetouchEnable: Boolean,
      outputStream: OutputStream,
      keepNewline: Boolean
  ) {
    this(charset, separator, quoteRetouchEnable, outputStream)
    this.keepNewline = keepNewline
  }

  // Fix: named constants replace the bare 50000/49500 pair; the flush threshold is kept
  // slightly below capacity so appending an average row rarely forces a reallocation.
  private val BUFFER_CAPACITY = 50000
  private val FLUSH_THRESHOLD = 49500

  // Fix: typed as String (the original match mixed Char and String, inferring Any).
  // Compatible with possible missing escape characters: "t" means TAB.
  private val delimiter: String = separator match {
    case "t" => "\t"
    case sep if StringUtils.isNotEmpty(sep) => sep
    case _ => "\t"
  }

  private val buffer: StringBuilder = new StringBuilder(BUFFER_CAPACITY)

  /** Writes the header row (column names) of the table result set. */
  @scala.throws[IOException]
  override def addMetaData(metaData: MetaData): Unit = {
    val head = metaData.asInstanceOf[TableMetaData].columns.map(_.columnName)
    write(head)
  }

  /** Renders one row as a delimiter-joined, newline-terminated CSV line. */
  private def compact(row: Array[String]): String = {
    val quotationMarks: String = "\""
    val dealNewlineSymbolMarks: String = "\n"

    def decorateValue(v: String): String = {
      if (StringUtils.isBlank(v)) v
      else {
        var res = v
        if (quoteRetouchEnable) {
          res = s"$quotationMarks${v.replaceAll(quotationMarks, "")}$quotationMarks"
        }
        if (!this.keepNewline) {
          res = res.replaceAll(dealNewlineSymbolMarks, " ")
        }
        logger.debug("decorateValue with input:" + v + " output:" + res)
        res
      }
    }

    if (logger.isDebugEnabled()) {
      logger.debug("delimiter:" + delimiter)
    }

    row.map(decorateValue).mkString(delimiter) + "\n"
  }

  /** Buffers one rendered row, flushing the buffer first when it is nearly full. */
  private def write(row: Array[String]): Unit = {
    val content: String = compact(row)
    if (buffer.length + content.length > FLUSH_THRESHOLD) {
      IOUtils.write(buffer.toString().getBytes(charset), outputStream)
      buffer.clear()
    }
    buffer.append(content)
  }

  /** Writes one data row, stringifying each cell via [[DataType.valueToString]]. */
  @scala.throws[IOException]
  override def addRecord(record: Record): Unit = {
    val body = record.asInstanceOf[TableRecord].row.map(DataType.valueToString)
    write(body)
  }

  /** Drains the in-memory buffer to the underlying stream. */
  override def flush(): Unit = {
    IOUtils.write(buffer.toString().getBytes(charset), outputStream)
    buffer.clear()
  }

  /** Flushes remaining content and quietly closes the underlying stream. */
  override def close(): Unit = {
    flush()
    IOUtils.closeQuietly(outputStream)
  }

}
/**
 * Maps textual type names to storage [[DataType]]s and converts serialized cell values into
 * typed Scala/Java values.
 *
 * NOTE: the case order in `toDataType` is significant — the unanchored prefix regexes overlap
 * (e.g. SHORT before INT, "bigint" would also match INT_REGEX), so do not reorder.
 */
object DataType extends Logging {

  val LOWCASE_NULL_VALUE = "null"

  // Matches e.g. decimal(10,2); unanchored so a match anywhere in the name counts.
  val DECIMAL_REGEX = "^decimal\\(\\s*\\d*\\s*,\\s*\\d*\\s*\\)".r.unanchored

  val SHORT_REGEX = "^short.*".r.unanchored
  val INT_REGEX = "^int.*".r.unanchored
  val LONG_REGEX = "^long.*".r.unanchored
  val BIGINT_REGEX = "^bigint.*".r.unanchored
  val FLOAT_REGEX = "^float.*".r.unanchored
  val DOUBLE_REGEX = "^double.*".r.unanchored

  val VARCHAR_REGEX = "^varchar.*".r.unanchored
  val CHAR_REGEX = "^char.*".r.unanchored

  val ARRAY_REGEX = "array.*".r.unanchored

  val MAP_REGEX = "map.*".r.unanchored

  val LIST_REGEX = "list.*".r.unanchored

  val STRUCT_REGEX = "struct.*".r.unanchored

  /** Parses a type-name string into a DataType; unknown names fall back to StringType. */
  implicit def toDataType(dataType: String): DataType = dataType match {
    case "void" | "null" => NullType
    case "string" => StringType
    case "boolean" => BooleanType
    case SHORT_REGEX() => ShortIntType
    case LONG_REGEX() => LongType
    case BIGINT_REGEX() => BigIntType
    case INT_REGEX() | "integer" | "smallint" => IntType
    case FLOAT_REGEX() => FloatType
    case DOUBLE_REGEX() => DoubleType
    case VARCHAR_REGEX() => VarcharType
    case CHAR_REGEX() => CharType
    case "date" => DateType
    case "timestamp" => TimestampType
    case "binary" => BinaryType
    case "decimal" | DECIMAL_REGEX() => DecimalType
    case ARRAY_REGEX() => ArrayType
    case MAP_REGEX() => MapType
    case LIST_REGEX() => ListType
    case STRUCT_REGEX() => StructType
    case _ => StringType
  }

  /**
   * Converts a serialized cell string to a value of the given DataType.
   *
   * Linkis-null markers become real nulls unless LINKIS_RESULT_ENABLE_NULL is set, in which
   * case they are kept as the literal Dolphin.NULL. Any conversion failure is logged at debug
   * level and the raw string is returned unchanged (best-effort conversion).
   */
  def toValue(dataType: DataType, value: String): Any = {
    var newValue: String = value
    if (isLinkisNull(value)) {
      if (!LinkisStorageConf.LINKIS_RESULT_ENABLE_NULL) {
        return null
      } else {
        newValue = Dolphin.NULL
      }
    }
    Utils.tryCatch(dataType match {
      case NullType => null
      case StringType | CharType | VarcharType | StructType | ListType | ArrayType | MapType =>
        newValue
      case BooleanType => if (isNumberNull(newValue)) null else newValue.toBoolean
      case ShortIntType => if (isNumberNull(newValue)) null else newValue.toShort
      case IntType => if (isNumberNull(newValue)) null else newValue.toInt
      case LongType | BigIntType => if (isNumberNull(newValue)) null else newValue.toLong
      case FloatType => if (isNumberNull(newValue)) null else newValue.toFloat
      case DoubleType => if (isNumberNull(newValue)) null else newValue.toDouble
      case DecimalType => if (isNumberNull(newValue)) null else new JavaBigDecimal(newValue)
      case DateType => if (isNumberNull(newValue)) null else Date.valueOf(newValue)
      case TimestampType =>
        // stripSuffix(".0") drops the default fractional second Timestamp.toString appends.
        if (isNumberNull(newValue)) null else Timestamp.valueOf(newValue).toString.stripSuffix(".0")
      case BinaryType => if (isNull(newValue)) null else newValue.getBytes()
      case _ => newValue
    }) { t =>
      logger.debug(s"Failed to $newValue switch to dataType:", t)
      newValue
    }
  }

  /** True when the value is the internal LINKIS_NULL marker (or an actual null). */
  def isLinkisNull(value: String): Boolean = {
    if (value == null || value == Dolphin.LINKIS_NULL) true else false
  }

  /** True for null, the "NULL" marker, or a blank string. */
  def isNull(value: String): Boolean =
    if (value == null || value == Dolphin.NULL || value.trim == "") true else false

  /** Like isNull but case-insensitive on the "NULL" marker (used for numeric conversions). */
  def isNumberNull(value: String): Boolean =
    if (null == value || Dolphin.NULL.equalsIgnoreCase(value) || value.trim == "") {
      true
    } else {
      false
    }

  /** Renders a value for output; BigDecimals use plain (non-scientific) notation. */
  def valueToString(value: Any): String = {
    if (null == value) return null
    value match {
      case javaDecimal: JavaBigDecimal =>
        javaDecimal.toPlainString
      case _ => value.toString
    }
  }

}

/**
 * A storage column type; `javaSQLType` values match the java.sql.Types constants
 * (e.g. 12 = VARCHAR, 91 = DATE, 2003 = ARRAY).
 */
abstract class DataType(val typeName: String, val javaSQLType: Int) {
  override def toString: String = typeName
}

case object NullType extends DataType("void", 0)
case object StringType extends DataType("string", 12)
case object BooleanType extends DataType("boolean", 16)
case object TinyIntType extends DataType("tinyint", -6)
case object ShortIntType extends DataType("short", 5)
case object IntType extends DataType("int", 4)
// long and bigint share java.sql.Types.BIGINT (-5).
case object LongType extends DataType("long", -5)
case object BigIntType extends DataType("bigint", -5)
case object FloatType extends DataType("float", 6)
case object DoubleType extends DataType("double", 8)
case object CharType extends DataType("char", 1)
case object VarcharType extends DataType("varchar", 12)
case object DateType extends DataType("date", 91)
case object TimestampType extends DataType("timestamp", 93)
case object BinaryType extends DataType("binary", -2)
case object DecimalType extends DataType("decimal", 3)
case object ArrayType extends DataType("array", 2003)
case object MapType extends DataType("map", 2000)
case object ListType extends DataType("list", 2001)
case object StructType extends DataType("struct", 2002)
case object BigDecimalType extends DataType("bigdecimal", 3)

/** One column of a table result set: name, type, and an optional comment. */
case class Column(columnName: String, dataType: DataType, comment: String) {

  def toArray: Array[Any] = {
    Array[Any](columnName, dataType, comment)
  }

  override def toString: String =
    s"columnName:$columnName,dataType:$dataType,comment:$comment"

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.linkis.storage.domain

import org.apache.linkis.common.utils.Logging
import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.FAILED_TO_READ_INTEGER
import org.apache.linkis.storage.exception.{
  StorageErrorCode,
  StorageErrorException,
  StorageWarnException
}
import org.apache.linkis.storage.utils.{StorageConfiguration, StorageUtils}

import java.io.{InputStream, IOException}

/**
 * Constants and low-level helpers for the "dolphin" result-set file format: the magic
 * header, NULL sentinels, and fixed-width integer encoding/decoding.
 */
object Dolphin extends Logging {

  val CHAR_SET = StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue
  val MAGIC = "dolphin"

  val MAGIC_BYTES = MAGIC.getBytes(StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue)
  val MAGIC_LEN = MAGIC_BYTES.length

  val DOLPHIN_FILE_SUFFIX = ".dolphin"

  val COL_SPLIT = ","
  val COL_SPLIT_BYTES = COL_SPLIT.getBytes("utf-8")
  val COL_SPLIT_LEN = COL_SPLIT_BYTES.length

  val NULL = "NULL"
  val NULL_BYTES = NULL.getBytes("utf-8")

  val LINKIS_NULL = "LINKIS_NULL"
  val LINKIS_NULL_BYTES = LINKIS_NULL.getBytes("utf-8")

  // Every length field in the format is a zero-padded, 10-character decimal integer.
  val INT_LEN = 10

  val FILE_EMPTY = 31

  /** Encode any value's string form using the configured result-set charset. */
  def getBytes(value: Any): Array[Byte] = {
    value.toString.getBytes(CHAR_SET)
  }

  /**
   * Decode a slice of a byte array to a String using the configured charset.
   *
   * @param bytes source buffer
   * @param start offset of the first byte to decode
   * @param len number of bytes to decode
   * @return the decoded string
   */
  def getString(bytes: Array[Byte], start: Int, len: Int): String = {
    try {
      new String(bytes, start, len, Dolphin.CHAR_SET)
    } catch {
      // Decoding a huge buffer can exhaust the heap; surface it as a storage error
      // instead of letting the raw OOM propagate.
      case e: OutOfMemoryError =>
        logger.error("bytes to String oom {} Byte", bytes.length)
        throw new StorageErrorException(
          StorageErrorCode.FS_OOM.getCode,
          StorageErrorCode.FS_OOM.getMessage,
          e
        )
    }
  }

  /** Map the internal LINKIS_NULL sentinel to the public NULL literal. */
  def toStringValue(value: String): String =
    if (LINKIS_NULL.equals(value)) NULL else value

  /**
   * Read one fixed-width integer (INT_LEN bytes of zero-padded decimal) from the stream.
   *
   * @throws StorageWarnException when fewer than INT_LEN bytes are available
   */
  def readInt(inputStream: InputStream): Int = {
    val buffer = new Array[Byte](INT_LEN + 1)
    val read = StorageUtils.readBytes(inputStream, buffer, INT_LEN)
    if (read != INT_LEN) {
      throw new StorageWarnException(
        FAILED_TO_READ_INTEGER.getErrorCode,
        FAILED_TO_READ_INTEGER.getErrorDesc
      )
    }
    getString(buffer, 0, INT_LEN).toInt
  }

  /** Encode an integer as a zero-padded, INT_LEN-character decimal string. */
  def getIntBytes(value: Int): Array[Byte] = {
    val digits = value.toString
    val padded = "0" * (INT_LEN - digits.length) + digits
    Dolphin.getBytes(padded)
  }

  /**
   * Read the file header from a stream and return the embedded result-set type code, or
   * null when the stream is already at end-of-file.
   */
  def getType(inputStream: InputStream): String = {
    val header = new Array[Byte](100)
    val len = StorageUtils.readBytes(inputStream, header, Dolphin.MAGIC_LEN + INT_LEN)
    if (len == -1) null
    else getType(Dolphin.getString(header, 0, len))
  }

  /**
   * Extract the result-set type code from a header string, validating the magic prefix.
   *
   * @throws IOException when the header does not start with the dolphin magic
   */
  def getType(content: String): String = {
    if (content.length < MAGIC.length || content.substring(0, MAGIC.length) != MAGIC) {
      throw new IOException(s"File header type must be dolphin,content:$content is not")
    }
    content.substring(MAGIC.length, MAGIC.length + INT_LEN).toInt.toString
  }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.linkis.storage.domain

import java.lang.reflect.Type

import com.google.gson.GsonBuilder

/**
 * Describes one remote filesystem method invocation proxied through the IO engine.
 *
 * @param id
 *   unique id of the engine
 * @param fsType
 *   filesystem type
 * @param creatorUser
 *   user that started the corresponding JVM
 * @param proxyUser
 *   user on whose behalf the call is made
 * @param clientIp
 *   client IP, used for whitelist control
 * @param methodName
 *   name of the method being invoked
 * @param params
 *   arguments for the method
 */
case class MethodEntity(
    id: Long,
    fsType: String,
    creatorUser: String,
    proxyUser: String,
    clientIp: String,
    methodName: String,
    params: Array[AnyRef]
) {

  // params is intentionally omitted: it may be large and is not useful in logs.
  override def toString: String = {
    s"id:$id, methodName:$methodName, fsType:$fsType, " +
      s"creatorUser:$creatorUser, proxyUser:$proxyUser, clientIp:$clientIp, "
  }

}

/** JSON (de)serialization helpers for [[MethodEntity]] and arbitrary Java objects. */
object MethodEntitySerializer {

  // Shared, thread-safe Gson instance; dates are rendered in ISO-like form.
  val gson = new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").create

  /**
   * Deserialize a JSON code string into a MethodEntity.
   *
   * @param code JSON representation of a MethodEntity
   */
  def deserializer(code: String): MethodEntity = gson.fromJson(code, classOf[MethodEntity])

  /**
   * Serialize a MethodEntity into its JSON code string.
   *
   * @param methodEntity the entity to serialize
   */
  def serializer(methodEntity: MethodEntity): String = gson.toJson(methodEntity)

  /**
   * Serialize any Java object as a JSON string.
   *
   * @param value the object to serialize
   */
  def serializerJavaObject(value: Any): String = gson.toJson(value)

  /**
   * Deserialize a JSON string into a Java object of the given class.
   *
   * @param json JSON input
   * @param classType target class
   */
  def deserializerToJavaObject[T](json: String, classType: Class[T]): T = {
    gson.fromJson(json, classType)
  }

  /**
   * Deserialize a JSON string into a Java object of the given generic type.
   *
   * @param json JSON input
   * @param oType target type (supports generics)
   */
  def deserializerToJavaObject[T](json: String, oType: Type): T = {
    gson.fromJson(json, oType)
  }

}
+ */ + +package org.apache.linkis.storage.excel + +import org.apache.linkis.common.io.FsReader + +abstract class ExcelFsReader extends FsReader {} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptMetaData.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelFsWriter.scala similarity index 59% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptMetaData.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelFsWriter.scala index 28999aae75..079920bef3 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptMetaData.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/ExcelFsWriter.scala @@ -15,27 +15,33 @@ * limitations under the License. */ -package org.apache.linkis.storage.script; +package org.apache.linkis.storage.excel -import org.apache.linkis.common.io.MetaData; +import org.apache.linkis.common.io.FsWriter -public class ScriptMetaData implements MetaData { - private Variable[] variables; +import java.io.OutputStream - public ScriptMetaData(Variable[] variables) { - this.variables = variables; - } +abstract class ExcelFsWriter extends FsWriter { + val charset: String + val sheetName: String + val dateFormat: String + val autoFormat: Boolean +} - @Override - public MetaData cloneMeta() { - return new ScriptMetaData(variables.clone()); - } +object ExcelFsWriter { - public Variable[] getMetaData() { - return variables; - } + def getExcelFsWriter( + charset: String, + sheetName: String, + dateFormat: String, + outputStream: OutputStream, + autoFormat: Boolean + ): ExcelFsWriter = new StorageExcelWriter( + charset, + sheetName, + dateFormat, + outputStream: OutputStream, + autoFormat: Boolean + ) - public void setMetaData(Variable[] variables) { - this.variables = variables; - } } diff --git 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.linkis.storage.excel;

import org.apache.poi.hssf.eventusermodel.EventWorkbookBuilder.SheetRecordCollectingListener;
import org.apache.poi.hssf.eventusermodel.*;
import org.apache.poi.hssf.eventusermodel.dummyrecord.LastCellOfRowDummyRecord;
import org.apache.poi.hssf.eventusermodel.dummyrecord.MissingCellDummyRecord;
import org.apache.poi.hssf.model.HSSFFormulaParser;
import org.apache.poi.hssf.record.*;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

/**
 * Event-driven (streaming) reader for legacy XLS (HSSF) workbooks.
 *
 * <p>Cell values are accumulated into {@link #rowlist}; at the end of each row the
 * configured {@link IExcelRowDeal} callback is invoked with the completed row and the
 * buffer is cleared. This avoids loading the whole workbook into memory.
 */
public class ExcelXlsReader implements HSSFListener {
  private int minColumns = -1;

  private POIFSFileSystem fs;

  private InputStream inputStream;

  private int lastRowNumber;

  private int lastColumnNumber;

  /** Should we output the formula, or the value it has? */
  private boolean outputFormulaValues = true;

  /** For parsing formulas (only used when outputFormulaValues is false). */
  private SheetRecordCollectingListener workbookBuildingListener;

  /** Stub excel-2003 workbook, needed to render formula strings. */
  private HSSFWorkbook stubWorkbook;

  /** Shared-strings table record, resolved lazily while streaming. */
  private SSTRecord sstRecord;

  private FormatTrackingHSSFListener formatListener;

  /** Index of the sheet currently being processed. */
  private int sheetIndex = -1;

  private BoundSheetRecord[] orderedBSRs;

  private List<BoundSheetRecord> boundSheetRecords = new ArrayList<>();

  // For handling formulas whose result is a string (arrives in a follow-up StringRecord).
  private int nextRow;

  private int nextColumn;

  private boolean outputNextStringRecord;

  /** Row currently being assembled. */
  private int curRow = 0;

  /** Container that stores the cell values of the current row. */
  private List<String> rowlist = new ArrayList<>();

  @SuppressWarnings("unused")
  private String sheetName;

  private IExcelRowDeal excelRowDeal;

  /** Wire up the per-row callback and the stream to read from. */
  public void init(IExcelRowDeal excelRowDeal, InputStream inputStream) {
    this.excelRowDeal = excelRowDeal;
    this.inputStream = inputStream;
  }

  /**
   * Traverse all the sheets of the workbook, firing {@link #processRecord(Record)} for
   * every record (including synthetic missing-cell/end-of-row records).
   *
   * @throws IOException if the stream is not a valid POIFS container
   */
  public void process() throws IOException {
    this.fs = new POIFSFileSystem(this.inputStream);
    MissingRecordAwareHSSFListener listener = new MissingRecordAwareHSSFListener(this);
    formatListener = new FormatTrackingHSSFListener(listener);
    HSSFEventFactory factory = new HSSFEventFactory();
    HSSFRequest request = new HSSFRequest();
    if (outputFormulaValues) {
      request.addListenerForAllRecords(formatListener);
    } else {
      workbookBuildingListener = new SheetRecordCollectingListener(formatListener);
      request.addListenerForAllRecords(workbookBuildingListener);
    }
    factory.processWorkbookEvents(request, fs);
  }

  /** HSSFListener callback: accumulate one record into the current row. */
  @Override
  public void processRecord(Record record) {
    int thisRow = -1;
    int thisColumn = -1;
    String thisStr = null;
    String value = null;
    switch (record.getSid()) {
      case BoundSheetRecord.sid:
        boundSheetRecords.add((BoundSheetRecord) record);
        break;
      case BOFRecord.sid:
        BOFRecord br = (BOFRecord) record;
        if (br.getType() == BOFRecord.TYPE_WORKSHEET) {
          // Create a stub workbook if formula rendering needs one.
          if (workbookBuildingListener != null && stubWorkbook == null) {
            stubWorkbook = workbookBuildingListener.getStubHSSFWorkbook();
          }

          sheetIndex++;
          if (orderedBSRs == null) {
            orderedBSRs = BoundSheetRecord.orderByBofPosition(boundSheetRecords);
          }
          sheetName = orderedBSRs[sheetIndex].getSheetname();
        }
        break;

      case SSTRecord.sid:
        sstRecord = (SSTRecord) record;
        break;

      case BlankRecord.sid: // empty cell
        BlankRecord brec = (BlankRecord) record;
        thisRow = brec.getRow();
        thisColumn = brec.getColumn();
        thisStr = "";
        rowlist.add(thisColumn, thisStr);
        break;
      case BoolErrRecord.sid: // boolean cell
        BoolErrRecord berec = (BoolErrRecord) record;
        thisRow = berec.getRow();
        thisColumn = berec.getColumn();
        thisStr = berec.getBooleanValue() + "";
        rowlist.add(thisColumn, thisStr);
        break;

      case FormulaRecord.sid: // formula cell
        FormulaRecord frec = (FormulaRecord) record;
        thisRow = frec.getRow();
        thisColumn = frec.getColumn();
        if (outputFormulaValues) {
          if (Double.isNaN(frec.getValue())) {
            // Formula result is a string; it arrives in the next StringRecord.
            outputNextStringRecord = true;
            nextRow = frec.getRow();
            nextColumn = frec.getColumn();
          } else {
            thisStr = formatListener.formatNumberDateCell(frec);
          }
        } else {
          thisStr =
              '"'
                  + HSSFFormulaParser.toFormulaString(stubWorkbook, frec.getParsedExpression())
                  + '"';
        }
        rowlist.add(thisColumn, thisStr);
        break;
      case StringRecord.sid: // string result of a preceding formula cell
        if (outputNextStringRecord) {
          StringRecord srec = (StringRecord) record;
          thisStr = srec.getString();
          thisRow = nextRow;
          thisColumn = nextColumn;
          outputNextStringRecord = false;
        }
        break;
      case LabelRecord.sid: // inline label cell
        LabelRecord lrec = (LabelRecord) record;
        curRow = thisRow = lrec.getRow();
        thisColumn = lrec.getColumn();
        value = lrec.getValue().trim();
        value = value.equals("") ? " " : value;
        this.rowlist.add(thisColumn, value);
        break;
      case LabelSSTRecord.sid: // shared-string cell
        LabelSSTRecord lsrec = (LabelSSTRecord) record;
        curRow = thisRow = lsrec.getRow();
        thisColumn = lsrec.getColumn();
        if (sstRecord == null) {
          rowlist.add(thisColumn, " ");
        } else {
          value = sstRecord.getString(lsrec.getSSTIndex()).toString().trim();
          value = value.equals("") ? " " : value;
          rowlist.add(thisColumn, value);
        }
        break;
      case NumberRecord.sid: // numeric cell
        NumberRecord numrec = (NumberRecord) record;
        curRow = thisRow = numrec.getRow();
        thisColumn = numrec.getColumn();
        value = formatListener.formatNumberDateCell(numrec).trim();
        value = value.equals("") ? "0" : value;
        rowlist.add(thisColumn, value);
        break;
      default:
        break;
    }

    // Entered a new row: reset the column tracker.
    if (thisRow != -1 && thisRow != lastRowNumber) {
      lastColumnNumber = -1;
    }

    // Synthetic record for a missing cell: pad with a blank value.
    if (record instanceof MissingCellDummyRecord) {
      MissingCellDummyRecord mc = (MissingCellDummyRecord) record;
      curRow = thisRow = mc.getRow();
      thisColumn = mc.getColumn();
      rowlist.add(thisColumn, " ");
    }

    // Track the last row/column seen.
    if (thisRow > -1) {
      lastRowNumber = thisRow;
    }
    if (thisColumn > -1) {
      lastColumnNumber = thisColumn;
    }

    // Synthetic record marking the end of a row: hand the row to the callback.
    if (record instanceof LastCellOfRowDummyRecord) {
      if (minColumns > 0) {
        // Reset the column counter when a minimum column count is enforced.
        if (lastColumnNumber == -1) {
          lastColumnNumber = 0;
        }
      }
      lastColumnNumber = -1;

      excelRowDeal.dealRow(orderedBSRs, sheetIndex, curRow, rowlist);
      // Clear the buffer for the next row.
      rowlist.clear();
    }
  }

  /** Best-effort close of the POIFS container and the underlying stream. */
  public void close() {
    try {
      if (fs != null) {
        fs.close();
      }
      if (inputStream != null) {
        inputStream.close();
      }
    } catch (IOException ignored) {
      // Close is best-effort; a failure here must not mask an earlier error.
    }
  }
}

/** Thrown to signal analysis errors — also used by FirstRowDeal to stop after row 0. */
class ExcelAnalysisException extends RuntimeException {
  public ExcelAnalysisException() {}

  public ExcelAnalysisException(String message) {
    super(message);
  }

  public ExcelAnalysisException(String message, Throwable cause) {
    super(message, cause);
  }

  public ExcelAnalysisException(Throwable cause) {
    super(cause);
  }
}

/** Callback invoked by {@link ExcelXlsReader} once per completed row. */
interface IExcelRowDeal {
  void dealRow(BoundSheetRecord[] orderedBSRs, int sheetIndex, int curRow, List<String> rowlist);
}

/**
 * Row handler that captures only the first row (header) and the sheet names, then aborts
 * the scan by throwing {@link ExcelAnalysisException} — deliberate control flow so the
 * reader does not stream the whole file.
 */
class FirstRowDeal implements IExcelRowDeal {

  private List<String> sheetNames = new ArrayList<>();
  private List<String> row;

  public List<String> getSheetNames() {
    return sheetNames;
  }

  public void setSheetNames(List<String> sheetNames) {
    this.sheetNames = sheetNames;
  }

  public List<String> getRow() {
    return row;
  }

  public void setRow(List<String> row) {
    this.row = row;
  }

  @Override
  public void dealRow(
      BoundSheetRecord[] orderedBSRs, int sheetIndex, int curRow, List<String> rowlist) {
    for (BoundSheetRecord record : orderedBSRs) {
      sheetNames.add(record.getSheetname());
    }
    row = rowlist;
    throw new ExcelAnalysisException("Finished to deal first row");
  }
}

/**
 * Row handler that streams selected sheets out as tab-separated text (one line per row),
 * optionally skipping the header row. All bytes are written as UTF-8.
 */
class RowToCsvDeal implements IExcelRowDeal {

  private List<String> sheetNames;
  private OutputStream outputStream;
  private Boolean hasHeader;
  private boolean firstRow = true;

  public void init(Boolean hasHeader, List<String> sheetNames, OutputStream outputStream) {
    this.hasHeader = hasHeader;
    this.sheetNames = sheetNames;
    this.outputStream = outputStream;
  }

  @Override
  public void dealRow(
      BoundSheetRecord[] orderedBSRs, int sheetIndex, int curRow, List<String> rowlist) {
    String sheetName = orderedBSRs[sheetIndex].getSheetname();
    // An empty/null sheet filter means "export every sheet".
    if (sheetNames == null || sheetNames.isEmpty() || sheetNames.contains(sheetName)) {
      if (!(curRow == 0 && hasHeader)) {
        try {
          if (firstRow) {
            firstRow = false;
          } else {
            outputStream.write("\n".getBytes(StandardCharsets.UTF_8));
          }
          int len = rowlist.size();
          for (int i = 0; i < len; i++) {
            // Newlines/tabs inside a cell would corrupt the TSV structure.
            outputStream.write(
                rowlist.get(i).replaceAll("\n|\t", " ").getBytes(StandardCharsets.UTF_8));
            if (i < len - 1) {
              outputStream.write("\t".getBytes(StandardCharsets.UTF_8));
            }
          }
        } catch (IOException e) {
          throw new RuntimeException(e);
        }
      }
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.linkis.storage.excel

import org.apache.linkis.common.io.{MetaData, Record}
import org.apache.linkis.common.utils.{Logging, Utils}
import org.apache.linkis.storage.domain._
import org.apache.linkis.storage.resultset.table.{TableMetaData, TableRecord}

import org.apache.commons.io.IOUtils
import org.apache.poi.ss.usermodel._
import org.apache.poi.xssf.streaming.{SXSSFCell, SXSSFSheet, SXSSFWorkbook}

import java.io._
import java.math.BigDecimal
import java.util
import java.util.Date

import scala.collection.mutable.ArrayBuffer

/**
 * Streams a table result set into a single-sheet xlsx workbook (SXSSF, bounded memory).
 * When `autoFormat` is set, cells are typed and styled per column DataType; otherwise
 * every value is written as text.
 */
class StorageExcelWriter(
    val charset: String,
    val sheetName: String,
    val dateFormat: String,
    val outputStream: OutputStream,
    val autoFormat: Boolean
) extends ExcelFsWriter
    with Logging {

  protected var workBook: SXSSFWorkbook = _
  protected var sheet: SXSSFSheet = _
  protected var format: DataFormat = _
  protected var types: Array[DataType] = _
  protected var rowPoint = 0
  protected var columnCounter = 0
  protected val styles = new util.HashMap[String, CellStyle]()
  protected var isFlush = true
  protected val os = new ByteArrayOutputStream()
  protected var is: ByteArrayInputStream = _

  /** Create the workbook and its single sheet; called from addMetaData. */
  def init: Unit = {
    workBook = new SXSSFWorkbook()
    sheet = workBook.createSheet(sheetName)
  }

  /** Bold 14pt black header style used for the first (column-name) row. */
  def getDefaultHeadStyle: CellStyle = {
    val headerFont = workBook.createFont
    headerFont.setBold(true)
    headerFont.setFontHeightInPoints(14.toShort)
    headerFont.setColor(IndexedColors.BLACK.getIndex)
    val headerCellStyle = workBook.createCellStyle
    headerCellStyle.setFont(headerFont)
    headerCellStyle
  }

  /** Auto-size all written columns and return the workbook, ready to be serialized. */
  def getWorkBook: Workbook = {
    sheet.trackAllColumnsForAutoSizing()
    for (elem <- 0 to columnCounter) {
      sheet.autoSizeColumn(elem)
    }
    workBook
  }

  /**
   * Build a cell style for a column type. Defaults to the text format "@"; when
   * autoFormat is enabled, numeric/date types get a type-specific display format.
   */
  def createCellStyle(dataType: DataType): CellStyle = {
    val style = workBook.createCellStyle()
    format = workBook.createDataFormat()
    // Text format is the baseline for every type.
    style.setDataFormat(format.getFormat("@"))
    if (autoFormat) {
      dataType match {
        case StringType => style.setDataFormat(format.getFormat("@"))
        case TinyIntType => style.setDataFormat(format.getFormat("#"))
        case ShortIntType => style.setDataFormat(format.getFormat("#"))
        case IntType => style.setDataFormat(format.getFormat("#"))
        case LongType => style.setDataFormat(format.getFormat("#.##E+00"))
        case BigIntType => style.setDataFormat(format.getFormat("#.##E+00"))
        case FloatType => style.setDataFormat(format.getFormat("#.0000000000"))
        case DoubleType => style.setDataFormat(format.getFormat("#.0000000000"))
        case CharType => style.setDataFormat(format.getFormat("@"))
        case VarcharType => style.setDataFormat(format.getFormat("@"))
        case DateType => style.setDataFormat(format.getFormat("m/d/yy h:mm"))
        case TimestampType => style.setDataFormat(format.getFormat("m/d/yy h:mm"))
        case DecimalType => style.setDataFormat(format.getFormat("#.000000000"))
        case BigDecimalType => style.setDataFormat(format.getFormat("#.000000000"))
        case _ => style.setDataFormat(format.getFormat("@"))
      }
    }
    style
  }

  /** Cached per-type style lookup (workbooks have a hard limit on style count). */
  def getCellStyle(dataType: DataType): CellStyle = {
    val style = styles.get(dataType.typeName)
    if (style == null) {
      val newStyle = createCellStyle(dataType)
      styles.put(dataType.typeName, newStyle)
      newStyle
    } else {
      style
    }
  }

  /** Write the header row from the table metadata and remember each column's type. */
  @scala.throws[IOException]
  override def addMetaData(metaData: MetaData): Unit = {
    init
    val tableHead = sheet.createRow(0)
    val columns = metaData.asInstanceOf[TableMetaData].columns
    val columnType = new ArrayBuffer[DataType]()
    for (elem <- columns) {
      val headCell = tableHead.createCell(columnCounter)
      headCell.setCellValue(elem.columnName)
      headCell.setCellStyle(getDefaultHeadStyle)
      columnType += elem.dataType
      columnCounter += 1
    }
    types = columnType.toArray
    rowPoint += 1
  }

  /** Append one data row; typed values when autoFormat, plain strings otherwise. */
  @scala.throws[IOException]
  override def addRecord(record: Record): Unit = {
    val tableBody = sheet.createRow(rowPoint)
    var columnPoint = 0
    val excelRecord = record.asInstanceOf[TableRecord].row
    for (elem <- excelRecord) {
      val cell = tableBody.createCell(columnPoint)
      val dataType = types.apply(columnPoint)
      if (autoFormat) {
        setCellTypeValue(dataType, elem, cell)
      } else {
        cell.setCellValue(DataType.valueToString(elem))
      }
      cell.setCellStyle(getCellStyle(dataType))
      columnPoint += 1
    }
    rowPoint += 1
  }

  /**
   * Set a cell with a value coerced to the column type; on any conversion failure the
   * value falls back to its string form so the export never aborts mid-row.
   */
  private def setCellTypeValue(dataType: DataType, elem: Any, cell: SXSSFCell): Unit = {
    if (null == elem) return
    Utils.tryCatch {
      dataType match {
        case StringType => cell.setCellValue(DataType.valueToString(elem))
        case TinyIntType => cell.setCellValue(elem.toString.toInt)
        case ShortIntType => cell.setCellValue(elem.toString.toInt)
        case IntType => cell.setCellValue(elem.toString.toInt)
        case LongType => cell.setCellValue(elem.toString.toLong)
        case BigIntType => cell.setCellValue(elem.toString.toLong)
        case FloatType => cell.setCellValue(elem.toString.toFloat)
        case DoubleType =>
          doubleCheck(elem.toString)
          cell.setCellValue(elem.toString.toDouble)
        case CharType => cell.setCellValue(DataType.valueToString(elem))
        case VarcharType => cell.setCellValue(DataType.valueToString(elem))
        case DateType => cell.setCellValue(getDate(elem))
        case TimestampType => cell.setCellValue(getDate(elem))
        case DecimalType =>
          doubleCheck(DataType.valueToString(elem))
          cell.setCellValue(DataType.valueToString(elem).toDouble)
        case BigDecimalType =>
          doubleCheck(DataType.valueToString(elem))
          cell.setCellValue(DataType.valueToString(elem).toDouble)
        case _ =>
          cell.setCellValue(DataType.valueToString(elem))
      }
    } { case _: Exception =>
      cell.setCellValue(DataType.valueToString(elem))
    }
  }

  /** Narrow an Any to java.util.Date, rejecting anything else explicitly. */
  private def getDate(value: Any): Date = {
    value match {
      case d: Date => d
      case _ =>
        throw new NumberFormatException(
          s"Value ${value} with class : ${value.getClass.getName} is not a valid type of Date."
        )
    }
  }

  /**
   * Reject doubles with more than 15 significant integer digits: excel stores numbers as
   * IEEE 754 doubles, so such values would silently lose precision.
   *
   * @param elemValue the numeric value in string form
   */
  private def doubleCheck(elemValue: String): Unit = {
    val value = new BigDecimal(elemValue).stripTrailingZeros
    if ((value.precision - value.scale) > 15) {
      throw new NumberFormatException(
        s"Value ${elemValue} error : This data exceeds 15 significant digits."
      )
    }
  }

  /** Serialize the workbook into the target stream; idempotence is tracked by isFlush. */
  override def flush(): Unit = {
    getWorkBook.write(os)
    is = new ByteArrayInputStream(os.toByteArray)
    IOUtils.copy(is, outputStream)
    isFlush = false
  }

  override def close(): Unit = {
    if (isFlush) flush()
    IOUtils.closeQuietly(outputStream)
    IOUtils.closeQuietly(is)
    IOUtils.closeQuietly(os)
    IOUtils.closeQuietly(workBook)
  }

}
*/ -package org.apache.linkis.storage.excel; +package org.apache.linkis.storage.excel -import org.apache.poi.xssf.streaming.SXSSFWorkbook; +import org.apache.poi.xssf.streaming.SXSSFWorkbook -import java.io.OutputStream; +import java.io.OutputStream -public class StorageMultiExcelWriter extends StorageExcelWriter { +class StorageMultiExcelWriter( + override val outputStream: OutputStream, + override val autoFormat: Boolean +) extends StorageExcelWriter(null, null, null, outputStream, autoFormat) { - private int sheetIndex = 0; + private var sheetIndex = 0 - public StorageMultiExcelWriter(OutputStream outputStream, boolean autoFormat) { - super(null, null, null, outputStream, autoFormat); - } - - @Override - public void init() { - if (workBook == null) { - workBook = new SXSSFWorkbook(); - } + override def init: Unit = { + if (workBook == null) workBook = new SXSSFWorkbook() // 1.让表自适应列宽 if (sheet != null) { - sheet.trackAllColumnsForAutoSizing(); - for (int i = 0; i <= columnCounter; i++) { - sheet.autoSizeColumn(i); - } + sheet.trackAllColumnsForAutoSizing() + 0 to columnCounter foreach (sheet.autoSizeColumn) } // 2.重置参数 // 2.1 columnCounter 归0 - columnCounter = 0; + columnCounter = 0 // 2.2 创建新sheet - sheet = workBook.createSheet("resultset" + sheetIndex); + sheet = workBook.createSheet(s"resultset$sheetIndex") // 2.3 sheetIndex自增 - sheetIndex++; + sheetIndex += 1 // 2.4 types 置空 - types = null; + types = null // 2.5 rowPoint 归0 记录行数 - rowPoint = 0; + rowPoint = 0 // 2.6 styles 清空 - styles.clear(); + styles.clear() } + } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOClient.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOClient.scala new file mode 100644 index 0000000000..b21bf7e492 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOClient.scala @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * 
contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.io + +import org.apache.linkis.storage.domain.MethodEntity +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.MUST_REGISTER_TOC +import org.apache.linkis.storage.exception.StorageErrorException + +import java.util.UUID + +import org.slf4j.{Logger, LoggerFactory} + +/** + * IOClient is used to execute the proxy as the ujes code execution entry in io and get the return + * result. IOClient用于在io进行代理作为ujes的代码执行入口,并获取返回结果 + */ +trait IOClient { + + def execute(user: String, methodEntity: MethodEntity, params: java.util.Map[String, Any]): String + + def executeWithEngine( + user: String, + methodEntity: MethodEntity, + params: java.util.Map[String, Any] + ): Array[String] + +} + +object IOClient { + val logger: Logger = LoggerFactory.getLogger(classOf[IOClient]) + var ioClient: IOClient = null + + val SUCCESS = "SUCCESS" + val FAILED = "FAILED" + + def getIOClient(): IOClient = { + if (ioClient == null) { + throw new StorageErrorException( + MUST_REGISTER_TOC.getErrorCode, + MUST_REGISTER_TOC.getErrorDesc + ) + } + ioClient + } + + /** + * This method is called when ioClient is initialized. 
ioClient初始化时会调用该方法 + * @param client + * IOClient + */ + def register(client: IOClient): Unit = { + this.ioClient = client + logger.debug(s"IOClient: ${ioClient.toString} registered") + } + + def getFSId(): String = { + UUID.randomUUID().toString + } + +} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOClientFactory.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOMethodInterceptorCreator.scala similarity index 53% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOClientFactory.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOMethodInterceptorCreator.scala index 244ad59477..51e1589eb7 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/io/IOClientFactory.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/io/IOMethodInterceptorCreator.scala @@ -15,43 +15,39 @@ * limitations under the License. 
*/ -package org.apache.linkis.storage.io; +package org.apache.linkis.storage.io -import org.apache.linkis.storage.exception.StorageErrorException; +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.MUST_REGISTER_TOM +import org.apache.linkis.storage.exception.StorageErrorException -import java.util.UUID; +import org.springframework.cglib.proxy.MethodInterceptor -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +trait IOMethodInterceptorCreator { -import static org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.MUST_REGISTER_TOC; - -public class IOClientFactory { - private static final Logger logger = LoggerFactory.getLogger(IOClientFactory.class); - private static IOClient ioClient = null; + def createIOMethodInterceptor(fsName: String): MethodInterceptor +} - private static final String SUCCESS = "SUCCESS"; - private static final String FAILED = "FAILED"; +object IOMethodInterceptorCreator { - public static IOClient getIOClient() throws StorageErrorException { - if (ioClient == null) { - throw new StorageErrorException( - MUST_REGISTER_TOC.getErrorCode(), MUST_REGISTER_TOC.getErrorDesc()); - } - return ioClient; - } + var interceptorCreator: IOMethodInterceptorCreator = null /** * This method is called when ioClient is initialized. 
ioClient初始化时会调用该方法 * - * @param client IOClient + * @param interceptorCreator */ - public static void register(IOClient client) { - ioClient = client; - logger.debug("IOClient: {} registered", ioClient.toString()); + def register(interceptorCreator: IOMethodInterceptorCreator): Unit = { + this.interceptorCreator = interceptorCreator } - public static String getFSId() { - return UUID.randomUUID().toString(); + def getIOMethodInterceptor(fsName: String): MethodInterceptor = { + if (interceptorCreator == null) { + throw new StorageErrorException( + MUST_REGISTER_TOM.getErrorCode, + MUST_REGISTER_TOM.getErrorDesc + ) + } + interceptorCreator.createIOMethodInterceptor(fsName) } + } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala new file mode 100644 index 0000000000..d4836731db --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala @@ -0,0 +1,130 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.resultset + +import org.apache.linkis.common.io.{Fs, FsPath, MetaData, Record} +import org.apache.linkis.common.io.resultset.ResultSet +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.storage.FSFactory +import org.apache.linkis.storage.domain.Dolphin +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.{ + THE_FILE_IS_EMPTY, + UNSUPPORTED_RESULT +} +import org.apache.linkis.storage.exception.{StorageErrorException, StorageWarnException} +import org.apache.linkis.storage.utils.{StorageConfiguration, StorageUtils} + +import org.apache.commons.lang3.StringUtils + +import java.text.MessageFormat +import java.util +import java.util.Locale + +class DefaultResultSetFactory extends ResultSetFactory with Logging { + + private val resultClasses: Map[String, Class[ResultSet[ResultMetaData, ResultRecord]]] = + StorageUtils.loadClasses( + StorageConfiguration.STORAGE_RESULT_SET_CLASSES.getValue, + StorageConfiguration.STORAGE_RESULT_SET_PACKAGE.getValue, + t => t.newInstance().resultSetType().toLowerCase(Locale.getDefault) + ) + + val resultTypes = ResultSetFactory.resultSetType.keys.toArray + + override def getResultSetByType(resultSetType: String): ResultSet[_ <: MetaData, _ <: Record] = { + if (!resultClasses.contains(resultSetType)) { + throw new StorageErrorException( + UNSUPPORTED_RESULT.getErrorCode, + MessageFormat.format(UNSUPPORTED_RESULT.getErrorDesc, resultSetType) + ) + } + resultClasses(resultSetType).newInstance() + } + + override def getResultSetByPath(fsPath: FsPath): ResultSet[_ <: MetaData, _ <: Record] = { + getResultSetByPath(fsPath, StorageUtils.getJvmUser) + } + + override def getResultSetByContent(content: String): ResultSet[_ <: MetaData, _ <: Record] = { + getResultSetByType(Dolphin.getType(content)) + } + + override def exists(resultSetType: String): Boolean = resultClasses.contains(resultSetType) + + override def isResultSetPath(path: String): Boolean = { 
+ path.endsWith(Dolphin.DOLPHIN_FILE_SUFFIX) + } + + override def isResultSet(content: String): Boolean = + Utils.tryCatch(resultClasses.contains(Dolphin.getType(content))) { t => + logger.info("Wrong result Set: " + t.getMessage) + false + } + + override def getResultSet(output: String): ResultSet[_ <: MetaData, _ <: Record] = + getResultSet(output, StorageUtils.getJvmUser) + + override def getResultSetType: Array[String] = resultTypes + + override def getResultSetByPath(fsPath: FsPath, fs: Fs): ResultSet[_ <: MetaData, _ <: Record] = { + val inputStream = fs.read(fsPath) + val resultSetType = Dolphin.getType(inputStream) + if (StringUtils.isEmpty(resultSetType)) { + throw new StorageWarnException( + THE_FILE_IS_EMPTY.getErrorCode, + MessageFormat.format(THE_FILE_IS_EMPTY.getErrorDesc, fsPath.getPath) + ) + } + Utils.tryQuietly(inputStream.close()) + // Utils.tryQuietly(fs.close()) + getResultSetByType(resultSetType) + } + + override def getResultSetByPath( + fsPath: FsPath, + proxyUser: String + ): ResultSet[_ <: MetaData, _ <: Record] = { + if (fsPath == null) return null + logger.info("Get Result Set By Path:" + fsPath.getPath) + val fs = FSFactory.getFsByProxyUser(fsPath, proxyUser) + fs.init(new util.HashMap[String, String]()) + val inputStream = fs.read(fsPath) + val resultSetType = Dolphin.getType(inputStream) + if (StringUtils.isEmpty(resultSetType)) { + throw new StorageWarnException( + THE_FILE_IS_EMPTY.getErrorCode, + MessageFormat.format(THE_FILE_IS_EMPTY.getErrorDesc, fsPath.getPath) + ) + } + Utils.tryQuietly(inputStream.close()) + Utils.tryQuietly(fs.close()) + getResultSetByType(resultSetType) + } + + override def getResultSet( + output: String, + proxyUser: String + ): ResultSet[_ <: MetaData, _ <: Record] = { + if (isResultSetPath(output)) { + getResultSetByPath(new FsPath(output), proxyUser) + } else if (isResultSet(output)) { + getResultSetByContent(output) + } else null + } + +} diff --git 
a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultRecord.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultMetaData.scala similarity index 85% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultRecord.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultMetaData.scala index ce2686099b..7871bb2ac9 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultRecord.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultMetaData.scala @@ -15,8 +15,8 @@ * limitations under the License. */ -package org.apache.linkis.storage.resultset; +package org.apache.linkis.storage.resultset -import org.apache.linkis.common.io.Record; +import org.apache.linkis.common.io.MetaData -public interface ResultRecord extends Record {} +abstract class ResultMetaData extends MetaData {} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultRecord.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultRecord.scala new file mode 100644 index 0000000000..ab9244ca98 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultRecord.scala @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset + +import org.apache.linkis.common.io.Record + +abstract class ResultRecord extends Record {} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetFactory.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetFactory.scala new file mode 100644 index 0000000000..e6615873b1 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetFactory.scala @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.resultset + +import org.apache.linkis.common.io.{Fs, FsPath, MetaData, Record} +import org.apache.linkis.common.io.resultset.ResultSet + +import scala.collection.mutable + +trait ResultSetFactory extends scala.AnyRef { + + def getResultSetByType(resultSetType: scala.Predef.String): ResultSet[_ <: MetaData, _ <: Record] + + def getResultSetByPath(fsPath: FsPath): ResultSet[_ <: MetaData, _ <: Record] + def getResultSetByPath(fsPath: FsPath, fs: Fs): ResultSet[_ <: MetaData, _ <: Record] + def getResultSetByContent(content: scala.Predef.String): ResultSet[_ <: MetaData, _ <: Record] + def exists(resultSetType: scala.Predef.String): scala.Boolean + def isResultSetPath(path: scala.Predef.String): scala.Boolean + def isResultSet(content: scala.Predef.String): scala.Boolean + def getResultSet(output: String): ResultSet[_ <: MetaData, _ <: Record] + + def getResultSetByPath(fsPath: FsPath, proxyUser: String): ResultSet[_ <: MetaData, _ <: Record] + + def getResultSet(output: String, proxyUser: String): ResultSet[_ <: MetaData, _ <: Record] + + /** + * The first must-time text(第一个必须时text) + * @return + */ + def getResultSetType: Array[String] +} + +object ResultSetFactory { + + val TEXT_TYPE = "1" + val TABLE_TYPE = "2" + val IO_TYPE = "3" + val PICTURE_TYPE = "4" + val HTML_TYPE = "5" + + /** + * TODO 修改为注册形式,并修改ResultSet的getResultType逻辑 Result set corresponding type record(结果集对应类型记录) + */ + val resultSetType = mutable.LinkedHashMap[String, String]( + TEXT_TYPE -> "TEXT", + TABLE_TYPE -> "TABLE", + IO_TYPE -> "IO", + PICTURE_TYPE -> "PICTURE", + HTML_TYPE -> "HTML" + ) + + val factory = new DefaultResultSetFactory + def getInstance: ResultSetFactory = factory +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala new file mode 100644 index 0000000000..e61cf36b3d 
--- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset + +import org.apache.linkis.common.io.{FsPath, MetaData, Record} +import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetReader} +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.storage.FSFactory +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED +import org.apache.linkis.storage.exception.StorageErrorException +import org.apache.linkis.storage.resultset.table.{TableMetaData, TableRecord, TableResultSet} + +import java.io.InputStream + +object ResultSetReader extends Logging { + + def getResultSetReader[K <: MetaData, V <: Record]( + resultSet: ResultSet[K, V], + inputStream: InputStream + ): ResultSetReader[K, V] = { + new StorageResultSetReader[K, V](resultSet, inputStream) + } + + def getResultSetReader[K <: MetaData, V <: Record]( + resultSet: ResultSet[K, V], + value: String + ): ResultSetReader[K, V] = { + new StorageResultSetReader[K, V](resultSet, value) + } + + def getResultSetReader(res: 
String): ResultSetReader[_ <: MetaData, _ <: Record] = { + val rsFactory = ResultSetFactory.getInstance + if (rsFactory.isResultSet(res)) { + val resultSet = rsFactory.getResultSet(res) + ResultSetReader.getResultSetReader(resultSet, res) + } else { + val resPath = new FsPath(res) + val resultSet = rsFactory.getResultSetByPath(resPath) + val fs = FSFactory.getFs(resPath) + fs.init(null) + val reader = ResultSetReader.getResultSetReader(resultSet, fs.read(resPath)) + reader match { + case storageResultSetReader: StorageResultSetReader[_, _] => + storageResultSetReader.setFs(fs) + case _ => + } + reader + } + } + + def getTableResultReader(res: String): ResultSetReader[TableMetaData, TableRecord] = { + val rsFactory = ResultSetFactory.getInstance + if (rsFactory.isResultSet(res)) { + val resultSet = rsFactory.getResultSet(res) + if (ResultSetFactory.TABLE_TYPE != resultSet.resultSetType()) { + throw new StorageErrorException( + TABLE_ARE_NOT_SUPPORTED.getErrorCode, + TABLE_ARE_NOT_SUPPORTED.getErrorDesc + ) + } + ResultSetReader.getResultSetReader(resultSet.asInstanceOf[TableResultSet], res) + } else { + val resPath = new FsPath(res) + val resultSet = rsFactory.getResultSetByPath(resPath) + if (ResultSetFactory.TABLE_TYPE != resultSet.resultSetType()) { + throw new StorageErrorException( + TABLE_ARE_NOT_SUPPORTED.getErrorCode, + TABLE_ARE_NOT_SUPPORTED.getErrorDesc + ) + } + val fs = FSFactory.getFs(resPath) + logger.info("Try to init Fs with path:" + resPath.getPath) + fs.init(null) + ResultSetReader.getResultSetReader(resultSet.asInstanceOf[TableResultSet], fs.read(resPath)) + } + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetWriter.scala new file mode 100644 index 0000000000..cea00bbd60 --- /dev/null +++ 
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetWriter.scala @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset + +import org.apache.linkis.common.io.{FsPath, MetaData, Record} +import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetWriter} + +import scala.collection.mutable.ArrayBuffer + +object ResultSetWriter { + + def getResultSetWriter[K <: MetaData, V <: Record]( + resultSet: ResultSet[K, V], + maxCacheSize: Long, + storePath: FsPath + ): ResultSetWriter[K, V] = + new StorageResultSetWriter[K, V](resultSet, maxCacheSize, storePath) + + def getResultSetWriter[K <: MetaData, V <: Record]( + resultSet: ResultSet[K, V], + maxCacheSize: Long, + storePath: FsPath, + proxyUser: String + ): ResultSetWriter[K, V] = { + val writer = new StorageResultSetWriter[K, V](resultSet, maxCacheSize, storePath) + writer.setProxyUser(proxyUser) + writer + } + + def getRecordByWriter( + writer: ResultSetWriter[_ <: MetaData, _ <: Record], + limit: Long + ): Array[Record] = { + val res = writer.toString + getRecordByRes(res, limit) + } + + def getRecordByRes(res: String, limit: Long): Array[Record] = { + val reader = 
ResultSetReader.getResultSetReader(res) + var count = 0 + val records = new ArrayBuffer[Record]() + reader.getMetaData + while (reader.hasNext && count < limit) { + records += reader.getRecord + count = count + 1 + } + records.toArray + } + + def getLastRecordByRes(res: String): Record = { + val reader = ResultSetReader.getResultSetReader(res) + reader.getMetaData + while (reader.hasNext) { + reader.getRecord + } + reader.getRecord + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala new file mode 100644 index 0000000000..7b3aca62d9 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.resultset + +import org.apache.linkis.common.io.{FsPath, MetaData, Record} +import org.apache.linkis.common.io.resultset.ResultSet +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.storage.domain.Dolphin +import org.apache.linkis.storage.utils.StorageConfiguration + +abstract class StorageResultSet[K <: MetaData, V <: Record] extends ResultSet[K, V] with Logging { + + val resultHeaderBytes = Dolphin.MAGIC_BYTES ++ Dolphin.getIntBytes(resultSetType().toInt) + override val charset: String = StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue + + override def getResultSetPath(parentDir: FsPath, fileName: String): FsPath = { + val path = if (parentDir.getPath.endsWith("/")) { + parentDir.toPath + fileName + Dolphin.DOLPHIN_FILE_SUFFIX + } else { + parentDir.toPath + "/" + fileName + Dolphin.DOLPHIN_FILE_SUFFIX + } + logger.info(s"Get result set path:${path}") + new FsPath(path) + } + + override def getResultSetHeader: Array[Byte] = resultHeaderBytes + + override def belongToPath(path: String): Boolean = path.endsWith(Dolphin.DOLPHIN_FILE_SUFFIX) + + override def belongToResultSet(content: String): Boolean = + Utils.tryCatch(Dolphin.getType(content) == resultSetType()) { t => + logger.info("Wrong result Set: ", t) + false + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala new file mode 100644 index 0000000000..24dd0fdf8a --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala @@ -0,0 +1,174 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset + +import org.apache.linkis.common.io.{Fs, MetaData, Record} +import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetReader} +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.storage.conf.LinkisStorageConf +import org.apache.linkis.storage.domain.Dolphin +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary +import org.apache.linkis.storage.exception.{ + ColLengthExceedException, + StorageErrorCode, + StorageErrorException, + StorageWarnException +} +import org.apache.linkis.storage.resultset.table.TableMetaData +import org.apache.linkis.storage.utils.StorageUtils + +import java.io.{ByteArrayInputStream, InputStream, IOException} +import java.text.MessageFormat + +import scala.collection.mutable.ArrayBuffer + +class StorageResultSetReader[K <: MetaData, V <: Record]( + resultSet: ResultSet[K, V], + inputStream: InputStream +) extends ResultSetReader[K, V](resultSet, inputStream) + with Logging { + + private val deserializer = resultSet.createResultSetDeserializer + private var metaData: K = _ + private var row: Record = _ + private var rowCount = 0 + + private var fs: Fs = _ + + def this(resultSet: ResultSet[K, V], value: String) = { + this(resultSet, new ByteArrayInputStream(value.getBytes(Dolphin.CHAR_SET))) + } + + def init(): Unit = { + val resType = 
Dolphin.getType(inputStream) + if (resultSet.resultSetType != resType) { + throw new IOException( + "File type does not match(文件类型不匹配): " + ResultSetFactory.resultSetType + .getOrElse(resType, "TABLE") + ) + } + } + + /** + * Read a row of data Read the line length first Get the entire row of data by the length of the + * line, first obtain the column length in the entire row of data, and then divide into column + * length to split the data 读取一行数据 先读取行长度 通过行长度获取整行数据,在整行数据中先获取列长度,进而分割成列长度从而分割数据 + * @return + */ + def readLine(): Array[Byte] = { + + var rowLen = 0 + try rowLen = Dolphin.readInt(inputStream) + catch { + case _: StorageWarnException => logger.info(s"Read finished(读取完毕)"); return null + case t: Throwable => throw t + } + + if (rowLen > LinkisStorageConf.LINKIS_READ_ROW_BYTE_MAX_LEN) { + throw new ColLengthExceedException( + LinkisStorageErrorCodeSummary.RESULT_ROW_LENGTH.getErrorCode, + MessageFormat.format( + LinkisStorageErrorCodeSummary.RESULT_ROW_LENGTH.getErrorDesc, + rowLen.asInstanceOf[Object], + LinkisStorageConf.LINKIS_READ_ROW_BYTE_MAX_LEN.asInstanceOf[Object] + ) + ) + } + + var bytes: Array[Byte] = null + try { + bytes = new Array[Byte](rowLen) + } catch { + case e: OutOfMemoryError => + logger.error("Result set read oom, read size {} Byte", rowLen) + throw new StorageErrorException( + StorageErrorCode.FS_OOM.getCode, + StorageErrorCode.FS_OOM.getMessage, + e + ) + } + val len = StorageUtils.readBytes(inputStream, bytes, rowLen) + if (len != rowLen) { + throw new StorageErrorException( + StorageErrorCode.INCONSISTENT_DATA.getCode, + String.format(StorageErrorCode.INCONSISTENT_DATA.getMessage, len.toString, rowLen.toString) + ) + } + rowCount = rowCount + 1 + bytes + } + + @scala.throws[IOException] + override def getRecord: Record = { + if (metaData == null) throw new IOException("Must read metadata first(必须先读取metadata)") + if (row == null) { + throw new IOException( + "Can't get the value of the field, maybe the IO stream has been read or 
has been closed!(拿不到字段的值,也许IO流已读取完毕或已被关闭!)" + ) + } + row + } + + def setFs(fs: Fs): Unit = this.fs = fs + def getFs: Fs = this.fs + + @scala.throws[IOException] + override def getMetaData: MetaData = { + if (metaData == null) init() + metaData = deserializer.createMetaData(readLine()) + metaData + } + + @scala.throws[IOException] + override def skip(recordNum: Int): Int = { + if (recordNum < 0) return -1 + + if (metaData == null) getMetaData + for (i <- recordNum until (0, -1)) { + try inputStream.skip(Dolphin.readInt(inputStream)) + catch { + case t: Throwable => + return recordNum - i + } + } + recordNum + } + + @scala.throws[IOException] + override def getPosition: Long = rowCount + + @scala.throws[IOException] + override def hasNext: Boolean = { + if (metaData == null) getMetaData + + val line = readLine() + if (line == null) return false + row = deserializer.createRecord(line) + if (row == null) return false + true + } + + @scala.throws[IOException] + override def available: Long = inputStream.available() + + override def close(): Unit = { + inputStream.close() + if (this.fs != null) Utils.tryQuietly(this.fs.close()) + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala new file mode 100644 index 0000000000..caed8c0ea0 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala @@ -0,0 +1,230 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.linkis.storage.resultset

import org.apache.linkis.common.io.{Fs, FsPath, MetaData, Record}
import org.apache.linkis.common.io.resultset.{ResultSerializer, ResultSet, ResultSetWriter}
import org.apache.linkis.common.utils.{Logging, Utils}
import org.apache.linkis.storage.FSFactory
import org.apache.linkis.storage.conf.LinkisStorageConf
import org.apache.linkis.storage.domain.Dolphin
import org.apache.linkis.storage.fs.FileSystem
import org.apache.linkis.storage.fs.impl.HDFSFileSystem
import org.apache.linkis.storage.utils.{FileSystemUtils, StorageUtils}

import org.apache.commons.io.IOUtils
import org.apache.hadoop.hdfs.client.HdfsDataOutputStream

import java.io.{IOException, OutputStream}

import scala.collection.mutable.ArrayBuffer

/**
 * Writes a result set to storage. Serialized rows are cached in an in-memory buffer; the target
 * file is only created (lazily, as the proxy user) once the buffer outgrows maxCacheSize or the
 * writer is flushed/closed, so empty result sets never touch the filesystem.
 */
class StorageResultSetWriter[K <: MetaData, V <: Record](
    resultSet: ResultSet[K, V],
    maxCacheSize: Long,
    storePath: FsPath
) extends ResultSetWriter[K, V](
      resultSet = resultSet,
      maxCacheSize = maxCacheSize,
      storePath = storePath
    )
    with Logging {

  private val serializer: ResultSerializer = resultSet.createResultSetSerializer

  // Becomes true once metadata has been written; records are only accepted afterwards.
  private var moveToWriteRow = false

  private var outputStream: OutputStream = _

  // Number of records written so far.
  private var rowCount = 0

  // In-memory cache of serialized bytes, flushed to outputStream once it outgrows maxCacheSize.
  private val buffer = new ArrayBuffer[Byte]()

  private var fs: Fs = _

  private var rMetaData: MetaData = _

  // User the target file is created as; defaults to the JVM user until setProxyUser is called.
  private var proxyUser: String = StorageUtils.getJvmUser

  private var fileCreated = false

  private var closed = false

  // Separate monitors guarding one-time file creation and one-time close.
  private val WRITER_LOCK_CREATE = new Object()

  private val WRITER_LOCK_CLOSE = new Object()

  def getMetaData: MetaData = rMetaData

  def setProxyUser(proxyUser: String): Unit = {
    this.proxyUser = proxyUser
  }

  /** Empty means: no metadata seen and the buffer holds at most the file header. */
  def isEmpty: Boolean = {
    rMetaData == null && buffer.length <= Dolphin.FILE_EMPTY
  }

  /** Writes the result-set header; cached so no file is created yet. */
  def init(): Unit = {
    writeLine(resultSet.getResultSetHeader, true)
  }

  /**
   * Creates the target file exactly once (double-checked locking on fileCreated), as proxyUser,
   * and restricts permissions to the owner.
   */
  def createNewFile: Unit = {
    if (!fileCreated) {
      WRITER_LOCK_CREATE.synchronized {
        if (!fileCreated) {
          if (storePath != null && outputStream == null) {
            logger.info(s"Try to create a new file:${storePath}, with proxy user:${proxyUser}")
            fs = FSFactory.getFsByProxyUser(storePath, proxyUser)
            fs.init(null)
            FileSystemUtils.createNewFile(storePath, proxyUser, true)

            outputStream = fs.write(storePath, true)
            fs match {
              case fileSystem: FileSystem =>
                // Result files may contain user data; owner-only access.
                fileSystem.setPermission(storePath, "rwx------")
              case _ =>
            }
            logger.info(s"Succeed to create a new file:$storePath")
            fileCreated = true
          }
        }
      }
    } else if (null != storePath && null == outputStream) {
      logger.warn("outputStream had been set null, but createNewFile() was called again.")
    }
  }

  /**
   * Appends one serialized line. With cache=true (or while the buffer is small) the bytes stay in
   * memory; otherwise the buffer is flushed and the bytes go straight to the stream.
   *
   * @throws IOException if a single row exceeds the configured byte limit
   */
  def writeLine(bytes: Array[Byte], cache: Boolean = false): Unit = {
    if (closed) {
      logger.warn("the writer had been closed, but writeLine() was still called.")
      return
    }
    if (bytes.length > LinkisStorageConf.ROW_BYTE_MAX_LEN) {
      throw new IOException(
        s"A single row of data cannot exceed ${LinkisStorageConf.ROW_BYTE_MAX_LEN_STR}"
      )
    }
    if (buffer.length > maxCacheSize && !cache) {
      if (outputStream == null) {
        createNewFile
      }
      flush()
      outputStream.write(bytes)
    } else {
      buffer.appendAll(bytes)
    }
  }

  /** In-memory content while no file exists; otherwise the target path. */
  override def toString: String = {
    if (outputStream == null) {
      if (isEmpty) return ""
      new String(buffer.toArray, Dolphin.CHAR_SET)
    } else {
      storePath.getSchemaPath
    }
  }

  override def toFSPath: FsPath = storePath

  /** Writes pre-serialized metadata+records content once, before any row has been written. */
  override def addMetaDataAndRecordString(content: String): Unit = {
    if (!moveToWriteRow) {
      val bytes = content.getBytes(Dolphin.CHAR_SET)
      writeLine(bytes)
    }
    moveToWriteRow = true
  }

  // Intentionally a no-op: record content is only accepted via addRecord / the combined call above.
  override def addRecordString(content: String): Unit = {}

  /**
   * Writes the metadata line (at most once). A null metadata line is cached so an empty result
   * set still avoids creating a file.
   */
  @scala.throws[IOException]
  override def addMetaData(metaData: MetaData): Unit = {
    if (!moveToWriteRow) {
      {
        rMetaData = metaData
        init()
        if (null == metaData) {
          writeLine(serializer.metaDataToBytes(metaData), true)
        } else {
          writeLine(serializer.metaDataToBytes(metaData))
        }
      }
      moveToWriteRow = true
    }
  }

  /** Serializes and writes one record; silently ignored until metadata has been written. */
  @scala.throws[IOException]
  override def addRecord(record: Record): Unit = {
    if (moveToWriteRow) {
      rowCount = rowCount + 1
      writeLine(serializer.recordToBytes(record))
    }
  }

  def closeFs: Unit = {
    if (fs != null) {
      IOUtils.closeQuietly(fs)
      fs = null
    }
  }

  /** Idempotent close (guarded by WRITER_LOCK_CLOSE): flush, then release stream and filesystem. */
  override def close(): Unit = {
    if (closed) {
      logger.warn("the writer had been closed, but close() was still called.")
      return
    } else {
      WRITER_LOCK_CLOSE.synchronized {
        if (!closed) {
          closed = true
        } else {
          return
        }
      }
    }
    Utils.tryFinally(if (outputStream != null) flush()) {
      if (outputStream != null) {
        IOUtils.closeQuietly(outputStream)
        outputStream = null
      }
      closeFs
    }
  }

  /**
   * Drains the buffer to the (lazily created) stream. HDFS streams get hflush() so data is
   * visible to readers immediately; other streams get a plain flush().
   */
  override def flush(): Unit = {
    createNewFile
    if (outputStream != null) {
      if (buffer.nonEmpty) {
        outputStream.write(buffer.toArray)
        buffer.clear()
      }
      Utils.tryAndWarnMsg[Unit] {
        outputStream match {
          case hdfs: HdfsDataOutputStream =>
            hdfs.hflush()
          case _ =>
            outputStream.flush()
        }
      }(s"Error encounters when flush result set ")
    }
    if (closed) {
      if (logger.isDebugEnabled()) {
        logger.debug("the writer had been closed, but flush() was still called.")
      }
    }
  }

}
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/html/HtmlResultSet.scala @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.html + +import org.apache.linkis.common.io.resultset.{ResultDeserializer, ResultSerializer} +import org.apache.linkis.storage.{LineMetaData, LineRecord} +import org.apache.linkis.storage.resultset.{ResultSetFactory, StorageResultSet} +import org.apache.linkis.storage.resultset.txt.{TextResultDeserializer, TextResultSerializer} + +class HtmlResultSet extends StorageResultSet[LineMetaData, LineRecord] with Serializable { + + override def resultSetType(): String = ResultSetFactory.HTML_TYPE + + override def createResultSetSerializer(): ResultSerializer = new TextResultSerializer + + override def createResultSetDeserializer(): ResultDeserializer[LineMetaData, LineRecord] = + new TextResultDeserializer + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOMetaData.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOMetaData.scala new file mode 100644 index 0000000000..32b578c5a8 --- /dev/null +++ 
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOMetaData.scala @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.io + +import org.apache.linkis.common.io.MetaData +import org.apache.linkis.storage.resultset.ResultMetaData + +class IOMetaData(val off: Int, val len: Int) extends ResultMetaData { + override def cloneMeta(): MetaData = new IOMetaData(off, len) +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IORecord.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IORecord.scala new file mode 100644 index 0000000000..54d3dc0224 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IORecord.scala @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.io + +import org.apache.linkis.common.io.Record +import org.apache.linkis.storage.resultset.ResultRecord + +class IORecord(val value: Array[Byte]) extends ResultRecord { + override def cloneRecord(): Record = new IORecord(value) +} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptFsReader.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultDeserializer.scala similarity index 56% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptFsReader.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultDeserializer.scala index 00b79eb7e4..4e8199da04 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/ScriptFsReader.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultDeserializer.scala @@ -15,26 +15,22 @@ * limitations under the License. 
*/ -package org.apache.linkis.storage.script; +package org.apache.linkis.storage.resultset.io -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.common.io.FsReader; -import org.apache.linkis.storage.script.reader.StorageScriptFsReader; +import org.apache.linkis.common.io.resultset.ResultDeserializer +import org.apache.linkis.storage.domain.Dolphin -import java.io.InputStream; +import org.apache.commons.codec.binary.Base64 -public abstract class ScriptFsReader extends FsReader { +class IOResultDeserializer extends ResultDeserializer[IOMetaData, IORecord] { - protected FsPath path; - protected String charset; - - public ScriptFsReader(FsPath path, String charset) { - this.path = path; - this.charset = charset; + override def createMetaData(bytes: Array[Byte]): IOMetaData = { + val values = Dolphin.getString(bytes, 0, bytes.length).split(Dolphin.COL_SPLIT) + new IOMetaData(values(0).toInt, values(1).toInt) } - public static ScriptFsReader getScriptFsReader( - FsPath path, String charset, InputStream inputStream) { - return new StorageScriptFsReader(path, charset, inputStream); + override def createRecord(bytes: Array[Byte]): IORecord = { + new IORecord(Base64.decodeBase64(Dolphin.getString(bytes, 0, bytes.length))) } + } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSerializer.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSerializer.scala new file mode 100644 index 0000000000..be527e3969 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSerializer.scala @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.io + +import org.apache.linkis.common.io.{MetaData, Record} +import org.apache.linkis.common.io.resultset.ResultSerializer +import org.apache.linkis.storage.domain.Dolphin + +import org.apache.commons.codec.binary.Base64 + +class IOResultSerializer extends ResultSerializer { + + override def metaDataToBytes(metaData: MetaData): Array[Byte] = { + val ioMetaData = metaData.asInstanceOf[IOMetaData] + lineToBytes(s"${ioMetaData.off}${Dolphin.COL_SPLIT}${ioMetaData.len}") + } + + override def recordToBytes(record: Record): Array[Byte] = { + val ioRecord = record.asInstanceOf[IORecord] + lineToBytes(Base64.encodeBase64String(ioRecord.value)) + } + + def lineToBytes(value: String): Array[Byte] = { + val bytes = if (value == null) Dolphin.NULL_BYTES else Dolphin.getBytes(value) + Dolphin.getIntBytes(bytes.length) ++ bytes + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSet.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSet.scala new file mode 100644 index 0000000000..5fd952dbf9 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/io/IOResultSet.scala @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.io + +import org.apache.linkis.common.io.resultset.{ResultDeserializer, ResultSerializer} +import org.apache.linkis.storage.resultset.{ResultSetFactory, StorageResultSet} + +class IOResultSet extends StorageResultSet[IOMetaData, IORecord] with Serializable { + + override def resultSetType(): String = ResultSetFactory.IO_TYPE + + override def createResultSetSerializer(): ResultSerializer = new IOResultSerializer + + override def createResultSetDeserializer(): ResultDeserializer[IOMetaData, IORecord] = + new IOResultDeserializer + +} diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/label/LabelUpdateRequest.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/picture/PictureResultSet.scala similarity index 53% rename from linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/label/LabelUpdateRequest.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/picture/PictureResultSet.scala index 57d05200ae..95c1d3ad1f 100644 --- 
a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/label/LabelUpdateRequest.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/picture/PictureResultSet.scala @@ -15,22 +15,20 @@ * limitations under the License. */ -package org.apache.linkis.manager.common.protocol.label; +package org.apache.linkis.storage.resultset.picture -import org.apache.linkis.manager.label.entity.Label; -import org.apache.linkis.protocol.message.RequestProtocol; +import org.apache.linkis.common.io.resultset.{ResultDeserializer, ResultSerializer} +import org.apache.linkis.storage.{LineMetaData, LineRecord} +import org.apache.linkis.storage.resultset.{ResultSetFactory, StorageResultSet} +import org.apache.linkis.storage.resultset.txt.{TextResultDeserializer, TextResultSerializer} -import java.util.List; +class PictureResultSet extends StorageResultSet[LineMetaData, LineRecord] with Serializable { -public class LabelUpdateRequest implements RequestProtocol { + override def resultSetType(): String = ResultSetFactory.PICTURE_TYPE - private final List> labels; + override def createResultSetSerializer(): ResultSerializer = new TextResultSerializer - public LabelUpdateRequest(List> labels) { - this.labels = labels; - } + override def createResultSetDeserializer(): ResultDeserializer[LineMetaData, LineRecord] = + new TextResultDeserializer - public List> getLabels() { - return labels; - } } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/RequestEngineUnlock.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableMetaData.scala similarity index 70% rename from linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/RequestEngineUnlock.java rename to 
linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableMetaData.scala index 9646633e3d..526078d6c9 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/RequestEngineUnlock.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableMetaData.scala @@ -15,22 +15,16 @@ * limitations under the License. */ -package org.apache.linkis.manager.common.protocol; +package org.apache.linkis.storage.resultset.table -public class RequestEngineUnlock implements EngineLock { - private String lock; +import org.apache.linkis.common.io.MetaData +import org.apache.linkis.storage.domain.Column +import org.apache.linkis.storage.resultset.ResultMetaData - public RequestEngineUnlock() {} +class TableMetaData(val columns: Array[Column]) extends ResultMetaData { - public RequestEngineUnlock(String lock) { - this.lock = lock; + override def cloneMeta(): MetaData = { + new TableMetaData(columns) } - public String getLock() { - return lock; - } - - public void setLock(String lock) { - this.lock = lock; - } } diff --git a/linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/exception/PythonExecuteError.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala similarity index 71% rename from linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/exception/PythonExecuteError.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala index 8a45ea078d..64d2b18b9c 100644 --- a/linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/exception/PythonExecuteError.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala @@ -15,12 +15,16 @@ * limitations under 
the License. */ -package org.apache.linkis.manager.engineplugin.python.exception; +package org.apache.linkis.storage.resultset.table -import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.common.io.Record +import org.apache.linkis.storage.resultset.ResultRecord +import org.apache.linkis.storage.utils.StorageUtils -public class PythonExecuteError extends ErrorException { - public PythonExecuteError(int errCode, String desc) { - super(errCode, desc); +class TableRecord(val row: Array[Any]) extends ResultRecord { + + override def cloneRecord(): Record = { + new TableRecord(row) } + } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala new file mode 100644 index 0000000000..86d09e9532 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala @@ -0,0 +1,156 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.linkis.storage.resultset.table

import org.apache.linkis.common.io.resultset.ResultDeserializer
import org.apache.linkis.common.utils.Logging
import org.apache.linkis.storage.conf.LinkisStorageConf
import org.apache.linkis.storage.domain.{Column, DataType, Dolphin}
import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary
import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.PARSING_METADATA_FAILED
import org.apache.linkis.storage.exception.{
  ColLengthExceedException,
  ColumnIndexExceedException,
  StorageErrorException
}

import org.apache.commons.lang3.StringUtils

import java.text.MessageFormat

import scala.collection.mutable.ArrayBuffer

/**
 * Deserializes table result-set bytes produced by TableResultSerializer. Supports optional column
 * pagination driven by thread-local state in LinkisStorageConf.
 */
class TableResultDeserializer extends ResultDeserializer[TableMetaData, TableRecord] with Logging {

  // Metadata must be deserialized before any record; createRecord reads column types from it.
  var metaData: TableMetaData = _

  // Cached set view of the requested column indices; built lazily on the first record.
  var columnSet: Set[Int] = null

  import DataType._

  /**
   * Parses the metadata line: a fixed-width byte length, then the comma-separated lengths of the
   * (name, type, comment) string triples, then the triples themselves, one per column.
   */
  override def createMetaData(bytes: Array[Byte]): TableMetaData = {
    val colByteLen = Dolphin.getString(bytes, 0, Dolphin.INT_LEN).toInt
    val colString = Dolphin.getString(bytes, Dolphin.INT_LEN, colByteLen)
    // Drop a trailing separator before splitting so no empty length entry is produced.
    val colArray =
      if (colString.endsWith(Dolphin.COL_SPLIT)) {
        colString.substring(0, colString.length - 1).split(Dolphin.COL_SPLIT)
      } else colString.split(Dolphin.COL_SPLIT)
    var index = Dolphin.INT_LEN + colByteLen
    // Each column contributes exactly three length entries (name, type, comment).
    if (colArray.length % 3 != 0) {
      throw new StorageErrorException(
        PARSING_METADATA_FAILED.getErrorCode,
        PARSING_METADATA_FAILED.getErrorDesc
      )
    }
    val columns = new ArrayBuffer[Column]()
    for (i <- 0 until (colArray.length, 3)) {
      var len = colArray(i).toInt
      val colName = Dolphin.toStringValue(Dolphin.getString(bytes, index, len))
      index += len
      len = colArray(i + 1).toInt
      val colType = Dolphin.toStringValue(Dolphin.getString(bytes, index, len))
      index += len
      len = colArray(i + 2).toInt
      val colComment = Dolphin.toStringValue(Dolphin.getString(bytes, index, len))
      index += len
      columns += Column(colName, colType, colComment)
    }
    metaData = new TableMetaData(columns.toArray)
    metaData
  }

  /**
   * Parses one record: colByteLen is the byte length of the column-length list, colString the
   * comma-separated per-column lengths (e.g. "10,20,21"); the column data follows and is split by
   * those lengths. When column pagination is enabled (thread-local), only the selected columns
   * are materialized.
   *
   * NOTE(review): when enableLimit is true, columnIndices is dereferenced (columnIndices(0),
   * columnSet) without a null check — confirm the thread-locals are always set together.
   * NOTE(review): realValueSize = columns.size % columnIndices.length for the overflow branch
   * looks suspicious — verify the intended sizing.
   *
   * @param bytes
   * @return
   */
  override def createRecord(bytes: Array[Byte]): TableRecord = {
    val colByteLen = Dolphin.getString(bytes, 0, Dolphin.INT_LEN).toInt
    val colString = Dolphin.getString(bytes, Dolphin.INT_LEN, colByteLen)
    val colArray =
      if (colString.endsWith(Dolphin.COL_SPLIT)) {
        colString.substring(0, colString.length - 1).split(Dolphin.COL_SPLIT)
      } else colString.split(Dolphin.COL_SPLIT)
    var index = Dolphin.INT_LEN + colByteLen
    var enableLimit: Boolean = false
    if (StringUtils.isNotBlank(LinkisStorageConf.enableLimitThreadLocal.get())) {
      enableLimit = true
    }
    val columnIndices: Array[Int] = LinkisStorageConf.columnIndicesThreadLocal.get()
    if (columnSet == null && columnIndices != null) {
      columnSet = columnIndices.toSet
    }

    // Highest requested column index; used to detect requests past the end of the row.
    val lastIndex =
      if (columnIndices != null && columnIndices.length > 0) columnIndices(columnIndices.length - 1)
      else 0
    var realValueSize = colArray.size

    // Fail fast when even the first requested column is beyond the actual column count.
    if (enableLimit && metaData.columns.size <= columnIndices(0)) {
      throw new ColumnIndexExceedException(
        LinkisStorageErrorCodeSummary.RESULT_COLUMN_INDEX_OUT_OF_BOUNDS.getErrorCode,
        MessageFormat.format(
          LinkisStorageErrorCodeSummary.RESULT_COLUMN_INDEX_OUT_OF_BOUNDS.getErrorDesc,
          columnIndices(0).asInstanceOf[Object],
          metaData.columns.size.asInstanceOf[Object]
        )
      )
    }

    if (enableLimit && metaData.columns.size > lastIndex) {
      realValueSize = columnIndices.length
    } else if (enableLimit && metaData.columns.size <= lastIndex) {
      realValueSize = metaData.columns.size % columnIndices.length
    }

    val columnSize = colArray.size
    val rowArray = new Array[Any](realValueSize)

    var colIdx = 0
    for (i <- 0 until columnSize) {
      val len = colArray(i).toInt
      val res = Dolphin.getString(bytes, index, len)
      // Guard individual cell size only while pagination (and therefore the limit) is active.
      if (res.length > LinkisStorageConf.LINKIS_RESULT_COL_LENGTH && enableLimit) {
        throw new ColLengthExceedException(
          LinkisStorageErrorCodeSummary.RESULT_COL_LENGTH.getErrorCode,
          MessageFormat.format(
            LinkisStorageErrorCodeSummary.RESULT_COL_LENGTH.getErrorDesc,
            res.length.asInstanceOf[Object],
            LinkisStorageConf.LINKIS_RESULT_COL_LENGTH.asInstanceOf[Object]
          )
        )
      }
      index += len
      // When enableLimit is true, column pagination applies: keep only the selected columns.
      if (enableLimit) {
        if (columnSet.contains(i)) {
          rowArray(colIdx) = toValue(metaData.columns(i).dataType, res)
          colIdx += 1
        }
      } else {
        // Extra trailing cells without a matching column definition are kept as raw strings.
        if (i >= metaData.columns.length) rowArray(i) = res
        else {
          rowArray(i) = toValue(metaData.columns(i).dataType, res)
        }
      }
    }
    new TableRecord(rowArray)
  }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.linkis.storage.resultset.table

import org.apache.linkis.common.io.{MetaData, Record}
import org.apache.linkis.common.io.resultset.ResultSerializer
import org.apache.linkis.storage.conf.LinkisStorageConf
import org.apache.linkis.storage.domain.Dolphin

import scala.collection.mutable.ArrayBuffer

/** Serializes table metadata and records into the Dolphin length-prefixed line format. */
class TableResultSerializer extends ResultSerializer {

  // Metadata is flattened to one line of (name, type, comment) triples, one triple per column.
  override def metaDataToBytes(metaData: MetaData): Array[Byte] = {
    val tableMetaData = metaData.asInstanceOf[TableMetaData]
    lineToBytes(tableMetaData.columns.map(_.toArray).reduce((a1, a2) => a1 ++ a2))
  }

  override def recordToBytes(record: Record): Array[Byte] = {
    val tableRecord = record.asInstanceOf[TableRecord]
    lineToBytes(tableRecord.row)
  }

  /**
   * Converts one row of values into its byte representation. Write format: row length
   * (fixed width), column-index length (fixed width), comma-separated per-column byte lengths,
   * then the actual data. Example: 000000004900000000116,10,3,4,5,peace1johnnwang1101true11.51
   * The row length does not include its own width. Null cells are encoded either as the storage
   * NULL marker or the Linkis NULL marker, depending on LINKIS_RESULT_ENABLE_NULL.
   *
   * @param line
   */
  def lineToBytes(line: Array[Any]): Array[Byte] = {
    // Serialized cell payloads, in column order.
    val dataBytes = ArrayBuffer[Array[Byte]]()
    // Length entries plus separators, forming the column-index section.
    val colIndex = ArrayBuffer[Array[Byte]]()
    var colByteLen = 0
    var length = 0
    line.foreach { data =>
      val bytes = if (data == null) {
        if (!LinkisStorageConf.LINKIS_RESULT_ENABLE_NULL) {
          Dolphin.LINKIS_NULL_BYTES
        } else {
          Dolphin.NULL_BYTES
        }
      } else {
        Dolphin.getBytes(data)
      }
      dataBytes += bytes
      val colBytes = Dolphin.getBytes(bytes.length)
      colIndex += colBytes += Dolphin.COL_SPLIT_BYTES
      colByteLen += colBytes.length + Dolphin.COL_SPLIT_LEN
      length += bytes.length
    }
    // Total row length covers the index section header plus all payloads.
    length += colByteLen + Dolphin.INT_LEN
    toByteArray(length, colByteLen, colIndex, dataBytes)
  }

  /**
   * Concatenates one row into a single byte array.
   *
   * @param length
   *   total byte length of the row data, excluding its own width
   * @param colByteLen
   *   byte length of the column-index section
   * @param colIndex
   *   column length entries, separators included
   * @param dataBytes
   *   bytes of the actual cell data
   * @return
   */
  def toByteArray(
      length: Int,
      colByteLen: Int,
      colIndex: ArrayBuffer[Array[Byte]],
      dataBytes: ArrayBuffer[Array[Byte]]
  ): Array[Byte] = {
    val row = ArrayBuffer[Byte]()
    colIndex ++= dataBytes
    row.appendAll(Dolphin.getIntBytes(length))
    row.appendAll(Dolphin.getIntBytes(colByteLen))
    colIndex.foreach(row.appendAll(_))
    row.toArray
  }

}
*/ -package org.apache.linkis.storage.resultset.table; +package org.apache.linkis.storage.resultset.table -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.storage.domain.Column; -import org.apache.linkis.storage.resultset.ResultMetaData; +import org.apache.linkis.common.io.resultset.{ResultDeserializer, ResultSerializer} +import org.apache.linkis.storage.resultset.{ResultSetFactory, StorageResultSet} -public class TableMetaData implements ResultMetaData { +class TableResultSet extends StorageResultSet[TableMetaData, TableRecord] with Serializable { - public Column[] columns; + override def resultSetType(): String = ResultSetFactory.TABLE_TYPE - public TableMetaData(Column[] columns) { - this.columns = columns; - } + override def createResultSetSerializer(): ResultSerializer = new TableResultSerializer - public Column[] getColumns() { - return columns; - } + override def createResultSetDeserializer(): ResultDeserializer[TableMetaData, TableRecord] = + new TableResultDeserializer - public void setColumns(Column[] columns) { - this.columns = columns; - } - - @Override - public MetaData cloneMeta() { - return new TableMetaData(columns.clone()); - } } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.scala new file mode 100644 index 0000000000..b306b1f29a --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultDeserializer.scala @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.txt + +import org.apache.linkis.common.io.resultset.ResultDeserializer +import org.apache.linkis.storage.{LineMetaData, LineRecord} +import org.apache.linkis.storage.domain.Dolphin + +class TextResultDeserializer extends ResultDeserializer[LineMetaData, LineRecord] { + + override def createMetaData(bytes: Array[Byte]): LineMetaData = { + new LineMetaData(Dolphin.getString(bytes, 0, bytes.length)) + } + + override def createRecord(bytes: Array[Byte]): LineRecord = { + new LineRecord(Dolphin.getString(bytes, 0, bytes.length)) + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSerializer.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSerializer.scala new file mode 100644 index 0000000000..e6e53338d0 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSerializer.scala @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.txt + +import org.apache.linkis.common.io.{MetaData, Record} +import org.apache.linkis.common.io.resultset.ResultSerializer +import org.apache.linkis.storage.{LineMetaData, LineRecord} +import org.apache.linkis.storage.domain.Dolphin + +class TextResultSerializer extends ResultSerializer { + + override def metaDataToBytes(metaData: MetaData): Array[Byte] = { + if (metaData == null) { + lineToBytes(null) + } else { + val textMetaData = metaData.asInstanceOf[LineMetaData] + lineToBytes(textMetaData.getMetaData) + } + } + + override def recordToBytes(record: Record): Array[Byte] = { + val textRecord = record.asInstanceOf[LineRecord] + lineToBytes(textRecord.getLine) + } + + def lineToBytes(value: String): Array[Byte] = { + val bytes = if (value == null) Dolphin.NULL_BYTES else Dolphin.getBytes(value) + Dolphin.getIntBytes(bytes.length) ++ bytes + } + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSet.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSet.scala new file mode 100644 index 0000000000..00beb315c5 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/txt/TextResultSet.scala @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * 
contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.resultset.txt + +import org.apache.linkis.common.io.resultset.{ResultDeserializer, ResultSerializer} +import org.apache.linkis.storage.{LineMetaData, LineRecord} +import org.apache.linkis.storage.resultset.{ResultSetFactory, StorageResultSet} + +class TextResultSet extends StorageResultSet[LineMetaData, LineRecord] with Serializable { + + override def resultSetType(): String = ResultSetFactory.TEXT_TYPE + + override def createResultSetSerializer(): ResultSerializer = new TextResultSerializer + + override def createResultSetDeserializer(): ResultDeserializer[LineMetaData, LineRecord] = + new TextResultDeserializer + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsReader.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsReader.scala new file mode 100644 index 0000000000..d89074b978 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsReader.scala @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.script + +import org.apache.linkis.common.io.{FsPath, FsReader} +import org.apache.linkis.storage.script.reader.StorageScriptFsReader + +import java.io.InputStream + +abstract class ScriptFsReader extends FsReader { + + val path: FsPath + val charset: String + +} + +object ScriptFsReader { + + def getScriptFsReader(path: FsPath, charset: String, inputStream: InputStream): ScriptFsReader = + new StorageScriptFsReader(path, charset, inputStream) + +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsWriter.scala new file mode 100644 index 0000000000..dbcbfc126e --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/ScriptFsWriter.scala @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.script + +import org.apache.linkis.common.io.{FsPath, FsWriter, MetaData} +import org.apache.linkis.storage.LineRecord +import org.apache.linkis.storage.script.compaction.{ + PYScriptCompaction, + QLScriptCompaction, + ScalaScriptCompaction, + ShellScriptCompaction +} +import org.apache.linkis.storage.script.parser.{ + PYScriptParser, + QLScriptParser, + ScalaScriptParser, + ShellScriptParser +} +import org.apache.linkis.storage.script.writer.StorageScriptFsWriter + +import java.io.{InputStream, OutputStream} + +abstract class ScriptFsWriter extends FsWriter { + + val path: FsPath + val charset: String + + def getInputStream(): InputStream + +} + +object ScriptFsWriter { + + def getScriptFsWriter( + path: FsPath, + charset: String, + outputStream: OutputStream = null + ): ScriptFsWriter = + new StorageScriptFsWriter(path, charset, outputStream) + +} + +object ParserFactory { + + def listParsers(): Array[Parser] = + Array(PYScriptParser(), QLScriptParser(), ScalaScriptParser(), ShellScriptParser()) + +} + +object Compaction { + + def listCompactions(): Array[Compaction] = Array( + PYScriptCompaction(), + QLScriptCompaction(), + ScalaScriptCompaction(), + ShellScriptCompaction() + ) + +} + +trait Parser { + def prefixConf: String + + def prefix: String + + def belongTo(suffix: String): Boolean + + def parse(line: String): Variable + + def getAnnotationSymbol(): String +} + +trait Compaction { + + def prefixConf: String + + def prefix: String + + def belongTo(suffix: String): Boolean + + def compact(variable: 
Variable): String + + def getAnnotationSymbol(): String +} + +class ScriptMetaData(var variables: Array[Variable]) extends MetaData { + override def cloneMeta(): MetaData = new ScriptMetaData(variables) + + def getMetaData: Array[Variable] = variables + + def setMetaData(variables: Array[Variable]): Unit = { + this.variables = variables + } + +} + +class ScriptRecord(line: String) extends LineRecord(line) + +// definition variable; specialConfiguration ;runConfiguration; startUpConfiguration; +case class Variable(sortParent: String, sort: String, key: String, value: String) diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala new file mode 100644 index 0000000000..ac89d19ea2 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.storage.script + +import java.util + +import scala.collection.mutable.ArrayBuffer + +object VariableParser { + + val CONFIGURATION: String = "configuration" + val VARIABLE: String = "variable" + val RUNTIME: String = "runtime" + val STARTUP: String = "startup" + val SPECIAL: String = "special" + + def getVariables(params: util.Map[String, Object]): Array[Variable] = { + import scala.collection.JavaConverters._ + val variables = new ArrayBuffer[Variable] + params + .getOrDefault(VARIABLE, new util.HashMap[String, Object]) + .asInstanceOf[util.Map[String, Object]] + .asScala + .foreach(f => variables += Variable(VARIABLE, null, f._1, f._2.toString)) + params + .getOrDefault(CONFIGURATION, new util.HashMap[String, Object]) + .asInstanceOf[util.Map[String, Object]] + .asScala + .foreach { f => + f._2 + .asInstanceOf[util.Map[String, Object]] + .asScala + .filter(s => !isContextIDINFO(s._1)) + .foreach(p => + p._2 match { + case e: util.Map[String, Object] => + e.asScala + .filter(s => !isContextIDINFO(s._1)) + .foreach(s => variables += Variable(f._1, p._1, s._1, s._2.toString)) + case _ => + if (null == p._2) { + variables += Variable(CONFIGURATION, f._1, p._1, "") + } else { + variables += Variable(CONFIGURATION, f._1, p._1, p._2.toString) + } + } + ) + } + variables.toArray + } + + // TODO need delete + def isContextIDINFO(key: String): Boolean = { + "contextID".equalsIgnoreCase(key) || "nodeName".equalsIgnoreCase(key) + } + + def getMap(variables: Array[Variable]): util.Map[String, Object] = { + import scala.collection.JavaConverters._ + val vars = new util.HashMap[String, String] + val confs = new util.HashMap[String, Object] + variables.filter(_.sort == null).foreach(f => vars.put(f.key, f.value)) + variables.filter(_.sort != null).foreach { f => + f.sort match { + case STARTUP | RUNTIME | SPECIAL => + if (confs.get(f.sort) == null) { + confs.put(f.sort, createMap(f)) + } else { + 
confs.get(f.sort).asInstanceOf[util.HashMap[String, Object]].put(f.key, f.value) + } + case _ => + if (confs.get(f.sortParent) == null) { + confs.put(f.sortParent, new util.HashMap[String, Object]) + confs + .get(f.sortParent) + .asInstanceOf[util.HashMap[String, Object]] + .put(f.sort, createMap(f)) + } else { + val subMap = confs.get(f.sortParent).asInstanceOf[util.HashMap[String, Object]] + if (subMap.get(f.sort) == null) { + subMap.put(f.sort, createMap(f)) + } else { + subMap + .get(f.sort) + .asInstanceOf[util.HashMap[String, Object]] + .put(f.key, f.value) + } + } + } + } + val params = new util.HashMap[String, Object] + if (vars.size() > 0) params.asScala += VARIABLE -> vars + if (confs.size() > 0) params.asScala += CONFIGURATION -> confs + params + } + + private def createMap(variable: Variable): util.Map[String, Object] = { + val map = new util.HashMap[String, Object] + map.put(variable.key, variable.value) + map + } + +} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/LineMetaData.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.scala similarity index 60% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/LineMetaData.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.scala index a6dab8b37d..e085d2aacc 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/LineMetaData.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.scala @@ -15,30 +15,20 @@ * limitations under the License. 
*/ -package org.apache.linkis.storage; +package org.apache.linkis.storage.script.compaction -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.storage.resultset.ResultMetaData; +import org.apache.linkis.storage.script.{Compaction, Variable} -public class LineMetaData implements ResultMetaData { +abstract class CommonScriptCompaction extends Compaction { - private String metaData = null; - - public LineMetaData() {} - - public LineMetaData(String metaData) { - this.metaData = metaData; - } - - public String getMetaData() { - return metaData; + override def compact(variable: Variable): String = { + variable.sortParent match { + case "variable" => prefix + " " + variable.key + "=" + variable.value + case _ => + prefixConf + " " + variable.sortParent + " " + variable.sort + " " + variable.key + "=" + variable.value + } } - public void setMetaData(String metaData) { - this.metaData = metaData; - } + override def getAnnotationSymbol(): String = prefix.split('@')(0) - public MetaData cloneMeta() { - return new LineMetaData(metaData); - } } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/PYScriptCompaction.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/PYScriptCompaction.scala new file mode 100644 index 0000000000..a8e72a2d8f --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/PYScriptCompaction.scala @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.script.compaction + +import org.apache.linkis.common.utils.CodeAndRunTypeUtils + +class PYScriptCompaction private extends CommonScriptCompaction { + + override def belongTo(suffix: String): Boolean = + CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( + suffix, + CodeAndRunTypeUtils.LANGUAGE_TYPE_PYTHON + ) + + override def prefix: String = "#@set" + + override def prefixConf: String = "#conf@set" +} + +object PYScriptCompaction { + val pYScriptCompaction: PYScriptCompaction = new PYScriptCompaction + + def apply(): CommonScriptCompaction = pYScriptCompaction +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/QLScriptCompaction.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/QLScriptCompaction.scala new file mode 100644 index 0000000000..7e420f8913 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/QLScriptCompaction.scala @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.script.compaction + +import org.apache.linkis.common.utils.CodeAndRunTypeUtils + +class QLScriptCompaction private extends CommonScriptCompaction { + + override def belongTo(suffix: String): Boolean = + CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( + suffix, + CodeAndRunTypeUtils.LANGUAGE_TYPE_SQL + ) + + override def prefix: String = "--@set" + + override def prefixConf: String = "--conf@set" +} + +object QLScriptCompaction { + val qLScriptCompaction: QLScriptCompaction = new QLScriptCompaction + def apply(): CommonScriptCompaction = qLScriptCompaction +} diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.scala new file mode 100644 index 0000000000..c75c5a32a5 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/ScalaScriptCompaction.scala @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.script.compaction + +import org.apache.linkis.common.utils.CodeAndRunTypeUtils + +class ScalaScriptCompaction private extends CommonScriptCompaction { + override def prefix: String = "//@set" + + override def belongTo(suffix: String): Boolean = + CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( + suffix, + CodeAndRunTypeUtils.LANGUAGE_TYPE_SCALA + ) + + override def prefixConf: String = "//conf@set" +} + +object ScalaScriptCompaction { + private val compaction: ScalaScriptCompaction = new ScalaScriptCompaction + + def apply(): CommonScriptCompaction = compaction +} diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelFsWriter.java b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.scala similarity index 56% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelFsWriter.java rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.scala index cd3969e048..7a05580038 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelFsWriter.java +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/compaction/ShellScriptCompaction.scala @@ -15,27 +15,25 @@ * limitations under the License. 
*/ -package org.apache.linkis.storage.excel; +package org.apache.linkis.storage.script.compaction -import org.apache.linkis.common.io.FsWriter; +import org.apache.linkis.common.utils.CodeAndRunTypeUtils -import java.io.OutputStream; +class ShellScriptCompaction private extends CommonScriptCompaction { + override def prefixConf: String = "#conf@set" -public abstract class ExcelFsWriter extends FsWriter { - public abstract String getCharset(); + override def prefix: String = "#@set" - public abstract String getSheetName(); + override def belongTo(suffix: String): Boolean = + CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( + suffix, + CodeAndRunTypeUtils.LANGUAGE_TYPE_SHELL + ) - public abstract String getDateFormat(); +} - public abstract boolean isAutoFormat(); +object ShellScriptCompaction { + val shellScriptCompaction: ShellScriptCompaction = new ShellScriptCompaction - public static ExcelFsWriter getExcelFsWriter( - String charset, - String sheetName, - String dateFormat, - OutputStream outputStream, - boolean autoFormat) { - return new StorageExcelWriter(charset, sheetName, dateFormat, outputStream, autoFormat); - } + def apply(): CommonScriptCompaction = shellScriptCompaction } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/CommonScriptParser.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/CommonScriptParser.scala new file mode 100644 index 0000000000..b23a521cd2 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/CommonScriptParser.scala @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.storage.script.parser + +import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.INVALID_CUSTOM_PARAMETER +import org.apache.linkis.storage.exception.StorageErrorException +import org.apache.linkis.storage.script.{Parser, Variable, VariableParser} + +abstract class CommonScriptParser extends Parser { + + @scala.throws[StorageErrorException] + def parse(line: String): Variable = { + val variableReg = ("\\s*" + prefix + "\\s*(.+)\\s*" + "=" + "\\s*(.+)\\s*").r + line match { + case variableReg(key, value) => + Variable(VariableParser.VARIABLE, null, key.trim, value.trim) + case _ => + val split = line.split("=") + if (split.length != 2) { + throw new StorageErrorException( + INVALID_CUSTOM_PARAMETER.getErrorCode(), + INVALID_CUSTOM_PARAMETER.getErrorDesc + ) + } + val value = split(1).trim + val subSplit = split(0).split(" ") + if (subSplit.filter(_ != "").size != 4) { + throw new StorageErrorException( + INVALID_CUSTOM_PARAMETER.getErrorCode(), + INVALID_CUSTOM_PARAMETER.getErrorDesc + ) + } + if (!subSplit.filter(_ != "")(0).equals(prefixConf)) { + throw new StorageErrorException( + INVALID_CUSTOM_PARAMETER.getErrorCode(), + INVALID_CUSTOM_PARAMETER.getErrorDesc + ) + } + val sortParent = subSplit.filter(_ != "")(1).trim + val sort = subSplit.filter(_ != "")(2).trim + val key = subSplit.filter(_ != "")(3).trim + Variable(sortParent, sort, 
key, value) + } + } + + override def getAnnotationSymbol(): String = prefix.split('@')(0) + +} diff --git a/linkis-public-enhancements/linkis-pes-common/src/test/scala/org/apache/linkis/errorcode/common/LinkisErrorCodeTest.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/PYScriptParser.scala similarity index 59% rename from linkis-public-enhancements/linkis-pes-common/src/test/scala/org/apache/linkis/errorcode/common/LinkisErrorCodeTest.scala rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/PYScriptParser.scala index 6e82f0ebcc..027b632146 100644 --- a/linkis-public-enhancements/linkis-pes-common/src/test/scala/org/apache/linkis/errorcode/common/LinkisErrorCodeTest.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/PYScriptParser.scala @@ -15,21 +15,25 @@ * limitations under the License. */ -package org.apache.linkis.errorcode.common +package org.apache.linkis.storage.script.parser -import org.junit.jupiter.api.{Assertions, DisplayName, Test} +import org.apache.linkis.common.utils.CodeAndRunTypeUtils -class LinkisErrorCodeTest { +class PYScriptParser private extends CommonScriptParser { + override def prefix: String = "#@set" - @Test - @DisplayName("linkisErrorCodeTest") - def linkisErrorCodeTest(): Unit = { - val errorCode = new LinkisErrorCode( - "11000", - "Failed to get datasource info from datasource server(从数据源服务器获取数据源信息失败)" + override def belongTo(suffix: String): Boolean = + CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( + suffix, + CodeAndRunTypeUtils.LANGUAGE_TYPE_PYTHON ) - Assertions.assertEquals("11000", errorCode.getErrorCode) - } + override def prefixConf: String = "#conf@set" +} + +object PYScriptParser { + val pYScriptParser: PYScriptParser = new PYScriptParser + + def apply(): CommonScriptParser = pYScriptParser } diff --git 
a/linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/response/ScriptFromBMLResponseTest.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/QLScriptParser.scala similarity index 59% rename from linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/response/ScriptFromBMLResponseTest.scala rename to linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/QLScriptParser.scala index c4e9e386cb..a089f9c367 100644 --- a/linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/response/ScriptFromBMLResponseTest.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/QLScriptParser.scala @@ -15,24 +15,24 @@ * limitations under the License. */ -package org.apache.linkis.filesystem.response +package org.apache.linkis.storage.script.parser -import org.junit.jupiter.api.{Assertions, DisplayName, Test} +import org.apache.linkis.common.utils.CodeAndRunTypeUtils -class ScriptFromBMLResponseTest { +class QLScriptParser private extends CommonScriptParser { + override def prefix: String = "--@set" - @Test - @DisplayName("commonConst") - def commonConst(): Unit = { - val scriptContent = "show databases;" - val response = new ScriptFromBMLResponse( - scriptContent, - new java.util.HashMap[String, java.util.Map[String, Object]] + override def belongTo(suffix: String): Boolean = + CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot( + suffix, + CodeAndRunTypeUtils.LANGUAGE_TYPE_SQL ) - Assertions.assertEquals(scriptContent, response.scriptContent) - Assertions.assertTrue(response.metadata.size() == 0) + override def prefixConf: String = "--conf@set" +} - } +object QLScriptParser { + val qLScriptParser: QLScriptParser = new QLScriptParser + def apply(): CommonScriptParser = qLScriptParser } diff --git 
a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/ScalaScriptParser.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/ScalaScriptParser.scala new file mode 100644 index 0000000000..e6729326e8 --- /dev/null +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/parser/ScalaScriptParser.scala @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.linkis.storage.script.parser

import org.apache.linkis.common.utils.CodeAndRunTypeUtils

/**
 * Script parser for Scala scripts.
 *
 * Variable lines are marked with the "//@set" prefix and configuration lines
 * with the "//conf@set" prefix (both use Scala's "//" comment marker).
 */
class ScalaScriptParser private extends CommonScriptParser {
  // TODO: prefix marker still to be confirmed
  override def prefix: String = "//@set"

  /** A script belongs to this parser when its suffix maps to the Scala language type. */
  override def belongTo(suffix: String): Boolean =
    CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot(
      suffix,
      CodeAndRunTypeUtils.LANGUAGE_TYPE_SCALA
    )

  /** Marker preceding a configuration definition line. */
  override def prefixConf: String = "//conf@set"
}

object ScalaScriptParser {

  // Shared singleton: the parser holds no state, so one instance is enough.
  val otherScriptParser: ScalaScriptParser = new ScalaScriptParser

  def apply(): CommonScriptParser = otherScriptParser
}
package org.apache.linkis.storage.script.parser

import org.apache.linkis.common.utils.CodeAndRunTypeUtils

/**
 * Script parser for shell scripts.
 *
 * Variable lines are marked with the "#@set" prefix and configuration lines
 * with the "#conf@set" prefix (both use the shell's '#' comment character).
 */
class ShellScriptParser private extends CommonScriptParser {

  /** Marker preceding a variable definition line, e.g. "#@set a=1". */
  override def prefix: String = "#@set"

  /** A script belongs to this parser when its suffix maps to the shell language type. */
  override def belongTo(suffix: String): Boolean =
    CodeAndRunTypeUtils.getSuffixBelongToLanguageTypeOrNot(
      suffix,
      CodeAndRunTypeUtils.LANGUAGE_TYPE_SHELL
    )

  /** Marker preceding a configuration definition line. */
  override def prefixConf: String = "#conf@set"
}

object ShellScriptParser {

  // Shared singleton: the parser holds no state, so one instance is enough.
  val shellScriptParser: ShellScriptParser = new ShellScriptParser

  def apply(): CommonScriptParser = shellScriptParser

}
package org.apache.linkis.storage.script.reader

import org.apache.linkis.common.io.{FsPath, MetaData, Record}
import org.apache.linkis.storage.script._
import org.apache.linkis.storage.utils.StorageUtils

import org.apache.commons.io.IOUtils

import java.io._

import scala.collection.mutable.ArrayBuffer

/**
 * Reads a script file from `inputStream`, first consuming the leading
 * annotation block ("@set" variable / "conf@set" configuration lines) as
 * [[ScriptMetaData]], then serving the remaining lines one-by-one as
 * [[ScriptRecord]]s.
 *
 * Not thread-safe: `lineText` is a one-line look-ahead buffer shared by
 * `getMetaData`/`getRecord`/`hasNext`, so callers must read sequentially.
 */
class StorageScriptFsReader(val path: FsPath, val charset: String, val inputStream: InputStream)
  extends ScriptFsReader {

  private var inputStreamReader: InputStreamReader = _
  private var bufferedReader: BufferedReader = _

  // Parsed annotation block; null until getMetaData has been called.
  private var metadata: ScriptMetaData = _

  // Variables collected from the annotation block.
  private var variables: ArrayBuffer[Variable] = new ArrayBuffer[Variable]()
  // One-line look-ahead buffer: always holds the next unreturned line (null at EOF).
  private var lineText: String = _

  /**
   * Return the buffered line as a record and advance the look-ahead buffer.
   * @throws IOException if metadata has not been read yet
   */
  @scala.throws[IOException]
  override def getRecord: Record = {

    if (metadata == null) throw new IOException("Must read metadata first(必须先读取metadata)")
    val record = new ScriptRecord(lineText)
    lineText = bufferedReader.readLine()
    record
  }

  /**
   * Parse the leading annotation lines into metadata. After this returns,
   * `lineText` holds the first non-annotation line.
   *
   * NOTE(review): calling this a second time re-parses from the current stream
   * position and appends to `variables` again — presumably callers invoke it
   * only once; confirm before relying on repeated calls.
   */
  @scala.throws[IOException]
  override def getMetaData: MetaData = {
    if (metadata == null) init()
    val parser = getScriptParser()
    lineText = bufferedReader.readLine()
    while (hasNext && parser != null && isMetadata(lineText, parser.prefix, parser.prefixConf)) {
      variables += parser.parse(lineText)
      lineText = bufferedReader.readLine()
    }
    metadata = new ScriptMetaData(variables.toArray)
    metadata
  }

  /** Lazily wrap the raw input stream in a buffered line reader. */
  def init(): Unit = {
    inputStreamReader = new InputStreamReader(inputStream)
    bufferedReader = new BufferedReader(inputStreamReader)
  }

  /**
   * Skip up to `recordNum` records; returns -1 for negative input.
   * Reads metadata first if necessary so only data lines are skipped.
   * NOTE(review): any failure is swallowed and `recordNum` is returned as if
   * the skip succeeded — best-effort by design, apparently.
   */
  @scala.throws[IOException]
  override def skip(recordNum: Int): Int = {
    if (recordNum < 0) return -1
    if (metadata == null) getMetaData
    try bufferedReader.skip(recordNum).toInt
    catch { case t: Throwable => recordNum }
  }

  /** Position tracking is not supported for script reads. */
  @scala.throws[IOException]
  override def getPosition: Long = -1L

  /** True while the look-ahead buffer holds a line (i.e. not at EOF). */
  @scala.throws[IOException]
  override def hasNext: Boolean = lineText != null

  @scala.throws[IOException]
  override def available: Long = if (inputStream != null) inputStream.available() else 0L

  override def close(): Unit = {
    IOUtils.closeQuietly(bufferedReader)
    IOUtils.closeQuietly(inputStreamReader)
    IOUtils.closeQuietly(inputStream)
  }

  /**
   * Determine whether `line` is an annotation (metadata) line: either a
   * "<prefix> key=value" variable line, or a 4-token "<prefixConf> ..." line.
   *
   * NOTE(review): `prefix` is interpolated into the regex unescaped — safe for
   * the current prefixes ("#@set", "--@set", "//@set") but would break if a
   * prefix ever contained regex metacharacters.
   *
   * @param line the raw script line
   * @return true when the line belongs to the annotation block
   */
  def isMetadata(line: String, prefix: String, prefixConf: String): Boolean = {
    val regex = ("\\s*" + prefix + "\\s*(.+)\\s*" + "=" + "\\s*(.+)\\s*").r
    line match {
      case regex(_, _) => true
      case _ =>
        // Fall back to configuration-line shape: "<prefixConf> a b = c"
        // (left of '=' must be exactly four whitespace-separated tokens
        // starting with prefixConf).
        val split: Array[String] = line.split("=")
        if (split.size != 2) return false
        if (split(0).split(" ").filter(_ != "").size != 4) return false
        if (!split(0).split(" ").filter(_ != "")(0).equals(prefixConf)) return false
        true
    }
  }

  /**
   * Get the script parser matching this file's suffix, or null when no
   * registered parser claims the suffix.
   * @return
   *   the matching parser, or null
   */

  def getScriptParser(): Parser = {
    val parsers =
      ParserFactory.listParsers().filter(p => p.belongTo(StorageUtils.pathToSuffix(path.getPath)))
    if (parsers.length > 0) {
      parsers(0)
    } else {
      null
    }
  }

}
package org.apache.linkis.storage.script.writer

import org.apache.linkis.common.io.{FsPath, MetaData, Record}
import org.apache.linkis.common.utils.{Logging, Utils}
import org.apache.linkis.storage.LineRecord
import org.apache.linkis.storage.script.{Compaction, ScriptFsWriter, ScriptMetaData}
import org.apache.linkis.storage.utils.{StorageConfiguration, StorageUtils}

import org.apache.commons.io.IOUtils
import org.apache.hadoop.hdfs.client.HdfsDataOutputStream

import java.io.{ByteArrayInputStream, InputStream, IOException, OutputStream}
import java.util

/**
 * Writes a script file: the metadata block is rendered first as annotation
 * lines (via the suffix-matched [[Compaction]]), followed by the raw script
 * lines. When no `outputStream` is supplied, output is buffered in memory and
 * can be retrieved through [[getInputStream]].
 */
class StorageScriptFsWriter(
    val path: FsPath,
    val charset: String,
    outputStream: OutputStream = null
) extends ScriptFsWriter
    with Logging {

  // In-memory buffer used only when outputStream is null.
  private val stringBuilder = new StringBuilder

  /**
   * Render the script metadata as annotation lines. No-op when no compaction
   * matches this file's suffix.
   */
  @scala.throws[IOException]
  override def addMetaData(metaData: MetaData): Unit = {

    val metadataLine = new util.ArrayList[String]()
    val compaction = getScriptCompaction()
    if (compaction != null) {

      metaData
        .asInstanceOf[ScriptMetaData]
        .getMetaData
        .map(compaction.compact)
        .foreach(metadataLine.add)
      // Append the bare annotation symbol as a terminator after the metadata block.
      if (metadataLine.size() > 0) {
        metadataLine.add(compaction.getAnnotationSymbol())
      }
      if (outputStream != null) {
        IOUtils.writeLines(metadataLine, "\n", outputStream, charset)
      } else {
        import scala.collection.JavaConverters._
        metadataLine.asScala.foreach(m => stringBuilder.append(s"$m\n"))
      }
    }

  }

  @scala.throws[IOException]
  override def addRecord(record: Record): Unit = {
    // Cast to LineRecord rather than TableRecord so that non-table result sets
    // can also be written through this writer.
    val scriptRecord = record.asInstanceOf[LineRecord]
    if (outputStream != null) {
      IOUtils.write(scriptRecord.getLine, outputStream, charset)
    } else {
      stringBuilder.append(scriptRecord.getLine)
    }
  }

  override def close(): Unit = {
    if (outputStream != null) {
      IOUtils.closeQuietly(outputStream)
    }
  }

  /** Flush pending bytes; uses hflush for HDFS streams so readers see the data. */
  override def flush(): Unit = if (outputStream != null) {
    Utils.tryAndWarnMsg[Unit] {
      outputStream match {
        case hdfs: HdfsDataOutputStream =>
          hdfs.hflush()
        case _ =>
          outputStream.flush()
      }
    }(s"Error encounters when flush script ")
  }

  /**
   * Expose the in-memory buffer as a stream. Only meaningful when the writer
   * was constructed without an output stream.
   */
  def getInputStream(): InputStream = {
    new ByteArrayInputStream(
      stringBuilder.toString().getBytes(StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue)
    )
  }

  /**
   * Get the script compaction matching this file's suffix.
   * @return
   *   the matching compaction, or null when none claims the suffix
   */

  def getScriptCompaction(): Compaction = {

    val compactions = Compaction
      .listCompactions()
      .filter(p => p.belongTo(StorageUtils.pathToSuffix(path.getPath)))

    if (compactions.length > 0) {
      compactions(0)
    } else {
      null
    }
  }

}
package org.apache.linkis.storage.source

import org.apache.linkis.common.io.{FsWriter, MetaData, Record}

import org.apache.commons.io.IOUtils
import org.apache.commons.math3.util.Pair

import java.util

import scala.collection.JavaConverters._

/**
 * Base [[FileSource]] implementation that fans every operation out to the
 * underlying [[FileSplit]] array. Configuration methods return `this` so
 * calls can be chained fluently.
 */
abstract class AbstractFileSource(var fileSplits: Array[FileSplit]) extends FileSource {

  override def shuffle(s: Record => Record): FileSource = {
    for (split <- fileSplits) {
      split.shuffler = s
    }
    this
  }

  override def page(page: Int, pageSize: Int): FileSource = {
    for (split <- fileSplits) {
      split.page(page, pageSize)
    }
    this
  }

  override def addParams(params: util.Map[String, String]): FileSource = {
    for (split <- fileSplits) {
      split.addParams(params)
    }
    this
  }

  override def addParams(key: String, value: String): FileSource = {
    for (split <- fileSplits) {
      split.addParams(key, value)
    }
    this
  }

  override def getFileSplits: Array[FileSplit] = fileSplits

  /** Merge the parameter maps of all splits; on key clashes later splits win. */
  override def getParams: util.Map[String, String] =
    fileSplits.flatMap(split => split.params.asScala).toMap.asJava

  override def write[K <: MetaData, V <: Record](fsWriter: FsWriter[K, V]): Unit =
    for (split <- fileSplits) {
      split.write(fsWriter)
    }

  override def close(): Unit = fileSplits.foreach(IOUtils.closeQuietly)

  override def collect(): Array[Pair[Object, util.ArrayList[Array[String]]]] =
    fileSplits.map(split => split.collect())

  /** Total number of records consumed across all splits. */
  override def getTotalLine: Int = fileSplits.foldLeft(0)(_ + _.totalLine)

  override def getTypes: Array[String] = fileSplits.map(split => split.`type`)

  override def getFileInfo(needToCountRowNumber: Int = 5000): Array[Pair[Int, Int]] =
    fileSplits.map(split => split.getFileInfo(needToCountRowNumber))

  override def limitBytes(limitBytes: Long): FileSource = {
    for (split <- fileSplits) {
      split.setLimitBytes(limitBytes)
    }
    this
  }

  override def limitColumnLength(limitColumnLength: Int): FileSource = {
    for (split <- fileSplits) {
      split.setLimitColumnLength(limitColumnLength)
    }
    this
  }

}
package org.apache.linkis.storage.source

import org.apache.linkis.common.io._
import org.apache.linkis.common.utils.Logging
import org.apache.linkis.storage.conf.LinkisStorageConf
import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE
import org.apache.linkis.storage.exception.StorageErrorException
import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetReader}
import org.apache.linkis.storage.script.ScriptFsReader
import org.apache.linkis.storage.utils.StorageConfiguration

import org.apache.commons.math3.util.Pair

import java.io.{Closeable, InputStream}
import java.util

/**
 * A readable source backed by one or more [[FileSplit]]s: either a result-set
 * file (dolphin) or a plain script/text file. Provides paging, record
 * transformation, size limiting and collection into row arrays.
 */
trait FileSource extends Closeable {

  /** Install a per-record transformation applied before records are returned. */
  def shuffle(s: Record => Record): FileSource

  /** Restrict reads to the given 1-based page of `pageSize` records. */
  def page(page: Int, pageSize: Int): FileSource

  /** Materialize each split as (metadata, rows). */
  def collect(): Array[Pair[Object, util.ArrayList[Array[String]]]]

  /** Per split: (column count, row count counted up to `needToCountRowNumber`). */
  def getFileInfo(needToCountRowNumber: Int = 5000): Array[Pair[Int, Int]]

  /** Stream all records of all splits into the given writer. */
  def write[K <: MetaData, V <: Record](fsWriter: FsWriter[K, V]): Unit

  def addParams(params: util.Map[String, String]): FileSource

  def addParams(key: String, value: String): FileSource

  def getParams: util.Map[String, String]

  def getTotalLine: Int

  /** Stop collecting further records once this many bytes have been gathered. */
  def limitBytes(limitBytes: Long): FileSource

  /** Truncate collected column values to this many characters. */
  def limitColumnLength(limitColumnLength: Int): FileSource

  def getTypes: Array[String]

  def getFileSplits: Array[FileSplit]
}

object FileSource extends Logging {

  // Known readable suffixes; by convention the first entry is the result-set suffix.
  private val fileType = LinkisStorageConf.getFileTypeArr
  private val suffixPredicate = (path: String, suffix: String) => path.endsWith(s".$suffix")

  /** True when the path carries the result-set (first configured) suffix. */
  def isResultSet(path: String): Boolean = {
    suffixPredicate(path, fileType.head)
  }

  def isResultSet(fsPath: FsPath): Boolean = {
    isResultSet(fsPath.getPath)
  }

  /**
   * Currently only table result sets are supported for multi-result reads.
   *
   * @param fsPaths
   * @param fs
   * @return
   */
  def create(fsPaths: Array[FsPath], fs: Fs): FileSource = {
    // Filter out non-table result sets.
    val fileSplits = fsPaths.map(createResultSetFileSplit(_, fs)).filter(isTableResultSet)
    new ResultsetFileSource(fileSplits)
  }

  private def isTableResultSet(fileSplit: FileSplit): Boolean =
    fileSplit.`type`.equals(ResultSetFactory.TABLE_TYPE)

  def isTableResultSet(fileSource: FileSource): Boolean = {
    // True only when every split is a table result set.
    fileSource.getFileSplits.forall(isTableResultSet)
  }

  /**
   * Open a single path via the file system; result-set files become a
   * [[ResultsetFileSource]], everything else a [[TextFileSource]].
   * @throws StorageErrorException when the suffix is not readable
   */
  def create(fsPath: FsPath, fs: Fs): FileSource = {
    if (!canRead(fsPath.getPath)) {
      throw new StorageErrorException(
        UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode,
        UNSUPPORTED_OPEN_FILE_TYPE.getErrorDesc
      )
    }
    if (isResultSet(fsPath)) {
      new ResultsetFileSource(Array(createResultSetFileSplit(fsPath, fs)))
    } else {
      new TextFileSource(Array(createTextFileSplit(fsPath, fs)))
    }
  }

  /** Open a single path from an already-opened input stream. */
  def create(fsPath: FsPath, is: InputStream): FileSource = {
    if (!canRead(fsPath.getPath)) {
      throw new StorageErrorException(
        UNSUPPORTED_OPEN_FILE_TYPE.getErrorCode,
        UNSUPPORTED_OPEN_FILE_TYPE.getErrorDesc
      )
    }
    if (isResultSet(fsPath)) {
      new ResultsetFileSource(Array(createResultSetFileSplit(fsPath, is)))
    } else {
      new TextFileSource(Array(createTextFileSplit(fsPath, is)))
    }
  }

  private def createResultSetFileSplit(fsPath: FsPath, is: InputStream): FileSplit = {
    val resultset = ResultSetFactory.getInstance.getResultSetByPath(fsPath)
    val resultsetReader = ResultSetReader.getResultSetReader(resultset, is)
    new FileSplit(resultsetReader, resultset.resultSetType())
  }

  private def createResultSetFileSplit(fsPath: FsPath, fs: Fs): FileSplit = {
    logger.info(s"try create result set file split with path:${fsPath.getPath}")
    val resultset = ResultSetFactory.getInstance.getResultSetByPath(fsPath, fs)
    val resultsetReader = ResultSetReader.getResultSetReader(resultset, fs.read(fsPath))
    new FileSplit(resultsetReader, resultset.resultSetType())
  }

  private def createTextFileSplit(fsPath: FsPath, is: InputStream): FileSplit = {
    val scriptFsReader = ScriptFsReader.getScriptFsReader(
      fsPath,
      StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue,
      is
    )
    new FileSplit(scriptFsReader)
  }

  private def createTextFileSplit(fsPath: FsPath, fs: Fs): FileSplit = {
    val scriptFsReader = ScriptFsReader.getScriptFsReader(
      fsPath,
      StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue,
      fs.read(fsPath)
    )
    new FileSplit(scriptFsReader)
  }

  /** True when the path ends with any of the configured readable suffixes. */
  private def canRead(path: String): Boolean = {
    fileType.exists(suffixPredicate(path, _))
  }

}
package org.apache.linkis.storage.source

import org.apache.linkis.common.io.{FsReader, FsWriter, MetaData, Record}
import org.apache.linkis.storage.{LineMetaData, LineRecord}
import org.apache.linkis.storage.domain.{Column, DataType}
import org.apache.linkis.storage.resultset.table.{TableMetaData, TableRecord}
import org.apache.linkis.storage.script.{ScriptMetaData, VariableParser}
import org.apache.linkis.storage.script.reader.StorageScriptFsReader

import org.apache.commons.io.IOUtils
import org.apache.commons.math3.util.Pair

import java.io.Closeable
import java.util

import scala.collection.JavaConverters._

/**
 * One readable unit of a [[FileSource]]: wraps an [[FsReader]] and adds paging,
 * per-record transformation ("shuffling"), byte/column-length limiting and
 * collection into row arrays. Stateful and single-use: records are consumed
 * from the underlying reader as they are iterated.
 */
class FileSplit(
    var fsReader: FsReader[_ <: MetaData, _ <: Record],
    var `type`: String = "script/text"
) extends Closeable {

  // First record index (inclusive) when paging is enabled.
  var start: Int = 0

  // Last record index (inclusive); -1 means unbounded.
  var end: Int = -1

  // Index of the record currently being consumed.
  var count: Int = 0

  // Number of records actually delivered to consumers.
  var totalLine = 0

  // Transformation applied to every record before it is handed out.
  var shuffler: Record => Record = r => r

  // Set once paging has been configured; further page() calls are ignored.
  var pageTrigger: Boolean = false

  var params: util.Map[String, String] = new util.HashMap[String, String]

  // collect() stops gathering records once this many bytes were collected (0 = unlimited).
  private var limitBytes = 0L
  // collect() truncates column values to this length (0 = unlimited).
  private var limitColumnLength = 0

  /** Enable paging for the given 1-based page; only the first call takes effect. */
  def page(page: Int, pageSize: Int): Unit = {
    if (!pageTrigger) {
      start = (page - 1) * pageSize
      end = pageSize * page - 1
      pageTrigger = true
    }
  }

  def addParams(params: util.Map[String, String]): Unit = {
    this.params.putAll(params)
  }

  def addParams(key: String, value: String): Unit = {
    this.params.put(key, value)
  }

  /**
   * Iterate the split: hand the metadata to `m`, then each shuffled record to
   * `r`, honoring paging bounds. For script readers, the single annotation
   * terminator line emitted after a non-empty metadata block is dropped.
   *
   * @return the value produced by `m` from the metadata
   */
  def `while`[M](m: MetaData => M, r: Record => Unit): M = {
    val metaData = fsReader.getMetaData
    val t = m(metaData)
    if (pageTrigger) {
      fsReader.skip(start)
    }
    count = start
    var hasRemovedFlag = false
    while (fsReader.hasNext && ifContinueRead) {
      val record = fsReader.getRecord
      var needRemoveFlag = false
      // Drop the annotation-symbol line once, right after a non-empty metadata block.
      if (!hasRemovedFlag && fsReader.isInstanceOf[StorageScriptFsReader]) {
        val parser = fsReader.asInstanceOf[StorageScriptFsReader].getScriptParser()
        val meta = metaData.asInstanceOf[ScriptMetaData].getMetaData
        if (
          meta != null && meta.length > 0
          && parser != null && parser.getAnnotationSymbol().equals(record.toString)
        ) {
          needRemoveFlag = true
          hasRemovedFlag = true
        }
      }
      if (!needRemoveFlag) {
        r(shuffler(record))
        totalLine += 1
        count += 1
      }
    }
    t
  }

  /**
   * Get the colNumber and rowNumber of the rows to be counted.
   * @param needToCountRowNumber
   *   maximum rows to count; -1 counts all rows
   * @return
   *   colNumber, rowNumber
   */
  def getFileInfo(needToCountRowNumber: Int = 5000): Pair[Int, Int] = {
    val metaData = fsReader.getMetaData
    val colNumber = metaData match {
      case tableMetaData: TableMetaData => tableMetaData.columns.length
      case _ => 1
    }
    val rowNumber = if (needToCountRowNumber == -1) {
      fsReader.skip(Int.MaxValue)
    } else {
      fsReader.skip(needToCountRowNumber)
    }
    new Pair(colNumber, rowNumber)
  }

  /** Stream metadata and all records into the given writer. */
  def write[K <: MetaData, V <: Record](fsWriter: FsWriter[K, V]): Unit = {
    `while`(fsWriter.addMetaData, fsWriter.addRecord)
  }

  /**
   * Materialize the split as (metadata, rows). When a byte limit is set,
   * collection stops after the record that crosses the limit (that record is
   * still included).
   */
  def collect(): Pair[Object, util.ArrayList[Array[String]]] = {
    val record = new util.ArrayList[Array[String]]
    var overFlag = false
    var tmpBytes = 0L

    val metaData = `while`(
      collectMetaData,
      r => {
        if (limitBytes > 0) {
          // BUGFIX: previously, once the limit was exceeded, control fell into
          // the unlimited branch and every remaining record was still added,
          // so limitBytes never actually limited the output.
          if (!overFlag) {
            val resArr = collectRecord(r)
            resArr.foreach(res => tmpBytes = tmpBytes + res.getBytes.length)
            if (tmpBytes > limitBytes) {
              overFlag = true
            }
            record.add(resArr)
          }
        } else {
          record.add(collectRecord(r))
        }
      }
    )
    new Pair(metaData, record)
  }

  /**
   * Render a record as a string array, truncating column values to
   * `limitColumnLength` when that limit is set.
   * NOTE(review): non-exhaustive match — records that are neither TableRecord
   * nor LineRecord throw MatchError; presumably those are the only types here.
   */
  def collectRecord(record: Record): Array[String] = {
    record match {
      case t: TableRecord =>
        if (limitColumnLength > 0) {
          t.row.map { col =>
            val str = DataType.valueToString(col)
            if (str.length > limitColumnLength) {
              str.substring(0, limitColumnLength)
            } else {
              str
            }
          }
        } else {
          t.row.map(DataType.valueToString)
        }
      case l: LineRecord => Array(l.getLine)
    }
  }

  /** Render metadata for script/text, line and table result sets. */
  def collectMetaData(metaData: MetaData): Object = {
    metaData match {
      case s: ScriptMetaData => VariableParser.getMap(s.getMetaData)
      case l: LineMetaData => l.getMetaData
      case t: TableMetaData => t.columns.map(ColumnToMap)
    }
  }

  private def ColumnToMap(column: Column): java.util.Map[String, String] = {
    Map[String, String](
      "columnName" -> column.columnName,
      "comment" -> column.comment,
      "dataType" -> column.dataType.typeName
    )
  }.asJava

  /** Without paging, always read on; with paging, stop past the page end. */
  def ifContinueRead: Boolean = !pageTrigger || count <= end

  /** Without paging, always start; with paging, start once the page begins. */
  def ifStartRead: Boolean = !pageTrigger || count >= start

  def setLimitBytes(limitBytes: Long): Unit = {
    this.limitBytes = limitBytes
  }

  def setLimitColumnLength(limitColumnLength: Int): Unit = {
    this.limitColumnLength = limitColumnLength
  }

  override def close(): Unit = IOUtils.closeQuietly(fsReader)

}
package org.apache.linkis.storage.source

import org.apache.linkis.storage.domain.Dolphin
import org.apache.linkis.storage.resultset.table.TableRecord

import org.apache.linkis.storage.utils.StorageUtils

/**
 * [[FileSource]] over dolphin result-set files. Installs a shuffler that
 * normalizes table rows: null/"NULL" and empty cells are replaced by the
 * caller-supplied "nullValue" param (unless that param is the internal
 * LINKIS_NULL sentinel, in which case the original value is kept), and doubles
 * are rendered with the storage-wide formatting.
 */
class ResultsetFileSource(fileSplits: Array[FileSplit]) extends AbstractFileSource(fileSplits) {

  shuffle({
    case t: TableRecord =>
      new TableRecord(t.row.map { rvalue =>
        {
          rvalue match {
            // Null-ish cells: substitute the configured display value,
            // defaulting to the literal "NULL".
            case null | "NULL" =>
              val nullValue = getParams.getOrDefault("nullValue", "NULL")
              if (nullValue.equals(Dolphin.LINKIS_NULL)) {
                // LINKIS_NULL means "keep the raw value" — no substitution.
                rvalue
              } else {
                nullValue
              }
            // Empty cells default to staying empty unless a display value is set.
            case "" =>
              val nullValue = getParams.getOrDefault("nullValue", "")
              if (nullValue.equals(Dolphin.LINKIS_NULL)) {
                ""
              } else {
                nullValue
              }
            case value: Double => StorageUtils.doubleToString(value)
            case _ => rvalue
          }
        }
      })
    // Non-table records pass through unchanged.
    case record => record
  })

}
package org.apache.linkis.storage.source

import org.apache.linkis.storage.LineRecord
import org.apache.linkis.storage.script.ScriptRecord

import org.apache.commons.math3.util.Pair

import java.util

import scala.collection.JavaConverters._

/**
 * [[FileSource]] over plain script/text files. Empty script lines are
 * normalized to a single "\n" record; by default `collect()` merges all lines
 * of a split into one newline-joined string (disable with param
 * ifMerge=false).
 */
class TextFileSource(fileSplits: Array[FileSplit]) extends AbstractFileSource(fileSplits) {

  shuffle({
    // Represent blank script lines explicitly so merging preserves them.
    case s: ScriptRecord if "".equals(s.getLine) => new LineRecord("\n")
    case record => record
  })

  override def collect(): Array[Pair[Object, util.ArrayList[Array[String]]]] = {
    val collects: Array[Pair[Object, util.ArrayList[Array[String]]]] = super.collect()
    if (!getParams.getOrDefault("ifMerge", "true").toBoolean) return collects
    val snds: Array[util.ArrayList[Array[String]]] = collects.map(_.getSecond)
    snds.foreach { snd =>
      val str = new StringBuilder
      // NOTE(review): non-exhaustive match — rows here are always
      // single-element arrays from LineRecord; other shapes would throw.
      snd.asScala.foreach {
        case Array("\n") => str.append("\n")
        case Array(y) => str.append(y).append("\n")
      }
      // Replace the per-line rows with one merged row holding the whole text.
      snd.clear()
      snd.add(Array(str.toString()))
    }
    collects
  }

}
object FileSystemUtils extends Logging {

  /**
   * Copy `origin` to `filePath` as the proxy user `user`, creating the target file (and any
   * missing parent directories) first. The filesystem handle is always closed afterwards.
   *
   * @param filePath destination path
   * @param origin   source path
   * @param user     proxy user to act as
   */
  def copyFile(filePath: FsPath, origin: FsPath, user: String): Unit = {
    val fileSystem = FSFactory.getFsByProxyUser(filePath, user).asInstanceOf[FileSystem]
    Utils.tryFinally {
      fileSystem.init(null)
      if (!fileSystem.exists(filePath)) {
        if (!fileSystem.exists(filePath.getParent)) {
          fileSystem.mkdirs(filePath.getParent)
        }
        fileSystem.createNewFile(filePath)
      }
      fileSystem.copyFile(origin, filePath)
    }(Utils.tryQuietly(fileSystem.close()))
  }

  /**
   * Create a new file(创建新文件) as the current JVM user.
   *
   * @param filePath path of the file to create
   * @param createParentWhenNotExists
   *   Whether to recursively create missing parent directories(是否递归创建目录)
   */
  def createNewFile(filePath: FsPath, createParentWhenNotExists: Boolean): Unit = {
    createNewFile(filePath, StorageUtils.getJvmUser, createParentWhenNotExists)
  }

  /**
   * Create a new file as proxy user `user`; the filesystem handle is always closed afterwards.
   */
  def createNewFile(filePath: FsPath, user: String, createParentWhenNotExists: Boolean): Unit = {
    val fileSystem = FSFactory.getFsByProxyUser(filePath, user).asInstanceOf[FileSystem]
    Utils.tryFinally {
      fileSystem.init(null)
      createNewFileWithFileSystem(fileSystem, filePath, user, createParentWhenNotExists)
    }(Utils.tryQuietly(fileSystem.close()))
  }

  /**
   * Create `filePath` with an already-initialised `fileSystem`. Ownership is only set for
   * local filesystems (HDFS files are created as the proxy user already).
   *
   * @throws IOException when the parent is missing and `createParentWhenNotExists` is false
   */
  @deprecated("please use createNewFileAndSetOwnerWithFileSystem")
  def createNewFileWithFileSystem(
      fileSystem: FileSystem,
      filePath: FsPath,
      user: String,
      createParentWhenNotExists: Boolean
  ): Unit = {
    if (!fileSystem.exists(filePath)) {
      if (!fileSystem.exists(filePath.getParent)) {
        if (!createParentWhenNotExists) {
          throw new IOException("parent dir " + filePath.getParent.getPath + " does not exist.")
        }
        mkdirs(fileSystem, filePath.getParent, user)
      }
      fileSystem.createNewFile(filePath)
      fileSystem match {
        case _: LocalFileSystem => fileSystem.setOwner(filePath, user)
        case _ => logger.info("doesn't need to call setOwner")
      }
    }
  }

  /**
   * Create a new file and unconditionally set its owner, regardless of filesystem type.
   *
   * @throws IOException when the parent is missing and `createParentWhenNotExists` is false
   */
  def createNewFileAndSetOwnerWithFileSystem(
      fileSystem: FileSystem,
      filePath: FsPath,
      user: String,
      createParentWhenNotExists: Boolean
  ): Unit = {
    if (!fileSystem.exists(filePath)) {
      if (!fileSystem.exists(filePath.getParent)) {
        if (!createParentWhenNotExists) {
          throw new IOException("parent dir " + filePath.getParent.getPath + " does not exist.")
        }
        mkdirsAndSetOwner(fileSystem, filePath.getParent, user)
      }
      fileSystem.createNewFile(filePath)
      fileSystem.setOwner(filePath, user)
    }
  }

  /**
   * Collect `dest` plus every missing ancestor directory onto a stack (deepest entry popped
   * first) and verify the nearest existing ancestor is executable by the current user.
   *
   * @throws IOException when the nearest existing ancestor is not executable
   */
  private def collectMissingDirs(fileSystem: FileSystem, dest: FsPath): util.Stack[FsPath] = {
    var parentPath = dest.getParent
    val dirsToMake = new util.Stack[FsPath]()
    dirsToMake.push(dest)
    while (!fileSystem.exists(parentPath)) {
      dirsToMake.push(parentPath)
      parentPath = parentPath.getParent
    }
    if (!fileSystem.canExecute(parentPath)) {
      throw new IOException("You have not permission to access path " + dest.getPath)
    }
    dirsToMake
  }

  /**
   * Recursively create a directory(递归创建目录); sets ownership only on local filesystems.
   *
   * @throws IOException on permission failure
   * @return always true when no exception is thrown
   */
  @throws[IOException]
  @deprecated("please use mkdirsAndSetOwner")
  def mkdirs(fileSystem: FileSystem, dest: FsPath, user: String): Boolean = {
    val dirsToMake = collectMissingDirs(fileSystem, dest)
    while (!dirsToMake.empty()) {
      val path = dirsToMake.pop()
      fileSystem.mkdir(path)
      fileSystem match {
        case _: LocalFileSystem => fileSystem.setOwner(path, user)
        case _ => logger.info("doesn't need to call setOwner")
      }
    }
    true
  }

  /**
   * Recursively create a directory(递归创建目录), setting `user` as owner of every directory
   * created, regardless of filesystem type.
   *
   * @throws IOException on permission failure
   * @return always true when no exception is thrown
   */
  @throws[IOException]
  def mkdirsAndSetOwner(fileSystem: FileSystem, dest: FsPath, user: String): Boolean = {
    val dirsToMake = collectMissingDirs(fileSystem, dest)
    while (!dirsToMake.empty()) {
      val path = dirsToMake.pop()
      fileSystem.mkdir(path)
      fileSystem.setOwner(path, user)
    }
    true
  }

}
/**
 * Configuration entries for the linkis-storage module. Each value is a `CommonVars`
 * definition pairing a property key with its default value; actual values are resolved
 * through the CommonVars mechanism at runtime.
 */
object StorageConfiguration {

  // Proxy user for storage access; "${UM}" is presumably a placeholder substituted at
  // resolution time — TODO confirm substitution semantics against CommonVars.
  val PROXY_USER = CommonVars("wds.linkis.storage.proxy.user", "${UM}")

  // Root (admin) users per filesystem flavour.
  val STORAGE_ROOT_USER = CommonVars("wds.linkis.storage.root.user", "hadoop")

  val HDFS_ROOT_USER = CommonVars("wds.linkis.storage.hdfs.root.user", "hadoop")

  val LOCAL_ROOT_USER = CommonVars("wds.linkis.storage.local.root.user", "root")

  // Default group for files created through the storage layer.
  val STORAGE_USER_GROUP = CommonVars("wds.linkis.storage.fileSystem.group", "bdap")

  // Charset used when writing result-set files.
  val STORAGE_RS_FILE_TYPE = CommonVars("wds.linkis.storage.rs.file.type", "utf-8")

  // File extension for result-set (dolphin) files.
  val STORAGE_RS_FILE_SUFFIX = CommonVars("wds.linkis.storage.rs.file.suffix", ".dolphin")

  val LINKIS_STORAGE_FS_LABEL = CommonVars("linkis.storage.default.fs.label", "linkis-storage")

  // Recognised result-set type markers.
  val ResultTypes = List("%TEXT", "%TABLE", "%HTML", "%IMG", "%ANGULAR", "%SVG")

  // Package prefix prepended to the short class names listed in STORAGE_RESULT_SET_CLASSES.
  val STORAGE_RESULT_SET_PACKAGE =
    CommonVars("wds.linkis.storage.result.set.package", "org.apache.linkis.storage.resultset")

  val STORAGE_RESULT_SET_CLASSES = CommonVars(
    "wds.linkis.storage.result.set.classes",
    "txt.TextResultSet,table.TableResultSet,io.IOResultSet,html.HtmlResultSet,picture.PictureResultSet"
  )

  // Fully-qualified factory classes used by FSFactory to build filesystem instances.
  val STORAGE_BUILD_FS_CLASSES = CommonVars(
    "wds.linkis.storage.build.fs.classes",
    "org.apache.linkis.storage.factory.impl.BuildHDFSFileSystem,org.apache.linkis.storage.factory.impl.BuildLocalFileSystem"
  )

  val IS_SHARE_NODE = CommonVars("wds.linkis.storage.is.share.node", true)

  // Whether storage I/O goes through the IO proxy (see StorageUtils.isIOProxy).
  val ENABLE_IO_PROXY = CommonVars("wds.linkis.storage.enable.io.proxy", false)

  val IO_USER = CommonVars("wds.linkis.storage.io.user", "root")
  // Expiry in milliseconds (default 10 minutes); note the key name says "num" — TODO confirm.
  val IO_FS_EXPIRE_TIME = CommonVars("wds.linkis.storage.io.fs.num", 1000 * 60 * 10)

  val IO_PROXY_READ_FETCH_SIZE =
    CommonVars("wds.linkis.storage.io.read.fetch.size", new ByteType("100k"))

  val IO_PROXY_WRITE_CACHE_SIZE =
    CommonVars("wds.linkis.storage.io.write.cache.size", new ByteType("64k"))

  val IO_DEFAULT_CREATOR = CommonVars("wds.linkis.storage.io.default.creator", "IDE")
  // Sentinel value signalling that an IO filesystem must be re-initialised.
  val IO_FS_RE_INIT = CommonVars("wds.linkis.storage.io.fs.re.init", "re-init")

  val IO_INIT_RETRY_LIMIT = CommonVars("wds.linkis.storage.io.init.retry.limit", 10)

  val STORAGE_HDFS_GROUP = CommonVars("wds.linkis.storage.fileSystem.hdfs.group", "hadoop")

  // Max fraction digits used by StorageUtils.doubleToString when formatting doubles.
  val DOUBLE_FRACTION_LEN = CommonVars[Int]("wds.linkis.double.fraction.length", 30)

  val HDFS_PATH_PREFIX_CHECK_ON =
    CommonVars[Boolean]("wds.linkis.storage.hdfs.prefix_check.enable", true)

  // NOTE(review): key contains a typo ("prefxi"); preserved as-is for backward
  // compatibility with existing deployments that set this property.
  val HDFS_PATH_PREFIX_REMOVE = CommonVars[Boolean]("wds.linkis.storage.hdfs.prefxi.remove", true)

  // NOTE(review): identifier typo ("DISBALE") preserved — it is referenced by callers and
  // tests; renaming would break source compatibility.
  val FS_CHECKSUM_DISBALE =
    CommonVars[java.lang.Boolean]("linkis.fs.hdfs.impl.disable.checksum", false)

}
/**
 * Command-line tool class used to exercise the packaged storage jar for testing (工具类,
 * 用于做storage jar包打出来做测试用).
 */
object StorageHelper extends Logging {

  def main(args: Array[String]): Unit = {
    // Require a method name plus at least one parameter; previously the usage message was
    // logged but execution continued and crashed on args(0)/empty params.
    if (args.length < 2) {
      logger.info("Usage method params eg:getTableResLines path")
      return
    }
    val method = args(0)
    val params = args.slice(1, args.length)
    // NOTE(review): deliberate delay, presumably to allow attaching tools before work
    // starts — TODO confirm still needed.
    Thread.sleep(10000L)

    method match {
      case "getTableResLines" => getTableResLines(params)
      case "getTableRes" => getTableRes(params)
      case "createNewFile" => createNewFile(params)
      case _ => logger.info("There is no such method")
    }
  }

  /**
   * Log the column metadata and the number of rows of a table result-set file
   * (获得表格结果集文件行数).
   *
   * @param args args(0) is the result file path
   */
  def getTableResLines(args: Array[String]): Unit = {
    val resPath = StorageUtils.getFsPath(args(0))
    val resultSetFactory = ResultSetFactory.getInstance
    val resultSet = resultSetFactory.getResultSetByType(ResultSetFactory.TABLE_TYPE)
    val fs = FSFactory.getFs(resPath)
    fs.init(null)
    val reader = ResultSetReader.getResultSetReader(resultSet, fs.read(resPath))
    try {
      val rmetaData = reader.getMetaData
      rmetaData.asInstanceOf[TableMetaData].columns.foreach(column => logger.info(column.toString))
      var num = 0
      Thread.sleep(10000L)
      while (reader.hasNext) {
        reader.getRecord
        num = num + 1
      }
      logger.info(num.toString)
    } finally {
      // Always release the reader and filesystem (previously leaked on exceptions).
      reader.close()
      fs.close()
    }
  }

  /**
   * Log column metadata and up to 10 rows of a table result-set file, starting after row
   * `len`.
   *
   * @param args args(0) is the result file path, args(1) the number of rows to skip
   */
  def getTableRes(args: Array[String]): Unit = {
    val len = Integer.parseInt(args(1))
    val max = len + 10
    val resPath = StorageUtils.getFsPath(args(0))
    val resultSetFactory = ResultSetFactory.getInstance
    val resultSet = resultSetFactory.getResultSetByType(ResultSetFactory.TABLE_TYPE)
    val fs = FSFactory.getFs(resPath)
    fs.init(null)
    val reader = ResultSetReader.getResultSetReader(resultSet, fs.read(resPath))
    try {
      val rmetaData = reader.getMetaData
      rmetaData.asInstanceOf[TableMetaData].columns.foreach(column => logger.info(column.toString))
      rmetaData
        .asInstanceOf[TableMetaData]
        .columns
        .map(_.columnName + ",")
        .foreach(column => logger.info(column))
      var num = 0
      while (reader.hasNext) {
        num = num + 1
        if (num > max) return
        if (num > len) {
          // NOTE(review): rows with num <= len call hasNext without getRecord — assumes the
          // reader advances on hasNext; TODO confirm against the reader implementation.
          val record = reader.getRecord
          record.asInstanceOf[TableRecord].row.foreach { value =>
            logger.info(value.toString)
            logger.info(",")
          }
          logger.info("\n")
        }
      }
    } finally {
      // Runs on the early `return` above too, fixing the previous reader/fs leak.
      reader.close()
      fs.close()
    }
  }

  /**
   * Create a new file at the given path as the current JVM user.
   *
   * @param args args(0) is the path to create
   */
  def createNewFile(args: Array[String]): Unit = {
    val resPath = StorageUtils.getFsPath(args(0))
    val proxyUser = StorageUtils.getJvmUser
    FileSystemUtils.createNewFile(resPath, proxyUser, true)
    logger.info("success")
  }

}
/**
 * Shared helpers for the storage module: path/schema handling, class loading, result-set
 * (de)serialisation, double formatting and quiet resource closing.
 */
object StorageUtils extends Logging {

  // Filesystem type identifiers.
  val HDFS = "hdfs"
  val FILE = "file"

  // URI schema prefixes for the two supported filesystems.
  val FILE_SCHEMA = "file://"
  val HDFS_SCHEMA = "hdfs://"

  // Shared formatter for doubleToString: no digit grouping, fraction length taken from
  // configuration. NOTE(review): NumberFormat is not thread-safe — TODO confirm callers
  // are single-threaded or accept best-effort formatting.
  private val nf = NumberFormat.getInstance()
  nf.setGroupingUsed(false)
  nf.setMaximumFractionDigits(StorageConfiguration.DOUBLE_FRACTION_LEN.getValue)

  /**
   * Format a double with the configured fraction length; NaN is rendered literally.
   */
  def doubleToString(value: Double): String = {
    if (value.isNaN) {
      "NaN"
    } else {
      nf.format(value)
    }
  }

  /**
   * Instantiate each comma-separated class name and map it by the key derived via `op`.
   * Failures for individual classes are logged (Utils.tryAndError) and skipped; insertion
   * order is preserved.
   */
  def loadClass[T](classStr: String, op: T => String): Map[String, T] = {
    val _classes = classStr.split(",")
    val classes = mutable.LinkedHashMap[String, T]()
    for (clazz <- _classes) {
      Utils.tryAndError {
        val obj = Utils.getClassInstance[T](clazz.trim)
        classes += op(obj) -> obj
      }
    }
    classes.toMap
  }

  /**
   * Load the Class objects for comma-separated class names, optionally qualified by package
   * `pge` (通过传入子类和包名获得对应的class). Individual load failures are logged and skipped.
   *
   * @param classStr comma-separated class names (simple names when `pge` is given)
   * @param pge package prefix to prepend, or empty to use the names as-is
   * @param op derives the map key from each loaded Class
   */
  def loadClasses[T](
      classStr: String,
      pge: String,
      op: Class[T] => String
  ): Map[String, Class[T]] = {
    val _classes: Array[String] =
      if (StringUtils.isEmpty(pge)) classStr.split(",")
      else classStr.split(",").map { value: String => pge + "." + value }
    val classes = mutable.LinkedHashMap[String, Class[T]]()
    for (clazz <- _classes) {
      Utils.tryAndError({
        val _class =
          Thread.currentThread.getContextClassLoader.loadClass(clazz.trim).asInstanceOf[Class[T]]
        classes += op(_class) -> _class
      })
    }
    classes.toMap
  }

  /**
   * Get the suffix of the file name(获得文件名的后缀). Returns the whole file name when it
   * has no dot suffix (e.g. "README" -> "README").
   */
  def pathToSuffix(path: String): String = {
    val fileName = new File(path).getName
    if ((fileName != null) && (fileName.length > 0)) {
      val dot: Int = fileName.lastIndexOf('.')
      if ((dot > -1) && (dot < (fileName.length - 1))) return fileName.substring(dot + 1)
    }
    fileName
  }

  /**
   * Reflection calling method(反射调用方法): invoke `method` on `obj` with `args`.
   */
  def invoke(obj: Any, method: Method, args: Array[AnyRef]): Any = {
    method.invoke(obj, args)
  }

  /**
   * Serialize a string into a TEXT-type result set(序列化字符串为Text类型的结果集) and return
   * the serialized form; the writer is closed quietly.
   */
  def serializerStringToResult(value: String): String = {
    val resultSet = ResultSetFactory.getInstance.getResultSetByType(ResultSetFactory.TEXT_TYPE)
    val writer = ResultSetWriter.getResultSetWriter(resultSet, Long.MaxValue, null)
    val metaData = new LineMetaData()
    val record = new LineRecord(value)
    writer.addMetaData(metaData)
    writer.addRecord(record)
    val res = writer.toString()
    Utils.tryQuietly(writer.close())
    res
  }

  /**
   * Deserialize a TEXT-type result set back into the concatenation of its lines
   * (序列化text的结果集为字符串); the reader is closed quietly.
   */
  def deserializerResultToString(result: String): String = {
    val resultSet = ResultSetFactory.getInstance.getResultSetByType(ResultSetFactory.TEXT_TYPE)
    val reader = ResultSetReader.getResultSetReader(resultSet, result)
    // Metadata must be consumed before records can be read.
    reader.getMetaData
    val sb = new StringBuilder
    while (reader.hasNext) {
      val record = reader.getRecord.asInstanceOf[LineRecord]
      sb.append(record.getLine)
    }
    val value = sb.toString()
    Utils.tryQuietly(reader.close())
    value
  }

  // Null-tolerant close overloads; each delegates to the three-argument variant.
  def close(outputStream: OutputStream): Unit = {
    close(outputStream, null, null)
  }

  def close(inputStream: InputStream): Unit = {
    close(null, inputStream, null)
  }

  def close(fs: Fs): Unit = {
    close(null, null, fs)
  }

  /**
   * Close whichever of the three resources is non-null; each close is attempted even if an
   * earlier one fails (Utils.tryFinally with an empty finalizer).
   */
  def close(outputStream: OutputStream, inputStream: InputStream, fs: Fs): Unit = {
    Utils.tryFinally(if (outputStream != null) outputStream.close())()
    Utils.tryFinally(if (inputStream != null) inputStream.close())()
    Utils.tryFinally(if (fs != null) fs.close())()
  }

  def close(closeable: Closeable): Unit = {
    Utils.tryFinally(if (closeable != null) closeable.close())()
  }

  /** User name of the current JVM process. */
  def getJvmUser: String = System.getProperty("user.name")

  /**
   * True when a readable Hadoop configuration directory exists; otherwise throws.
   *
   * @throws StorageFatalException when the configured hadoop conf dir is missing or a file
   */
  def isHDFSNode: Boolean = {
    val confPath = new File(HadoopConf.hadoopConfDir)
    // TODO IO-client mode need return false
    if (!confPath.exists() || confPath.isFile) {
      throw new StorageFatalException(
        CONFIGURATION_NOT_READ.getErrorCode,
        CONFIGURATION_NOT_READ.getErrorDesc
      )
    } else true
  }

  /**
   * Returns the FsPath by determining whether the path is a schema. By default, the FsPath of
   * the file is returned. 通过判断path是否为schema来返回FsPath,默认返回file的FsPath
   */
  def getFsPath(path: String): FsPath = {
    if (path.startsWith(FILE_SCHEMA) || path.startsWith(HDFS_SCHEMA)) new FsPath(path)
    else {
      new FsPath(FILE_SCHEMA + path)
    }
  }

  /**
   * Read up to `len` bytes into `bytes`, looping until the buffer is full or end of stream.
   *
   * @return the number of bytes actually read
   */
  def readBytes(inputStream: InputStream, bytes: Array[Byte], len: Int): Int = {
    var count = 0
    var readLen = 0
    // When S3 is used to store result files, com.amazonaws.services.s3.model.S3InputStream
    // cannot correctly read .dolphin files; the loop condition must also include
    // readLen >= 0 (readLen goes negative after adding a -1 read count, ending the loop).
    while (readLen < len && readLen >= 0) {
      count = inputStream.read(bytes, readLen, len - readLen)
      // -1 with nothing available means genuine end of stream: return what we have.
      if (count == -1 && inputStream.available() < 1) return readLen
      readLen += count
    }
    readLen
  }

  /** Whether storage I/O is routed through the IO proxy (configuration driven). */
  def isIOProxy(): Boolean = {
    StorageConfiguration.ENABLE_IO_PROXY.getValue
  }

  /** True when the path's filesystem type is HDFS. */
  def isHDFSPath(fsPath: FsPath): Boolean = {
    HDFS.equals(fsPath.getFsType)
  }

}
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage; - -import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.Test; - -class LineMetaDataTest { - - @Test - public void testCloneMeta() { - LineMetaData origin = new LineMetaData("origin"); - LineMetaData copied = (LineMetaData) origin.cloneMeta(); - origin.setMetaData(origin.getMetaData().replace("o", "a")); - - System.out.println(origin.getMetaData()); - Assertions.assertThat(origin.getMetaData()).isEqualTo("arigin"); - System.out.println(copied.getMetaData()); - Assertions.assertThat(copied.getMetaData()).isEqualTo("origin"); - } -} diff --git a/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/resultset/StorageResultSetWriterFactoryTest.java b/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/resultset/StorageResultSetWriterFactoryTest.java deleted file mode 100644 index e7e79a7058..0000000000 --- a/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/resultset/StorageResultSetWriterFactoryTest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.resultset; - -import org.apache.linkis.common.io.MetaData; -import org.apache.linkis.common.io.Record; -import org.apache.linkis.common.io.resultset.ResultSet; -import org.apache.linkis.storage.LineMetaData; -import org.apache.linkis.storage.LineRecord; - -import java.io.*; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -class StorageResultSetWriterFactoryTest { - - @Test - void testResultSetWrite() throws IOException { - // storage write - ResultSet resultSetByType = - ResultSetFactory.getInstance().getResultSetByType(ResultSetFactory.TEXT_TYPE); - - org.apache.linkis.common.io.resultset.ResultSetWriter - writer = ResultSetWriterFactory.getResultSetWriter(resultSetByType, 100L, null); - - String value = "value"; - LineMetaData metaData = new LineMetaData(null); - LineRecord record = new LineRecord(value); - writer.addMetaData(metaData); - writer.addRecord(record); - writer.flush(); - writer.close(); - String res = writer.toString(); - writer.close(); - Assertions.assertEquals("dolphin00000000010000000004NULL0000000005value", res); - } -} diff --git a/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriterTest.java b/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriterTest.java index 2e1a30aa27..60a593665c 100644 --- a/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriterTest.java +++ 
b/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/script/writer/StorageScriptFsWriterTest.java @@ -25,6 +25,7 @@ import org.apache.linkis.storage.script.Variable; import org.apache.linkis.storage.script.VariableParser; import org.apache.linkis.storage.source.FileSource; +import org.apache.linkis.storage.source.FileSource$; import org.apache.commons.math3.util.Pair; @@ -37,6 +38,7 @@ import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -105,7 +107,7 @@ void TestSave() { Variable[] v = VariableParser.getVariables(params); List variableList = Arrays.stream(v) - .filter(var -> !StringUtils.isEmpty(var.getValue())) + .filter(var -> !StringUtils.isEmpty(var.value())) .collect(Collectors.toList()); try { @@ -136,8 +138,8 @@ void TestOpen() throws FileNotFoundException { InputStream inputStream = new FileInputStream(file); - FileSource fileSource = FileSource.create(new FsPath(fileName), inputStream); - Pair> collect = fileSource.collect()[0]; + FileSource fileSource = FileSource$.MODULE$.create(new FsPath(fileName), inputStream); + Pair> collect = fileSource.collect()[0]; String scriptRes = collect.getSecond().get(0)[0]; String metadataRes = new Gson().toJson(collect.getFirst()); diff --git a/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/source/ResultsetFileSourceTest.java b/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/source/ResultsetFileSourceTest.java deleted file mode 100644 index 1210c64e03..0000000000 --- a/linkis-commons/linkis-storage/src/test/java/org/apache/linkis/storage/source/ResultsetFileSourceTest.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.storage.source; - -import org.apache.linkis.common.io.Fs; -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.storage.FSFactory; -import org.apache.linkis.storage.csv.CSVFsWriter; - -import java.io.IOException; -import java.io.OutputStream; - -import org.junit.jupiter.api.Test; - -class ResultsetFileSourceTest { - - @Test - public void testWriter() throws IOException { - String filePath = this.getClass().getResource("/result-read-test.dolphin").getFile().toString(); - FsPath sourceFsPath = new FsPath(filePath); - Fs sourceFs = FSFactory.getFs(sourceFsPath); - sourceFs.init(null); - - FsPath destFsPath = new FsPath(filePath + ".result"); - Fs destFs = FSFactory.getFs(destFsPath); - destFs.init(null); - OutputStream outputStream = destFs.write(destFsPath, true); - - CSVFsWriter cSVFsWriter = CSVFsWriter.getCSVFSWriter("UTF-8", ",", false, outputStream); - FileSource fileSource = FileSource.create(sourceFsPath, sourceFs); - fileSource.addParams("nullValue", "NULL").write(cSVFsWriter); - - cSVFsWriter.close(); - } -} diff --git a/linkis-commons/linkis-storage/src/test/resources/result-read-test.dolphin b/linkis-commons/linkis-storage/src/test/resources/result-read-test.dolphin deleted file mode 100644 index 0c48c045b0..0000000000 --- 
a/linkis-commons/linkis-storage/src/test/resources/result-read-test.dolphin +++ /dev/null @@ -1 +0,0 @@ -dolphin0000000002000000002900000000063,6,4,_c0bigintNULL000000001400000000022,55 \ No newline at end of file diff --git a/linkis-commons/linkis-storage/src/test/resources/storage-read-test.dolphin b/linkis-commons/linkis-storage/src/test/resources/storage-read-test.dolphin deleted file mode 100644 index b94e3d96fb..0000000000 --- a/linkis-commons/linkis-storage/src/test/resources/storage-read-test.dolphin +++ /dev/null @@ -1 +0,0 @@ -dolphin00000000010000000004NULL0000000011hello world \ No newline at end of file diff --git a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala index 3bfc35b17c..e0d3d7efb6 100644 --- a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala +++ b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala @@ -25,7 +25,7 @@ class DataTypeTest { @DisplayName("constTest") def constTest(): Unit = { - val nullvalue = DataType.NULL_VALUE + val nullvalue = Dolphin.NULL val lowcasenullvalue = DataType.LOWCASE_NULL_VALUE Assertions.assertEquals("NULL", nullvalue) @@ -60,4 +60,19 @@ class DataTypeTest { } + @Test + @DisplayName("toValueTest") + def toValueTest(): Unit = { + val dateType = DataType.toDataType("double") + val str = DataType.toValue(dateType, "NaN") + Assertions.assertNotNull(str) + } + + @Test + @DisplayName("decimalTest") + def decimalTest(): Unit = { + val dateType = DataType.toDataType("decimal(10, 8)") + Assertions.assertTrue(dateType.typeName.equals("decimal")) + } + } diff --git a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala index 
6534b25c6f..ecd5c89cf9 100644 --- a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala +++ b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala @@ -46,7 +46,6 @@ class StorageConfigurationTest { val doublefractionlen = StorageConfiguration.DOUBLE_FRACTION_LEN.getValue val hdfspathprefixcheckon = StorageConfiguration.HDFS_PATH_PREFIX_CHECK_ON.getValue val hdfspathprefixremove = StorageConfiguration.HDFS_PATH_PREFIX_REMOVE.getValue - val fscachedisable = StorageConfiguration.FS_CACHE_DISABLE.getValue val fschecksumdisbale = StorageConfiguration.FS_CHECKSUM_DISBALE.getValue Assertions.assertEquals("hadoop", storagerootuser) @@ -62,8 +61,7 @@ class StorageConfigurationTest { storageresultsetclasses ) Assertions.assertEquals( - "org.apache.linkis.storage.factory.impl.BuildHDFSFileSystem,org.apache.linkis.storage.factory.impl.BuildLocalFileSystem," + - "org.apache.linkis.storage.factory.impl.BuildOSSSystem,org.apache.linkis.storage.factory.impl.BuildS3FileSystem", + "org.apache.linkis.storage.factory.impl.BuildHDFSFileSystem,org.apache.linkis.storage.factory.impl.BuildLocalFileSystem", storagebuildfsclasses ) Assertions.assertTrue(issharenode) @@ -77,7 +75,6 @@ class StorageConfigurationTest { Assertions.assertTrue(30 == doublefractionlen) Assertions.assertTrue(hdfspathprefixcheckon) Assertions.assertTrue(hdfspathprefixremove) - Assertions.assertFalse(fscachedisable) Assertions.assertFalse(fschecksumdisbale) } diff --git a/linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/WorkspaceClientImplTest.scala b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageUtilsTest.scala similarity index 72% rename from linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/WorkspaceClientImplTest.scala rename to 
linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageUtilsTest.scala index 83408ed7e8..6ae15782ee 100644 --- a/linkis-public-enhancements/linkis-pes-client/src/test/scala/org/apache/linkis/filesystem/WorkspaceClientImplTest.scala +++ b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageUtilsTest.scala @@ -15,22 +15,18 @@ * limitations under the License. */ -package org.apache.linkis.filesystem +package org.apache.linkis.storage.utils import org.junit.jupiter.api.{Assertions, DisplayName, Test} -class WorkspaceClientImplTest { +class StorageUtilsTest { @Test - @DisplayName("createObject") - def createObject(): Unit = { + @DisplayName("doubleToStringTest") + def doubleToStringTest(): Unit = { + val str = StorageUtils.doubleToString(Double.NaN) + Assertions.assertEquals("NaN", str) - val user = "hadoop" - val token = "abc" - val gatewayAddress = "127.0.0.1:9001" - - val client = new WorkspaceClientImpl(user, token, gatewayAddress) - Assertions.assertNotNull(client) } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/linkis-cli.properties b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/linkis-cli.properties index 0259bfc6dc..a792c9ef69 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/linkis-cli.properties +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/linkis-cli.properties @@ -17,7 +17,7 @@ wds.linkis.client.common.creator=LINKISCLI wds.linkis.client.common.gatewayUrl=http://127.0.0.1:9001 wds.linkis.client.common.authStrategy=token wds.linkis.client.common.tokenKey=Validation-Code -wds.linkis.client.common.tokenValue=BML-AUTH +wds.linkis.client.common.tokenValue=LINKIS-AUTH wds.linkis.client.noncustomizable.enable.user.specification=true #wds.linkis.client.noncustomizable.enable.proxy.user=true #wds.linkis.client.common.submitUser diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/linkis-cli.properties b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/linkis-cli.properties index 90208811bf..8d20858645 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/linkis-cli.properties +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/linkis-cli.properties @@ -17,7 +17,7 @@ wds.linkis.client.common.gatewayUrl=http://127.0.0.1:9001 wds.linkis.client.common.authStrategy=token wds.linkis.client.common.tokenKey=Validation-Code -wds.linkis.client.common.tokenValue=BML-AUTH +wds.linkis.client.common.tokenValue=LINKIS-AUTH # #wds.linkis.client.common.submitUser #wds.linkis.client.common.submitPassword diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/test/java/org/apache/linkis/ujes/client/JobObserveActionTest.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/test/java/org/apache/linkis/ujes/client/JobObserveActionTest.scala index b55f42159b..1dec59387f 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/test/java/org/apache/linkis/ujes/client/JobObserveActionTest.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/test/java/org/apache/linkis/ujes/client/JobObserveActionTest.scala @@ -29,7 +29,7 @@ import java.util.concurrent.TimeUnit @Deprecated object JobObserveActionTest extends App { - val bmlToken = CommonVars("wds.linkis.bml.auth.token.value", "BML-AUTH").getValue + val bmlToken = CommonVars("wds.linkis.bml.auth.token.value", "LINKIS-AUTH").getValue val clientConfig = DWSClientConfigBuilder.newBuilder() .addServerUrl("127.0.0.1:9001") // Change to test gateway address diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/utils/ECPathUtils.java 
b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/utils/ECPathUtils.java deleted file mode 100644 index 739c256af1..0000000000 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/utils/ECPathUtils.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.governance.common.utils; - -import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; -import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.time.DateFormatUtils; - -import java.io.File; -import java.nio.file.Paths; - -public class ECPathUtils { - - public static String getECWOrkDirPathSuffix(String user, String ticketId, String engineType) { - String engineTypeRes = ""; - if (StringUtils.isNotBlank(engineType)) { - engineTypeRes = engineType; - } - File file = - Paths.get( - user, DateFormatUtils.format(System.currentTimeMillis(), "yyyyMMdd"), engineTypeRes) - .toFile(); - return file.getPath() + File.separator + ticketId; - } - - public static String getECLogDirSuffix( - EngineTypeLabel engineTypeLabel, UserCreatorLabel userCreatorLabel, String ticketId) { - if (null == engineTypeLabel || null == userCreatorLabel) { - return ""; - } - String ecwOrkDirPathSuffix = - ECPathUtils.getECWOrkDirPathSuffix( - userCreatorLabel.getUser(), ticketId, engineTypeLabel.getEngineType()); - return ecwOrkDirPathSuffix + File.separator + "logs"; - } -} diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernaceCommonConf.scala similarity index 81% rename from linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala rename to linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernaceCommonConf.scala index b8b156173b..b0c9dda666 100644 --- 
a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernaceCommonConf.scala @@ -18,7 +18,7 @@ package org.apache.linkis.governance.common.conf import org.apache.linkis.common.conf.{CommonVars, Configuration} -import org.apache.linkis.manager.label.conf.LabelCommonConfig +import org.apache.linkis.governance.common.constant.ec.ECConstants object GovernanceCommonConf { @@ -26,11 +26,9 @@ object GovernanceCommonConf { val WILDCARD_CONSTANT = "*" - val SPARK_ENGINE_VERSION = - CommonVars("wds.linkis.spark.engine.version", LabelCommonConfig.SPARK_ENGINE_VERSION.getValue) + val SPARK_ENGINE_VERSION = CommonVars("wds.linkis.spark.engine.version", "2.4.3") - val HIVE_ENGINE_VERSION = - CommonVars("wds.linkis.hive.engine.version", LabelCommonConfig.HIVE_ENGINE_VERSION.getValue) + val HIVE_ENGINE_VERSION = CommonVars("wds.linkis.hive.engine.version", "1.2.1") val PYTHON_ENGINE_VERSION = CommonVars("wds.linkis.python.engine.version", "python2") @@ -46,9 +44,7 @@ object GovernanceCommonConf { val ENGINE_APPLICATION_MANAGER_SPRING_NAME = CommonVars("wds.linkis.application.manager.name", "linkis-cg-linkismanager") - val ENGINE_CONN_PORT_RANGE = CommonVars("linkis.engineconn.port.range", "-") - - val ENGINE_CONN_DEBUG_PORT_RANGE = CommonVars("linkis.engineconn.debug.port.range", "-") + val ENGINE_CONN_PORT_RANGE = CommonVars("wds.linkis.engineconn.port.range", "-") val MANAGER_SERVICE_NAME = CommonVars( @@ -90,4 +86,13 @@ object GovernanceCommonConf { val EC_APP_MANAGE_MODE = CommonVars("linkis.ec.app.manage.mode", "attach") + /** + * DEFAULT_LOGPATH_PREFIX is the prefix that represents the default log storage path + * DEFAULT_LOGPATH_PREFIX 是表示默认的日志存储路径的前缀 和 结果集的前缀 + */ + val DEFAULT_LOGPATH_PREFIX = CommonVars[String]( + 
"wds.linkis.entrance.config.log.path", + CommonVars[String]("wds.linkis.filesystem.hdfs.root.path").getValue + ).getValue + } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/job/JobRequestConstants.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/job/JobRequestConstants.scala index 9f11419fb3..110b02b8fe 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/job/JobRequestConstants.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/job/JobRequestConstants.scala @@ -17,11 +17,6 @@ package org.apache.linkis.governance.common.constant.job -/** - * @date - * 2021/3/17 - * @description - */ object JobRequestConstants { val JOB_ID = "jobId" diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala index ec7bb9e80a..4168308bd6 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala @@ -17,15 +17,14 @@ package org.apache.linkis.governance.common.exception -import org.apache.linkis.common.exception.{ExceptionLevel, LinkisRuntimeException} +import org.apache.linkis.common.exception.ErrorException class GovernanceErrorException(errorCode: Int, errorMsg: String) - extends 
LinkisRuntimeException(errorCode, errorMsg) { + extends ErrorException(errorCode, errorMsg) { def this(errorCode: Int, errorMsg: String, cause: Throwable) = { this(errorCode, errorMsg) initCause(cause) } - override def getLevel: ExceptionLevel = ExceptionLevel.ERROR } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/RequestTask.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/RequestTask.scala index 17c01fcfc2..4d0b8952ca 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/RequestTask.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/RequestTask.scala @@ -18,7 +18,6 @@ package org.apache.linkis.governance.common.protocol.task import org.apache.linkis.manager.label.entity.Label -import org.apache.linkis.protocol.RetryableProtocol import org.apache.linkis.protocol.message.RequestProtocol import java.util @@ -92,7 +91,7 @@ trait TaskState extends RequestProtocol {} case class RequestTaskPause(execId: String) extends TaskState case class RequestTaskResume(execId: String) extends TaskState -case class RequestTaskKill(execId: String) extends TaskState with RetryableProtocol +case class RequestTaskKill(execId: String) extends TaskState /** * The status of requesting job execution, mainly used for:
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseEngineConnPid.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseEngineConnPid.scala index ef1355d580..971bdf247b 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseEngineConnPid.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseEngineConnPid.scala @@ -25,4 +25,5 @@ import org.apache.linkis.protocol.message.RequestProtocol * @param pid */ case class ResponseEngineConnPid(serviceInstance: ServiceInstance, pid: String, ticketId: String) - extends RequestProtocol + extends RetryableProtocol + with RequestProtocol diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseTaskExecute.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseTaskExecute.scala index a4a7837da0..f59761dc43 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseTaskExecute.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/task/ResponseTaskExecute.scala @@ -24,8 +24,6 @@ import org.apache.linkis.protocol.message.RequestProtocol import java.util -case class ResponseTaskExecute(execId: String) - case class ResponseTaskProgress( execId: String, progress: Float, @@ -41,31 +39,6 @@ case class EngineConcurrentInfo( failedTasks: Int ) -case class EngineOverloadInfo(maxMemory: Long, 
usedMemory: Long, systemCPUUsed: Float) - -case class ResponseEngineStatusChanged( - instance: String, - fromStatus: ExecutionNodeStatus, - toStatus: ExecutionNodeStatus, - overload: EngineOverloadInfo, - concurrent: EngineConcurrentInfo -) extends BroadcastProtocol - -case class ResponseEngineInfo( - createEntranceInstance: String, - creator: String, - user: String, - properties: util.Map[String, String] -) - -case class ResponseEngineStatus( - instance: String, - status: ExecutionNodeStatus, - overload: EngineOverloadInfo, - concurrent: EngineConcurrentInfo, - engineInfo: ResponseEngineInfo -) - case class ResponseTaskLog(execId: String, log: String) extends RequestProtocol case class ResponseTaskError(execId: String, errorMsg: String) diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala new file mode 100644 index 0000000000..236046f3d4 --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.governance.common.utils + +import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel} + +import org.apache.commons.lang3.StringUtils +import org.apache.commons.lang3.time.DateFormatUtils + +import java.io.File +import java.nio.file.Paths + +object ECPathUtils { + + def getECWOrkDirPathSuffix( + user: String, + ticketId: String, + engineType: String, + timeStamp: Long = System.currentTimeMillis() + ): String = { + val suffix = if (StringUtils.isBlank(engineType)) { + Paths + .get(user, DateFormatUtils.format(System.currentTimeMillis(), "yyyyMMdd")) + .toFile + .getPath + } else { + Paths + .get(user, DateFormatUtils.format(System.currentTimeMillis(), "yyyyMMdd"), engineType) + .toFile + .getPath + } + suffix + File.separator + ticketId + } + + def getECLogDirSuffix( + engineTypeLabel: EngineTypeLabel, + userCreatorLabel: UserCreatorLabel, + ticketId: String + ): String = { + if (null == engineTypeLabel || null == userCreatorLabel) { + return "" + } + val suffix = ECPathUtils.getECWOrkDirPathSuffix( + userCreatorLabel.getUser, + ticketId, + engineTypeLabel.getEngineType + ) + suffix + File.separator + "logs" + } + +} diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/EngineConnArguments.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/EngineConnArguments.scala index 684dd371a1..31d9b1e9b2 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/EngineConnArguments.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/EngineConnArguments.scala @@ -145,9 +145,13 @@ class 
DefaultEngineConnArgumentsParser extends EngineConnArgumentsParser { val options = ArrayBuffer[String]() def write(confMap: Map[String, String], optionType: String): Unit = confMap.foreach { case (key, value) => - if (StringUtils.isNotEmpty(key) && StringUtils.isNotEmpty(value)) { + var realValue = value + if (key.startsWith("label") && StringUtils.isEmpty(realValue)) { + realValue = "true" + } + if (StringUtils.isNotEmpty(key) && StringUtils.isNotEmpty(realValue)) { options += optionType - options += (key + "=" + value) + options += (key + "=" + realValue) } } write(engineConnArguments.getEngineConnConfMap, ENGINE_CONN_CONF) diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala index ddcb17a3b2..ae83749ecb 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala @@ -24,11 +24,14 @@ import org.apache.linkis.governance.common.conf.GovernanceCommonConf import org.apache.commons.lang3.StringUtils import java.io.File +import java.text.SimpleDateFormat import java.util -import java.util.{ArrayList, List} +import java.util.{ArrayList, Date, List} object GovernanceUtils extends Logging { + val LINKIS_DEFAULT_RES_CREATOR = "linkis_default" + def killProcess(pid: String, desc: String, isSudo: Boolean): Unit = { val subProcessKillScriptPath = Configuration.getLinkisHome() + "/sbin/kill-process-by-pid.sh" if ( @@ -121,4 +124,26 @@ object GovernanceUtils extends Logging { } } + /** + * get result path parentPath: resPrefix + dateStr + result + creator subPath: parentPath + + * 
executeUser + taskid + filename + * + * @param creator + * @return + */ + def getResultParentPath(creator: String): String = { + val resPrefix = GovernanceCommonConf.DEFAULT_LOGPATH_PREFIX + val resStb = new StringBuilder() + if (resStb.endsWith("/")) { + resStb.append(resPrefix) + } else { + resStb.append(resPrefix).append("/") + } + val dateFormat = new SimpleDateFormat("yyyy-MM-dd") + val date = new Date(System.currentTimeMillis) + val dateString = dateFormat.format(date) + resStb.append("result").append("/").append(dateString).append("/").append(creator) + resStb.toString() + } + } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/OnceExecutorContentUtils.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/OnceExecutorContentUtils.scala index 2c426339b0..dd4b9bcffa 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/OnceExecutorContentUtils.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/OnceExecutorContentUtils.scala @@ -58,14 +58,14 @@ object OnceExecutorContentUtils { def mapToContent(contentMap: util.Map[String, Object]): OnceExecutorContent = { val onceExecutorContent = new OnceExecutorContent - def getOrNull(key: String): util.Map[String, Object] = contentMap.get(key) match { + implicit def getOrNull(key: String): util.Map[String, Object] = contentMap.get(key) match { case map: util.Map[String, Object] => map case _ => null } - onceExecutorContent.setJobContent(getOrNull(TaskConstant.JOB_CONTENT)) - onceExecutorContent.setRuntimeMap(getOrNull(TaskConstant.PARAMS_CONFIGURATION_RUNTIME)) - onceExecutorContent.setSourceMap(getOrNull(TaskConstant.SOURCE)) - 
onceExecutorContent.setVariableMap(getOrNull(TaskConstant.PARAMS_VARIABLE)) + onceExecutorContent.setJobContent(TaskConstant.JOB_CONTENT) + onceExecutorContent.setRuntimeMap(TaskConstant.PARAMS_CONFIGURATION_RUNTIME) + onceExecutorContent.setSourceMap(TaskConstant.SOURCE) + onceExecutorContent.setVariableMap(TaskConstant.PARAMS_VARIABLE) onceExecutorContent } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConfTest.scala b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConfTest.scala index 96b6e9a1c2..7988a6c95d 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConfTest.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConfTest.scala @@ -42,8 +42,8 @@ class GovernanceCommonConfTest { val errorcodedesclen = GovernanceCommonConf.ERROR_CODE_DESC_LEN Assertions.assertEquals("wds.linkis.rm", conffilterrm) - Assertions.assertEquals("3.2.1", sparkengineversion) - Assertions.assertEquals("3.1.3", hiveengineversion) + Assertions.assertEquals("2.4.3", sparkengineversion) + Assertions.assertEquals("1.2.1", hiveengineversion) Assertions.assertEquals("python2", pythonengineversion) Assertions.assertFalse(pythoncodeparserswitch) Assertions.assertFalse(scalacodeparserswitch) diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala index 5b23d01fc3..cc79e24d4f 100644 --- 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala @@ -42,7 +42,6 @@ import org.apache.commons.lang3.StringUtils import java.io.{File, InputStream, OutputStream} import scala.collection.JavaConverters._ -import scala.collection.mutable trait ProcessEngineConnLaunch extends EngineConnLaunch with Logging { @@ -102,12 +101,7 @@ trait ProcessEngineConnLaunch extends EngineConnLaunch with Logging { case HIVE_CONF_DIR => putIfExists(HIVE_CONF_DIR) case JAVA_HOME => putIfExists(JAVA_HOME) case RANDOM_PORT => - environment.put( - RANDOM_PORT.toString, - PortUtils - .findAvailPortByRange(GovernanceCommonConf.ENGINE_CONN_DEBUG_PORT_RANGE.getValue) - .toString - ) + environment.put(RANDOM_PORT.toString, PortUtils.findAvailPort().toString) case PREFER_IP_ADDRESS => environment.put(PREFER_IP_ADDRESS.toString, Configuration.PREFER_IP_ADDRESS.toString) case ENGINECONN_ENVKEYS => @@ -157,19 +151,15 @@ trait ProcessEngineConnLaunch extends EngineConnLaunch with Logging { def getPid(): Option[String] = None protected def getCommandArgs: Array[String] = { - val recordMap: mutable.Map[String, String] = mutable.Map() - request.creationDesc.properties.asScala.foreach { case (k, v) => - if (k.contains(" ") || (v != null && v.contains(" "))) recordMap.put(k, v) - } - if (recordMap.size > 0) { - val keyAndValue = new StringBuilder - for (kv <- recordMap) { - keyAndValue.append(s"${kv._1}->${kv._2};") - } + if ( + request.creationDesc.properties.asScala.exists { case (k, v) => + k.contains(" ") || (v != null && v.contains(" ")) + } + ) { throw new ErrorException( 30000, - s"Startup parameters contain spaces! 
The key and value values of all its parameters are(启动参数中包含空格!其所有参数的key和value值分别为):${keyAndValue.toString()}" - ) + "Startup parameters contain spaces!(启动参数中包含空格!)" + ) // TODO exception } val arguments = EngineConnArgumentsBuilder.newBuilder() engineConnPort = PortUtils diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala index 21dd6cd706..8552020493 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala @@ -44,7 +44,7 @@ object PortUtils extends Logging { val ports = portRange.split(separator, -1) if (!ports(0).isEmpty) start = ports(0).toInt if (!ports(1).isEmpty) end = ports(1).toInt - val availablePort = start to end find { port => + val availablePort = start until end find { port => try { new ServerSocket(port).close() true diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml index 483190a75d..58cbeee331 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/assembly/distribution.xml @@ -244,9 +244,6 @@ org.slf4j:jul-to-slf4j:jar org.slf4j:slf4j-api:jar org.springframework.boot:spring-boot:jar - org.springframework.boot:spring-boot-actuator:jar - 
org.springframework.boot:spring-boot-actuator-autoconfigure:jar - org.springframework.boot:spring-boot-autoconfigure:jar org.springframework.boot:spring-boot-starter:jar org.springframework.boot:spring-boot-starter-actuator:jar org.springframework.boot:spring-boot-starter-aop:jar diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/restful/ECMRestfulApi.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/restful/ECMRestfulApi.java index d4fc0a49e1..2de878a24c 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/restful/ECMRestfulApi.java +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/restful/ECMRestfulApi.java @@ -152,7 +152,7 @@ public Message downloadEngineLog( outputStream.write(buffer, 0, bytesRead); } } catch (IOException e) { - logger.warn("Download EngineLog Failed Msg :", e); + logger.error("Download EngineLog Failed Msg :", e); response.reset(); response.setCharacterEncoding(Consts.UTF_8.toString()); response.setContentType("text/plain; charset=utf-8"); diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/scheduled/EcmClearTask.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/scheduled/EcmClearTask.java index 111ad896e0..409f0d7e98 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/scheduled/EcmClearTask.java +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/scheduled/EcmClearTask.java @@ -46,7 
+46,7 @@ public void run() { cmdlist.add("sh"); cmdlist.add(shellPath + "linkis-ec-clear.sh"); try { - Utils.exec(cmdlist.toArray(new String[0]), 3000L); + Utils.exec(cmdlist.toArray(new String[0]), 1800000L); } catch (Exception e) { logger.warn("Shell linkis-ec-clear.sh execution failed, msg:" + e.getMessage()); } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.java deleted file mode 100644 index 3d62df5c90..0000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.java +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ecm.server.operator; - -import org.apache.linkis.common.conf.CommonVars; -import org.apache.linkis.common.utils.Utils; -import org.apache.linkis.ecm.server.conf.ECMConfiguration; -import org.apache.linkis.ecm.server.exception.ECMErrorException; -import org.apache.linkis.manager.common.operator.Operator; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.io.input.ReversedLinesFileReader; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.tuple.Triple; - -import java.io.File; -import java.io.IOException; -import java.io.RandomAccessFile; -import java.nio.charset.Charset; -import java.nio.charset.StandardCharsets; -import java.text.MessageFormat; -import java.util.*; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary.*; - -public class EngineConnLogOperator implements Operator { - private static final Logger logger = LoggerFactory.getLogger(EngineConnLogOperator.class); - - public static final String OPERATOR_NAME = "engineConnLog"; - public static final CommonVars LOG_FILE_NAME = - CommonVars.apply("linkis.engineconn.log.filename", "stdout"); - public static final CommonVars MAX_LOG_FETCH_SIZE = - CommonVars.apply("linkis.engineconn.log.fetch.lines.max", 5000); - public static final CommonVars MAX_LOG_TAIL_START_SIZE = - CommonVars.apply("linkis.engineconn.log.tail.start.size"); - public static final CommonVars MULTILINE_PATTERN = - CommonVars.apply( - "linkis.engineconn.log.multiline.pattern", - "^\\d{4}-\\d{2}-\\d{2}\\s+\\d{2}:\\d{2}:\\d{2}\\.\\d{3}"); - public static final CommonVars MULTILINE_MAX = - CommonVars.apply("linkis.engineconn.log.multiline.max", 500); - - @Override - public String[] getNames() { - return new String[] {OPERATOR_NAME}; - } - - @Override - public Map apply(Map parameters) { - File logPath = 
getLogPath(parameters); - int lastRows = getAs(parameters, "lastRows", 0); - int pageSize = getAs(parameters, "pageSize", 100); - int fromLine = getAs(parameters, "fromLine", 1); - boolean enableTail = getAs(parameters, "enableTail", false); - if (lastRows > EngineConnLogOperator.MAX_LOG_FETCH_SIZE.getValue()) { - throw new ECMErrorException( - CANNOT_FETCH_MORE_THAN.getErrorCode(), - MessageFormat.format( - CANNOT_FETCH_MORE_THAN.getErrorDesc(), - EngineConnLogOperator.MAX_LOG_FETCH_SIZE.getValue().toString())); - } else if (lastRows > 0) { - String logs = Utils.exec(new String[] {"tail", "-n", lastRows + "", logPath.getPath()}, 5000); - Map stringObjectHashMap = new HashMap<>(); - stringObjectHashMap.put("logs", logs.split("\n")); - stringObjectHashMap.put("rows", logs.length()); - return stringObjectHashMap; - } - - String ignoreKeywords = getAs(parameters, "ignoreKeywords", ""); - String[] ignoreKeywordList = - StringUtils.isNotEmpty(ignoreKeywords) ? ignoreKeywords.split(",") : new String[0]; - - String onlyKeywords = getAs(parameters, "onlyKeywords", ""); - String[] onlyKeywordList = - StringUtils.isNotEmpty(onlyKeywords) ? 
onlyKeywords.split(",") : new String[0]; - - RandomAccessFile randomReader = null; - ReversedLinesFileReader reversedReader = null; - try { - if (enableTail) { - logger.info("enable log operator from tail to read"); - reversedReader = new ReversedLinesFileReader(logPath, Charset.defaultCharset()); - } else { - randomReader = new RandomAccessFile(logPath, "r"); - } - - ArrayList logs = new ArrayList<>(pageSize); - int readLine = 0, skippedLine = 0, lineNum = 0; - boolean rowIgnore = false; - int ignoreLine = 0; - Pattern linePattern = Pattern.compile(EngineConnLogOperator.MULTILINE_PATTERN.getValue()); - - int maxMultiline = MULTILINE_MAX.getValue(); - String line = randomAndReversedReadLine(randomReader, reversedReader); - - while (readLine < pageSize && line != null) { - lineNum += 1; - if (skippedLine < fromLine - 1) { - skippedLine += 1; - } else { - if (rowIgnore) { - Matcher matcher = linePattern.matcher(line); - if (matcher.matches()) { - ignoreLine = 0; - rowIgnore = !includeLine(line, onlyKeywordList, ignoreKeywordList); - } else { - ignoreLine += 1; - if (ignoreLine >= maxMultiline) { - rowIgnore = false; - } - } - if (!matcher.matches()) { - rowIgnore = !includeLine(line, onlyKeywordList, ignoreKeywordList); - } - } else { - rowIgnore = !includeLine(line, onlyKeywordList, ignoreKeywordList); - } - if (!rowIgnore) { - logs.add(line); - readLine += 1; - } - } - line = randomAndReversedReadLine(randomReader, reversedReader); - } - - if (enableTail) { - Collections.reverse(logs); - } - - Map resultMap = new HashMap<>(); - resultMap.put("logPath", logPath.getPath()); - resultMap.put("logs", logs); - resultMap.put("endLine", lineNum); - resultMap.put("rows", readLine); - return resultMap; - } catch (IOException e) { - logger.info("EngineConnLogOperator apply failed", e); - throw new ECMErrorException( - LOG_IS_NOT_EXISTS.getErrorCode(), LOG_IS_NOT_EXISTS.getErrorDesc()); - } finally { - IOUtils.closeQuietly(randomReader); - IOUtils.closeQuietly(reversedReader); 
- } - } - - private String randomAndReversedReadLine( - RandomAccessFile randomReader, ReversedLinesFileReader reversedReader) throws IOException { - if (randomReader != null) { - String line = randomReader.readLine(); - if (line != null) { - return new String(line.getBytes(StandardCharsets.ISO_8859_1), Charset.defaultCharset()); - } else { - return null; - } - } else { - return reversedReader.readLine(); - } - } - - protected File getLogPath(Map parameters) { - String logType = getAs(parameters, "logType", EngineConnLogOperator.LOG_FILE_NAME.getValue()); - - Triple engineConnInfo = getEngineConnInfo(parameters); - String engineConnLogDir = engineConnInfo.getLeft(); - String engineConnInstance = engineConnInfo.getMiddle(); - String ticketId = engineConnInfo.getRight(); - - File logPath = new File(engineConnLogDir, logType); - if (!logPath.exists() || !logPath.isFile()) { - throw new ECMErrorException( - LOGFILE_IS_NOT_EXISTS.getErrorCode(), - MessageFormat.format(LOGFILE_IS_NOT_EXISTS.getErrorDesc(), logPath.toString())); - } - logger.info( - String.format( - "Try to fetch EngineConn(id: %s, instance: %s) logs from %s.", - ticketId, engineConnInstance, logPath.getPath())); - return logPath; - } - - protected Triple getEngineConnInfo(Map parameters) { - String logDIrSuffix = getAs(parameters, "logDirSuffix", ""); - String engineConnLogDir = - ECMConfiguration.ENGINECONN_ROOT_DIR() + File.separator + logDIrSuffix; - String ticketId = getAs(parameters, "ticketId", ""); - String engineConnInstance = ""; - return Triple.of(engineConnLogDir, engineConnInstance, ticketId); - } - - private boolean includeLine(String line, String[] onlyKeywordList, String[] ignoreKeywordList) { - boolean accept = - ignoreKeywordList.length == 0 || !Arrays.stream(ignoreKeywordList).anyMatch(line::contains); - if (accept) { - accept = - onlyKeywordList.length == 0 || Arrays.stream(onlyKeywordList).anyMatch(line::contains); - } - return accept; - } -} diff --git 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.java deleted file mode 100644 index 6d3548274c..0000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ecm.server.operator; - -import org.apache.linkis.common.exception.WarnException; -import org.apache.linkis.common.utils.Utils; -import org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary; -import org.apache.linkis.ecm.server.exception.ECMErrorException; -import org.apache.linkis.ecm.server.exception.ECMWarnException; - -import org.apache.commons.lang3.tuple.Triple; - -import java.io.File; -import java.text.MessageFormat; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary.LOG_IS_NOT_EXISTS; - -public class EngineConnYarnLogOperator extends EngineConnLogOperator { - private static final Logger logger = LoggerFactory.getLogger(EngineConnYarnLogOperator.class); - - private static final String YARN_LOG_OPERATOR_NAME = "engineConnYarnLog"; - - @Override - public String[] getNames() { - return new String[] {EngineConnYarnLogOperator.YARN_LOG_OPERATOR_NAME}; - } - - @Override - public Map apply(Map parameters) { - Map result = new HashMap<>(); - try { - result = super.apply(parameters); - } finally { - Object logPath = result.get("logPath"); - if (logPath instanceof String) { - File logFile = new File((String) logPath); - if (logFile.exists() && logFile.getName().startsWith(".")) { - // If is a temporary file, drop it - logger.info(String.format("Delete the temporary yarn log file: [%s]", logPath)); - if (!logFile.delete()) { - logger.warn(String.format("Fail to delete the temporary yarn log file: [%s]", logPath)); - } - } - } - } - return result; - } - - @Override - public File getLogPath(Map parameters) { - String ticketId, engineConnInstance, engineConnLogDir; - Triple engineConnInfo = getEngineConnInfo(parameters); - ticketId = engineConnInfo.getRight(); - engineConnInstance = engineConnInfo.getMiddle(); - engineConnLogDir = 
engineConnInfo.getLeft(); - - File rootLogDir = new File(engineConnLogDir); - if (!rootLogDir.exists() || !rootLogDir.isDirectory()) { - throw new ECMWarnException( - LOG_IS_NOT_EXISTS.getErrorCode(), - MessageFormat.format(LOG_IS_NOT_EXISTS.getErrorDesc(), rootLogDir)); - } - - String creator = getAsThrow(parameters, "creator"); - String applicationId = getAsThrow(parameters, "yarnApplicationId"); - File logPath = new File(engineConnLogDir, "yarn_" + applicationId); - if (!logPath.exists()) { - String tempLogFile = - String.format( - ".yarn_%s_%d_%d", - applicationId, System.currentTimeMillis(), Thread.currentThread().getId()); - try { - String command = - String.format( - "yarn logs -applicationId %s >> %s/%s", applicationId, rootLogDir, tempLogFile); - logger.info(String.format("Fetch yarn logs to temporary file: [%s]", command)); - - ProcessBuilder processBuilder = new ProcessBuilder(sudoCommands(creator, command)); - processBuilder.environment().putAll(System.getenv()); - processBuilder.redirectErrorStream(false); - Process process = processBuilder.start(); - boolean waitFor = process.waitFor(5, TimeUnit.SECONDS); - logger.trace(String.format("waitFor: %b, result: %d", waitFor, process.exitValue())); - if (waitFor && process.waitFor() == 0) { - command = - String.format( - "mv %s/%s %s/yarn_%s", rootLogDir, tempLogFile, rootLogDir, applicationId); - logger.info(String.format("Move and save yarn logs: [%s]", command)); - Utils.exec(sudoCommands(creator, command)); - } else { - logPath = new File(engineConnLogDir, tempLogFile); - if (!logPath.exists()) { - throw new WarnException( - -1, - String.format( - "Fetch yarn logs timeout, log aggregation has not completed or is not enabled")); - } - } - } catch (Exception e) { - throw new WarnException( - -1, - String.format( - "Fail to fetch yarn logs application: %s, message: %s", - applicationId, e.getMessage())); - } - } - if (!logPath.exists() || !logPath.isFile()) { - throw new ECMErrorException( - 
EngineconnServerErrorCodeSummary.LOGFILE_IS_NOT_EXISTS.getErrorCode(), - MessageFormat.format( - EngineconnServerErrorCodeSummary.LOGFILE_IS_NOT_EXISTS.getErrorDesc(), logPath)); - } - logger.info( - String.format( - "Try to fetch EngineConn(id: %s, instance: %s) yarn logs from %s in application id: %s", - ticketId, engineConnInstance, logPath.getPath(), applicationId)); - - return logPath; - } - - private String[] sudoCommands(String creator, String command) { - return new String[] { - "/bin/bash", - "-c", - "sudo su " + creator + " -c \"source ~/.bashrc 2>/dev/null; " + command + "\"" - }; - } -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java index 5a0ade21dc..2e351b00df 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java @@ -21,6 +21,7 @@ import org.apache.linkis.common.utils.Utils; import org.apache.linkis.ecm.server.conf.ECMConfiguration; import org.apache.linkis.ecm.server.service.EngineConnKillService; +import org.apache.linkis.ecm.utils.ECMCacheUtils; import org.apache.linkis.engineconn.common.conf.EngineConnConf; import org.apache.linkis.governance.common.utils.GovernanceUtils; import org.apache.linkis.manager.common.constant.AMConstant; @@ -61,6 +62,7 @@ public EngineStopResponse dealEngineConnStop(EngineStopRequest engineStopRequest String pid = null; if 
(AMConstant.PROCESS_MARK.equals(engineStopRequest.getIdentifierType()) && StringUtils.isNotBlank(engineStopRequest.getIdentifier())) { + ECMCacheUtils.putStopECToCache(engineStopRequest.getServiceInstance(), engineStopRequest); pid = engineStopRequest.getIdentifier(); } logger.info("dealEngineConnStop return pid: {}", pid); @@ -189,13 +191,11 @@ private String getYarnAppRegexByEngineType(String engineType) { case "sqoop": regex = EngineConnConf.SQOOP_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); break; + case "seatunnel": case "flink": case "hive": regex = EngineConnConf.HIVE_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); break; - case "seatunnel": - regex = EngineConnConf.SEATUNNEL_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); - break; default: regex = ""; } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/score/LabelScoreServiceInstance.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/utils/ECMCacheUtils.java similarity index 53% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/score/LabelScoreServiceInstance.java rename to linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/utils/ECMCacheUtils.java index 8e5b57678a..52d140055d 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/label/score/LabelScoreServiceInstance.java +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/utils/ECMCacheUtils.java @@ -15,37 +15,26 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.label.score; +package org.apache.linkis.ecm.utils; import org.apache.linkis.common.ServiceInstance; -import org.apache.linkis.manager.common.entity.node.ScoreServiceInstance; +import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest; -public class LabelScoreServiceInstance implements ScoreServiceInstance { +import java.util.concurrent.TimeUnit; - private double score = 0d; - private ServiceInstance serviceInstance; +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; - public LabelScoreServiceInstance(ServiceInstance instance) { - this.serviceInstance = instance; - } - - @Override - public double getScore() { - return score; - } - - @Override - public void setScore(double score) { - this.score = score; - } +public class ECMCacheUtils { + private static Cache ecStopRequestCache = + CacheBuilder.newBuilder().expireAfterWrite(10, TimeUnit.SECONDS).build(); - @Override - public ServiceInstance getServiceInstance() { - return serviceInstance; + public static void putStopECToCache( + ServiceInstance serviceInstance, EngineStopRequest engineStopRequest) { + ecStopRequestCache.put(serviceInstance, engineStopRequest); } - @Override - public void setServiceInstance(ServiceInstance serviceInstance) { - this.serviceInstance = serviceInstance; + public static EngineStopRequest getStopEC(ServiceInstance serviceInstance) { + return ecStopRequestCache.getIfPresent(serviceInstance); } } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/conf/ECMConfiguration.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/conf/ECMConfiguration.scala index 4c02bfff05..dbd24e3562 100644 --- 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/conf/ECMConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/conf/ECMConfiguration.scala @@ -20,7 +20,6 @@ package org.apache.linkis.ecm.server.conf import org.apache.linkis.common.conf.{CommonVars, TimeType} import org.apache.linkis.common.utils.ByteTimeUtils import org.apache.linkis.governance.common.conf.GovernanceCommonConf -import org.apache.linkis.manager.common.conf.RMConfiguration import java.io.File import java.util.concurrent.TimeUnit @@ -116,16 +115,8 @@ object ECMConfiguration { val ECM_PROCESS_SCRIPT_KILL: Boolean = CommonVars[Boolean]("wds.linkis.ecm.script.kill.engineconn", true).getValue - val ECM_YARN_CLUSTER_NAME: String = - CommonVars( - "wds.linkis.ecm.yarn.cluster.name", - RMConfiguration.DEFAULT_YARN_CLUSTER_NAME.getValue - ).getValue - - val ECM_YARN_CLUSTER_TYPE: String = - CommonVars( - "wds.linkis.ecm.yarn.cluster.type", - RMConfiguration.DEFAULT_YARN_TYPE.getValue - ).getValue - + val EC_CAN_RETRY_EXIT_CODES: Array[Int] = + CommonVars[String]("linkis.ecm.can.retry.exit.codes", "143").getValue + .split(",") + .map(_.toInt); } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/exception/ECMErrorException.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/exception/ECMErrorException.scala index 1b397d04e5..e50b2ff84f 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/exception/ECMErrorException.scala +++ 
b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/exception/ECMErrorException.scala @@ -17,22 +17,15 @@ package org.apache.linkis.ecm.server.exception -import org.apache.linkis.common.exception.{ - ErrorException, - ExceptionLevel, - LinkisRuntimeException, - WarnException -} +import org.apache.linkis.common.exception.{ErrorException, WarnException} -class ECMErrorException(errorCode: Int, desc: String) - extends LinkisRuntimeException(errorCode, desc) { +class ECMErrorException(errorCode: Int, desc: String) extends ErrorException(errorCode, desc) { def this(errorCode: Int, desc: String, t: Throwable) { this(errorCode, desc) this.initCause(t) } - override def getLevel: ExceptionLevel = ExceptionLevel.ERROR } class ECMWarnException(errCode: Int, desc: String) extends WarnException(errCode, desc) diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/hook/JarUDFLoadECMHook.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/hook/JarUDFLoadECMHook.scala index 39efe74cc8..26cccfc9c3 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/hook/JarUDFLoadECMHook.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/hook/JarUDFLoadECMHook.scala @@ -113,6 +113,7 @@ class JarUDFLoadECMHook extends ECMHook with Logging { ) } } + // LaunchConstants.addPathToClassPath(pel.environment, udfDir + File.separator + "*") val udfJars = fileNameSet.map(udfDir + File.separator + _) pel.environment.put(Environment.UDF_JARS.toString, udfJars.mkString(",")); } diff --git 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala new file mode 100644 index 0000000000..66327dadcf --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala @@ -0,0 +1,193 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ecm.server.operator + +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary._ +import org.apache.linkis.ecm.server.conf.ECMConfiguration +import org.apache.linkis.ecm.server.exception.ECMErrorException +import org.apache.linkis.ecm.server.service.LocalDirsHandleService +import org.apache.linkis.manager.common.operator.Operator + +import org.apache.commons.io.IOUtils +import org.apache.commons.io.input.ReversedLinesFileReader +import org.apache.commons.lang3.StringUtils + +import java.io.{File, RandomAccessFile} +import java.nio.charset.{Charset, StandardCharsets} +import java.text.MessageFormat +import java.util +import java.util.Collections + +import scala.util.matching.Regex + +class EngineConnLogOperator extends Operator with Logging { + + override def getNames: Array[String] = Array(EngineConnLogOperator.OPERATOR_NAME) + + override def apply(implicit parameters: Map[String, Any]): Map[String, Any] = { + val logPath = getLogPath + val lastRows = getAs("lastRows", 0) + val pageSize = getAs("pageSize", 100) + val fromLine = getAs("fromLine", 1) + val enableTail = getAs("enableTail", false) + if (lastRows > EngineConnLogOperator.MAX_LOG_FETCH_SIZE.getValue) { + throw new ECMErrorException( + CANNOT_FETCH_MORE_THAN.getErrorCode, + MessageFormat.format( + CANNOT_FETCH_MORE_THAN.getErrorDesc, + EngineConnLogOperator.MAX_LOG_FETCH_SIZE.getValue.toString + ) + ) + } else if (lastRows > 0) { + val logs = Utils.exec(Array("tail", "-n", lastRows + "", logPath.getPath), 5000).split("\n") + return Map("logs" -> logs, "rows" -> logs.length) + } + + val ignoreKeywords = getAs("ignoreKeywords", "") + val ignoreKeywordList = + if (StringUtils.isNotEmpty(ignoreKeywords)) ignoreKeywords.split(",") + else Array.empty[String] + val onlyKeywords = getAs("onlyKeywords", "") + val onlyKeywordList = + if 
(StringUtils.isNotEmpty(onlyKeywords)) onlyKeywords.split(",") else Array.empty[String] + var randomReader: RandomAccessFile = null + var reversedReader: ReversedLinesFileReader = null + if (enableTail) { + logger.info("enable log operator from tail to read") + reversedReader = new ReversedLinesFileReader(logPath, Charset.defaultCharset()) + } else { + randomReader = new RandomAccessFile(logPath, "r") + } + def randomAndReversedReadLine(): String = { + if (null != randomReader) { + val line = randomReader.readLine() + if (line != null) { + new String(line.getBytes(StandardCharsets.ISO_8859_1), Charset.defaultCharset()) + } else null + } else { + reversedReader.readLine() + } + } + val logs = new util.ArrayList[String](pageSize) + var readLine, skippedLine, lineNum = 0 + var rowIgnore = false + var ignoreLine = 0 + val linePattern = getLinePattern + val maxMultiline = EngineConnLogOperator.MULTILINE_MAX.getValue + Utils.tryFinally { + var line = randomAndReversedReadLine() + while (readLine < pageSize && line != null) { + lineNum += 1 + if (skippedLine < fromLine - 1) { + skippedLine += 1 + } else { + if (rowIgnore) { + linePattern match { + case reg: Regex => + if (reg.findFirstIn(line).isDefined) { + ignoreLine = 0 + rowIgnore = !includeLine(line, onlyKeywordList, ignoreKeywordList) + } else { + ignoreLine += 1 + if (ignoreLine >= maxMultiline) { + rowIgnore = false + } + } + case _ => rowIgnore = !includeLine(line, onlyKeywordList, ignoreKeywordList) + } + } else { + rowIgnore = !includeLine(line, onlyKeywordList, ignoreKeywordList) + } + if (!rowIgnore) { + logs.add(line) + readLine += 1 + } + } + line = randomAndReversedReadLine() + } + } { + IOUtils.closeQuietly(randomReader) + IOUtils.closeQuietly(reversedReader) + } + if (enableTail) Collections.reverse(logs) + Map("logPath" -> logPath.getPath, "logs" -> logs, "endLine" -> lineNum, "rows" -> readLine) + } + + protected def getLogPath(implicit parameters: Map[String, Any]): File = { + val (ticketId, 
engineConnInstance, engineConnLogDir) = getEngineConnInfo(parameters) + val logPath = + new File(engineConnLogDir, getAs("logType", EngineConnLogOperator.LOG_FILE_NAME.getValue)); + if (!logPath.exists() || !logPath.isFile) { + throw new ECMErrorException( + LOGFILE_IS_NOT_EXISTS.getErrorCode, + MessageFormat.format(LOGFILE_IS_NOT_EXISTS.getErrorDesc, logPath) + ) + } + logger.info( + s"Try to fetch EngineConn(id: $ticketId, instance: $engineConnInstance) logs from ${logPath.getPath}." + ) + logPath + } + + protected def getLinePattern: Regex = { + Option(EngineConnLogOperator.MULTILINE_PATTERN.getValue) match { + case Some(pattern) => pattern.r + case _ => null + } + } + + protected def getEngineConnInfo(implicit + parameters: Map[String, Any] + ): (String, String, String) = { + val logDIrSuffix = getAs("logDirSuffix", "") + val ecLogPath = ECMConfiguration.ENGINECONN_ROOT_DIR + File.separator + logDIrSuffix + val ticketId = getAs("ticketId", "") + (ticketId, "", ecLogPath) + } + + private def includeLine( + line: String, + onlyKeywordList: Array[String], + ignoreKeywordList: Array[String] + ): Boolean = { + var accept: Boolean = ignoreKeywordList.isEmpty || !ignoreKeywordList.exists(line.contains) + if (accept) { + accept = onlyKeywordList.isEmpty || onlyKeywordList.exists(line.contains) + } + accept + } + +} + +object EngineConnLogOperator { + val OPERATOR_NAME = "engineConnLog" + val LOG_FILE_NAME = CommonVars("linkis.engineconn.log.filename", "stdout") + val MAX_LOG_FETCH_SIZE = CommonVars("linkis.engineconn.log.fetch.lines.max", 5000) + + val MAX_LOG_TAIL_START_SIZE = CommonVars("linkis.engineconn.log.tail.start.size", 20000) + + // yyyy-MM-dd HH:mm:ss.SSS + val MULTILINE_PATTERN = CommonVars( + "linkis.engineconn.log.multiline.pattern", + "^\\d{4}-\\d{2}-\\d{2}\\s+\\d{2}:\\d{2}:\\d{2}\\.\\d{3}" + ) + + val MULTILINE_MAX = CommonVars("linkis.engineconn.log.multiline.max", 500) +} diff --git 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala new file mode 100644 index 0000000000..7d8b5fc671 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala @@ -0,0 +1,194 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ecm.server.operator + +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.io.FsPath +import org.apache.linkis.common.utils.Utils +import org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary._ +import org.apache.linkis.ecm.server.exception.ECMErrorException +import org.apache.linkis.storage.FSFactory +import org.apache.linkis.storage.fs.FileSystem +import org.apache.linkis.storage.utils.StorageUtils + +import org.apache.commons.lang3.StringUtils + +import java.io.File +import java.text.MessageFormat +import java.util +import java.util.concurrent.{Callable, ConcurrentHashMap, ExecutorService, Future, TimeUnit} + +import scala.collection.JavaConverters._ +import scala.util.matching.Regex + +class EngineConnYarnLogOperator extends EngineConnLogOperator { + + private implicit val fs: FileSystem = + FSFactory.getFs(StorageUtils.FILE).asInstanceOf[FileSystem] + + /** + * Yarn log fetchers + */ + private def yarnLogFetchers: ConcurrentHashMap[String, Future[String]] = + new ConcurrentHashMap[String, Future[String]]() + + override def getNames: Array[String] = Array(EngineConnYarnLogOperator.OPERATOR_NAME) + + override def apply(implicit parameters: Map[String, Any]): Map[String, Any] = { + var result: Map[String, Any] = Map() + Utils.tryFinally { + result = super.apply(parameters) + result + } {} + } + + override def getLogPath(implicit parameters: Map[String, Any]): File = { + val applicationId = getAsThrow[String]("yarnApplicationId", parameters) + val (ticketId, engineConnInstance, engineConnLogDir) = getEngineConnInfo(parameters) + val rootLogPath = EngineConnYarnLogOperator.YARN_LOG_STORAGE_PATH.getValue match { + case storePath if StringUtils.isNotBlank(storePath) => + val logPath = new FsPath(StorageUtils.FILE_SCHEMA + storePath + "/" + applicationId) + // Invoke to create directory + fs.mkdir(logPath) + // 777 permission + fs.setPermission(logPath, "rwxrwxrwx") + logPath + case _ => new 
FsPath(StorageUtils.FILE_SCHEMA + engineConnLogDir) + } + if (!fs.exists(rootLogPath) || !rootLogPath.toFile.isDirectory) { + throw new ECMErrorException( + LOG_IS_NOT_EXISTS.getErrorCode, + MessageFormat.format(LOG_IS_NOT_EXISTS.getErrorDesc, rootLogPath.getPath) + ) + } + val creator = getAsThrow[String]("creator", parameters) + var logPath = new FsPath( + StorageUtils.FILE_SCHEMA + rootLogPath.getPath + "/yarn_" + applicationId + ) + if (!fs.exists(logPath)) { + val fetcher = yarnLogFetchers.computeIfAbsent( + applicationId, + new util.function.Function[String, Future[String]] { + override def apply(v1: String): Future[String] = + requestToFetchYarnLogs(creator, applicationId, rootLogPath.getPath) + } + ) + // Just wait 5 seconds + Option(fetcher.get(5, TimeUnit.SECONDS)) match { + case Some(path) => logPath = new FsPath(StorageUtils.FILE_SCHEMA + path) + case _ => + } + + } + if (!fs.exists(logPath) || logPath.toFile.isDirectory) { + throw new ECMErrorException( + LOGFILE_IS_NOT_EXISTS.getErrorCode, + MessageFormat.format(LOGFILE_IS_NOT_EXISTS.getErrorDesc, logPath.getPath) + ) + } + logger.info( + s"Try to fetch EngineConn(id: $ticketId, instance: $engineConnInstance) yarn logs from ${logPath.getPath} in application id: $applicationId" + ) + logPath.toFile + } + + /** + * Not support line pattern in yarn log + * @return + */ + override protected def getLinePattern: Regex = null + + /** + * Request the log fetcher + * + * @param creator + * creator + * @param applicationId + * application id + * @param logPath + * log path + * @return + */ + private def requestToFetchYarnLogs( + creator: String, + applicationId: String, + yarnLogDir: String + ): Future[String] = { + EngineConnYarnLogOperator.YARN_LOG_FETCH_SCHEDULER.submit(new Callable[String] { + override def call(): String = { + val logPath = new FsPath(StorageUtils.FILE_SCHEMA + yarnLogDir + "/yarn_" + applicationId) + if (!fs.exists(logPath)) { + val tempLogFile = + 
s".yarn_${applicationId}_${System.currentTimeMillis()}_${Thread.currentThread().getId}" + Utils.tryCatch { + var command = + s"yarn logs -applicationId $applicationId >> $yarnLogDir/$tempLogFile" + logger.info(s"Fetch yarn logs to temporary file: [$command]") + val processBuilder = new ProcessBuilder(sudoCommands(creator, command): _*) + processBuilder.environment.putAll(sys.env.asJava) + processBuilder.redirectErrorStream(false) + val process = processBuilder.start() + val exitCode = process.waitFor() + logger.trace(s"Finish to fetch yan logs to temporary file, result: ${exitCode}") + if (exitCode == 0) { + command = s"mv $yarnLogDir/$tempLogFile $yarnLogDir/yarn_$applicationId" + logger.info(s"Move and save yarn logs(${applicationId}): [$command]") + Utils.exec(sudoCommands(creator, command)) + } + } { e: Throwable => + logger.error( + s"Fail to fetch yarn logs application: $applicationId, message: ${e.getMessage}" + ) + } + val tmpFile = new File(yarnLogDir, tempLogFile) + if (tmpFile.exists()) { + logger.info(s"Delete temporary file: [${tempLogFile}] in yarn logs fetcher") + tmpFile.delete() + } + } + // Remove future + yarnLogFetchers.remove(applicationId) + if (fs.exists(logPath)) logPath.getPath else null + } + }) + } + + private def sudoCommands(creator: String, command: String): Array[String] = { + Array( + "/bin/bash", + "-c", + "sudo su " + creator + " -c \"source ~/.bashrc 2>/dev/null; " + command + "\"" + ) + } + +} + +object EngineConnYarnLogOperator { + val OPERATOR_NAME = "engineConnYarnLog" + + // Specific the path to store the yarn logs + val YARN_LOG_STORAGE_PATH: CommonVars[String] = + CommonVars("linkis.engineconn.log.yarn.storage-path", "") + + val YARN_LOG_FETCH_THREAD: CommonVars[Int] = + CommonVars("linkis.engineconn.log.yarn.fetch.thread-num", 5) + + val YARN_LOG_FETCH_SCHEDULER: ExecutorService = + Utils.newFixedThreadPool(YARN_LOG_FETCH_THREAD.getValue + 1, "yarn_logs_fetch", false) + +} diff --git 
a/linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/restful/ListenerTest.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/ECMMetricsService.scala similarity index 91% rename from linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/restful/ListenerTest.java rename to linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/ECMMetricsService.scala index 11a592924a..62ae7c1fea 100644 --- a/linkis-public-enhancements/linkis-pes-client/src/test/java/org/apache/linkis/cs/client/test/restful/ListenerTest.java +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/ECMMetricsService.scala @@ -15,6 +15,6 @@ * limitations under the License. */ -package org.apache.linkis.cs.client.test.restful; +package org.apache.linkis.ecm.server.service -public class ListenerTest {} +trait ECMMetricsService {} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala index 390822df0d..df00ed4960 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala @@ -121,11 +121,10 @@ abstract class 
AbstractEngineConnLaunchService extends EngineConnLaunchService w Sender .getSender(MANAGER_SERVICE_NAME) .send( - new EngineConnStatusCallbackToAM( + EngineConnStatusCallbackToAM( conn.getServiceInstance, NodeStatus.Failed, - " wait init failed , reason " + ExceptionUtils.getRootCauseMessage(t), - true + " wait init failed , reason " + ExceptionUtils.getRootCauseMessage(t) ) ) conn.setStatus(NodeStatus.Failed) @@ -147,12 +146,10 @@ abstract class AbstractEngineConnLaunchService extends EngineConnLaunchService w throw t } LoggerUtils.removeJobIdMDC() - val label = LabelUtil.getEngingeConnRuntimeModeLabel(request.labels) val isYarnClusterMode: Boolean = if (null != label && label.getModeValue.equals(LabelValueConstant.YARN_CLUSTER_VALUE)) true else false - val engineNode = new AMEngineNode() engineNode.setLabels(conn.getLabels) engineNode.setServiceInstance(conn.getServiceInstance) diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala index e334a8ac29..ed1490294c 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/BmlResourceLocalizationService.scala @@ -120,6 +120,8 @@ class BmlResourceLocalizationService extends ResourceLocalizationService with Lo } } + private val bmlResourceSuffix = ".zip" + private def createDirIfNotExit(noSchemaPath: String): String = { val fsPath = new FsPath(schema + noSchemaPath) if (!fs.exists(fsPath)) { diff --git 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMMetricsService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMMetricsService.scala new file mode 100644 index 0000000000..5d7da6b91e --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMMetricsService.scala @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.ecm.server.service.impl + +import org.apache.linkis.ecm.core.listener.{ECMEvent, ECMEventListener} +import org.apache.linkis.ecm.server.service.ECMMetricsService + +class DefaultECMMetricsService extends ECMMetricsService with ECMEventListener { + override def onEvent(event: ECMEvent): Unit = {} +} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala index 51f223e476..4c7807dad1 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala @@ -17,7 +17,6 @@ package org.apache.linkis.ecm.server.service.impl -import org.apache.linkis.common.conf.Configuration import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.ecm.core.listener.{ECMEvent, ECMEventListener} import org.apache.linkis.ecm.server.conf.ECMConfiguration._ @@ -31,7 +30,6 @@ import org.apache.linkis.manager.common.protocol.em.{ StopEMRequest } import org.apache.linkis.manager.label.constant.LabelKeyConstant -import org.apache.linkis.manager.label.entity.SerializableLabel import org.apache.linkis.rpc.Sender import java.util @@ -52,25 +50,17 @@ class DefaultECMRegisterService extends ECMRegisterService with ECMEventListener request } - def getLabelsFromArgs(params: Array[String]): util.Map[String, AnyRef] = { + private def getLabelsFromArgs(params: Array[String]): util.Map[String, AnyRef] = { import 
scala.collection.JavaConverters._ - val labelRegex = """label\.(.+)\.(.+)=(.+)""".r val labels = new util.HashMap[String, AnyRef]() - // TODO: magic labels.asScala += LabelKeyConstant.SERVER_ALIAS_KEY -> Collections.singletonMap( "alias", ENGINE_CONN_MANAGER_SPRING_NAME ) - - if (Configuration.IS_MULTIPLE_YARN_CLUSTER) { - labels.asScala += LabelKeyConstant.YARN_CLUSTER_KEY -> - (ECM_YARN_CLUSTER_TYPE + "_" + ECM_YARN_CLUSTER_NAME) - } - // TODO: group by key labels } - def getEMRegiterResourceFromConfiguration: NodeResource = { + private def getEMRegiterResourceFromConfiguration: NodeResource = { val maxResource = new LoadInstanceResource( ECMUtils.inferDefaultMemory(), ECM_MAX_CORES_AVAILABLE, @@ -107,11 +97,9 @@ class DefaultECMRegisterService extends ECMRegisterService with ECMEventListener logger.info("start register ecm") val response = Sender.getSender(MANAGER_SERVICE_NAME).ask(request) response match { - case registerEMResponse: RegisterEMResponse => - if (!registerEMResponse.getIsSuccess) { - logger.error( - s"Failed to register info to linkis manager, reason: ${registerEMResponse.getMsg}" - ) + case RegisterEMResponse(isSuccess, msg) => + if (!isSuccess) { + logger.error(s"Failed to register info to linkis manager, reason: $msg") System.exit(1) } case _ => diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultOperateService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultOperateService.scala index 12b813d67f..3a541df8b9 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultOperateService.scala +++ 
b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultOperateService.scala @@ -27,34 +27,26 @@ import org.apache.commons.lang3.exception.ExceptionUtils import org.springframework.stereotype.Service -import java.util +import scala.collection.JavaConverters.mapAsScalaMapConverter @Service class DefaultOperateService extends OperateService with Logging { @Receiver override def executeOperation(ecmOperateRequest: ECMOperateRequest): ECMOperateResponse = { - val parameters = ecmOperateRequest.getParameters() - val operator = Utils.tryCatch(OperatorFactory.apply().getOperatorRequest(parameters)) { t => - logger.error(s"Get operator failed, parameters is ${ecmOperateRequest.getParameters}.", t) - return new ECMOperateResponse( - new util.HashMap[String, Object](), - true, - ExceptionUtils.getRootCauseMessage(t) - ) + val parameters = ecmOperateRequest.parameters.asScala.toMap + val operator = Utils.tryCatch(OperatorFactory().getOperatorRequest(parameters)) { t => + logger.error(s"Get operator failed, parameters is ${ecmOperateRequest.parameters}.", t) + return ECMOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) } logger.info( - s"Try to execute operator ${operator.getClass.getSimpleName} with parameters ${ecmOperateRequest.getParameters}." + s"Try to execute operator ${operator.getClass.getSimpleName} with parameters ${ecmOperateRequest.parameters}." 
) val result = Utils.tryCatch(operator(parameters)) { t => logger.error(s"Execute ${operator.getClass.getSimpleName} failed.", t) - return new ECMOperateResponse( - new util.HashMap[String, Object](), - true, - ExceptionUtils.getRootCauseMessage(t) - ) + return ECMOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) } - new ECMOperateResponse(result) + ECMOperateResponse(result) } } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala index 360bca269d..b20590f04a 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala @@ -26,6 +26,7 @@ import org.apache.linkis.ecm.server.conf.ECMConfiguration import org.apache.linkis.ecm.server.conf.ECMConfiguration.MANAGER_SERVICE_NAME import org.apache.linkis.ecm.server.listener.EngineConnStopEvent import org.apache.linkis.ecm.server.service.LocalDirsHandleService +import org.apache.linkis.ecm.utils.ECMCacheUtils import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} import org.apache.linkis.manager.common.constant.AMConstant import org.apache.linkis.manager.common.entity.enumeration.NodeStatus @@ -90,20 +91,36 @@ abstract class ProcessEngineConnLaunchService extends AbstractEngineConnLaunchSe ) } if (exitCode != 0) { - val canRetry = if (errorMsg.isEmpty) true else false - logger.warn( - s"Failed to start ec ${engineConn.getServiceInstance}, status 
shutting down exit code ${exitCode}, canRetry ${canRetry}, logPath ${logPath}" - ) - Sender - .getSender(MANAGER_SERVICE_NAME) - .send( - new EngineConnStatusCallbackToAM( - engineConn.getServiceInstance, - NodeStatus.ShuttingDown, - "Failed to start EngineConn, reason: " + errorMsg + s"\n You can go to this path($logPath) to find the reason or ask the administrator for help", - canRetry + val stopRequest = ECMCacheUtils.getStopEC(engineConn.getServiceInstance) + if ( + null != stopRequest && engineConn.getPid != null && engineConn.getPid.equals( + stopRequest.getIdentifier ) + ) { + logger.info( + s"EC ${engineConn.getServiceInstance} exit should by kill stop request $stopRequest, do not report status" + ) + } else { + val canRetry = + if (errorMsg.isEmpty || ECMConfiguration.EC_CAN_RETRY_EXIT_CODES.contains(exitCode)) { + true + } else { + false + } + logger.warn( + s"Failed to start ec ${engineConn.getServiceInstance}, status shutting down exit code ${exitCode}, canRetry ${canRetry}, logPath ${logPath}" ) + Sender + .getSender(MANAGER_SERVICE_NAME) + .send( + EngineConnStatusCallbackToAM( + engineConn.getServiceInstance, + NodeStatus.ShuttingDown, + "Failed to start EngineConn, reason: " + errorMsg + s"\n You can go to this path($logPath) to find the reason or ask the administrator for help", + canRetry + ) + ) + } engineConn.setStatus(NodeStatus.ShuttingDown) } else { engineConn.setStatus(NodeStatus.Success) diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/test/java/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterServiceTest.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/test/java/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterServiceTest.java deleted file mode 100644 index f65fd68780..0000000000 --- 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/test/java/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterServiceTest.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ecm.server.service.impl; - -import org.apache.linkis.common.ServiceInstance; -import org.apache.linkis.manager.common.protocol.em.RegisterEMRequest; -import org.apache.linkis.manager.label.constant.LabelKeyConstant; -import org.apache.linkis.rpc.serializer.ProtostuffSerializeUtil; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.junit.jupiter.api.Test; - -import static org.apache.linkis.manager.label.conf.LabelCommonConfig.ENGINE_CONN_MANAGER_SPRING_NAME; - -public class DefaultECMRegisterServiceTest { - @Test - void testECM() { - DefaultECMRegisterService defaultECMRegisterService = new DefaultECMRegisterService(); - RegisterEMRequest request = new RegisterEMRequest(); - ServiceInstance instance = new ServiceInstance(); - instance.setInstance("127.0.0.1:9001"); - instance.setApplicationName("ecm"); - request.setUser("hadoop"); - request.setServiceInstance(instance); - request.setAlias(instance.getApplicationName()); - - Map labels = new HashMap<>(); - labels.put( - LabelKeyConstant.SERVER_ALIAS_KEY, - Collections.singletonMap("alias", ENGINE_CONN_MANAGER_SPRING_NAME)); - request.setLabels(defaultECMRegisterService.getLabelsFromArgs(null)); - // the ECMUtils.inferDefaultMemory() will throw error disable the test - // request.setNodeResource(defaultECMRegisterService.getEMRegiterResourceFromConfiguration()); - String res = ProtostuffSerializeUtil.serialize(request); - ProtostuffSerializeUtil.deserialize(res, RegisterEMRequest.class); - } -} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/java/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.java b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/java/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.java deleted file mode 100644 index 
3f98e6a988..0000000000 --- a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/java/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.engineconn.once.executor.operator; - -import org.apache.linkis.engineconn.common.exception.EngineConnException; -import org.apache.linkis.engineconn.once.executor.OnceExecutor; -import org.apache.linkis.engineconn.once.executor.OperableOnceExecutor; -import org.apache.linkis.engineconn.once.executor.creation.OnceExecutorManager$; -import org.apache.linkis.manager.common.operator.Operator; -import org.apache.linkis.manager.common.operator.OperatorFactory; -import org.apache.linkis.protocol.engine.JobProgressInfo; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class OperableOnceEngineConnOperator implements Operator { - - public static final String PROGRESS_OPERATOR_NAME = "engineConnProgress"; - - public static final String METRICS_OPERATOR_NAME = "engineConnMetrics"; - - public static final String DIAGNOSIS_OPERATOR_NAME = "engineConnDiagnosis"; - - @Override - public String[] getNames() { - return new String[] {PROGRESS_OPERATOR_NAME, METRICS_OPERATOR_NAME, DIAGNOSIS_OPERATOR_NAME}; - } - - @Override - public Map apply(Map parameters) { - String operatorName = OperatorFactory.apply().getOperatorName(parameters); - OnceExecutor reportExecutor = OnceExecutorManager$.MODULE$.getInstance().getReportExecutor(); - if (reportExecutor instanceof OperableOnceExecutor) { - OperableOnceExecutor operableOnceExecutor = (OperableOnceExecutor) reportExecutor; - switch (operatorName) { - case PROGRESS_OPERATOR_NAME: - List> progressInfoMap = new ArrayList<>(); - JobProgressInfo[] progressInfoList = operableOnceExecutor.getProgressInfo(); - if (progressInfoList != null && progressInfoList.length != 0) { - for (JobProgressInfo progressInfo : progressInfoList) { - Map infoMap = new HashMap<>(); - infoMap.put("id", progressInfo.id()); - infoMap.put("totalTasks", progressInfo.totalTasks()); - infoMap.put("runningTasks", progressInfo.runningTasks()); - infoMap.put("failedTasks", 
progressInfo.failedTasks()); - infoMap.put("succeedTasks", progressInfo.succeedTasks()); - progressInfoMap.add(infoMap); - } - } - Map resultMap = new HashMap<>(); - resultMap.put("progress", operableOnceExecutor.getProgress()); - resultMap.put("progressInfo", progressInfoMap); - return resultMap; - case METRICS_OPERATOR_NAME: - return new HashMap() { - { - put("metrics", operableOnceExecutor.getMetrics()); - } - }; - case DIAGNOSIS_OPERATOR_NAME: - return new HashMap() { - { - put("diagnosis", operableOnceExecutor.getDiagnosis()); - } - }; - default: - throw new EngineConnException( - 20308, "This engineConn don't support " + operatorName + " operator."); - } - } else { - throw new EngineConnException( - 20308, "This engineConn don't support " + operatorName + " operator."); - } - } -} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/OnceExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/OnceExecutor.scala index 30faaa8294..dee4a885dd 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/OnceExecutor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/OnceExecutor.scala @@ -186,10 +186,8 @@ trait ManageableOnceExecutor extends AccessibleExecutor with OnceExecutor with R logger.error(msg) Utils.tryFinally { this.ensureAvailable(transition(NodeStatus.Failed)) - } { close() - stopOnceExecutor(msg) - } + }(stopOnceExecutor(msg)) true } @@ -199,10 +197,8 @@ trait ManageableOnceExecutor extends AccessibleExecutor with OnceExecutor with R logger.warn(msg) Utils.tryFinally { 
this.ensureAvailable(transition(NodeStatus.Success)) - } { close() - stopOnceExecutor(msg) - } + }(stopOnceExecutor(msg)) true } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/OnceExecutorExecutionContext.scala b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/OnceExecutorExecutionContext.scala index 5d66212069..c25d2f580c 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/OnceExecutorExecutionContext.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/OnceExecutorExecutionContext.scala @@ -22,7 +22,7 @@ import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetWriter} import org.apache.linkis.engineconn.common.creation.EngineCreationContext import org.apache.linkis.engineconn.executor.ExecutorExecutionContext import org.apache.linkis.governance.common.entity.job.OnceExecutorContent -import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetWriterFactory} +import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetWriter} class OnceExecutorExecutionContext( engineCreationContext: EngineCreationContext, @@ -40,14 +40,14 @@ class OnceExecutorExecutionContext( ): ResultSet[_ <: MetaData, _ <: Record] = resultSetFactory.getResultSetByType(resultSetType) - override protected def getDefaultResultSetByType: String = resultSetFactory.getResultSetType()(0) + override protected def getDefaultResultSetByType: String = resultSetFactory.getResultSetType(0) override protected def newResultSetWriter( resultSet: ResultSet[_ <: MetaData, _ <: Record], resultSetPath: FsPath, alias: String - 
): org.apache.linkis.common.io.resultset.ResultSetWriter[_ <: MetaData, _ <: Record] = - ResultSetWriterFactory.getResultSetWriter( + ): ResultSetWriter[_ <: MetaData, _ <: Record] = + ResultSetWriter.getResultSetWriter( resultSet, 0, resultSetPath, diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.scala b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.scala new file mode 100644 index 0000000000..ae1828d1c0 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/operator/OperableOnceEngineConnOperator.scala @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconn.once.executor.operator + +import org.apache.linkis.engineconn.common.exception.EngineConnException +import org.apache.linkis.engineconn.once.executor.OperableOnceExecutor +import org.apache.linkis.engineconn.once.executor.creation.OnceExecutorManager +import org.apache.linkis.manager.common.operator.{Operator, OperatorFactory} + +class OperableOnceEngineConnOperator extends Operator { + + import OperableOnceEngineConnOperator._ + + override def getNames: Array[String] = + Array(PROGRESS_OPERATOR_NAME, METRICS_OPERATOR_NAME, DIAGNOSIS_OPERATOR_NAME) + + override def apply(implicit parameters: Map[String, Any]): Map[String, Any] = { + val operatorName = OperatorFactory().getOperatorName(parameters) + OnceExecutorManager.getInstance.getReportExecutor match { + case operableOnceExecutor: OperableOnceExecutor => + operatorName match { + case PROGRESS_OPERATOR_NAME => + val progressInfo = operableOnceExecutor.getProgressInfo + val progressInfoMap = if (progressInfo != null && progressInfo.nonEmpty) { + progressInfo.map(progressInfo => + Map( + "id" -> progressInfo.id, + "totalTasks" -> progressInfo.totalTasks, + "runningTasks" -> progressInfo.runningTasks, + "failedTasks" -> progressInfo.failedTasks, + "succeedTasks" -> progressInfo.succeedTasks + ) + ) + } else Array.empty[Map[String, Any]] + Map("progress" -> operableOnceExecutor.getProgress, "progressInfo" -> progressInfoMap) + case METRICS_OPERATOR_NAME => + Map("metrics" -> operableOnceExecutor.getMetrics) + case DIAGNOSIS_OPERATOR_NAME => + Map("diagnosis" -> operableOnceExecutor.getDiagnosis) + case _ => + throw EngineConnException( + 20308, + s"This engineConn don't support $operatorName operator." 
+ ) + } + case _ => + throw EngineConnException(20308, s"This engineConn don't support $operatorName operator.") + } + } + +} + +object OperableOnceEngineConnOperator { + val PROGRESS_OPERATOR_NAME = "engineConnProgress" + val METRICS_OPERATOR_NAME = "engineConnMetrics" + val DIAGNOSIS_OPERATOR_NAME = "engineConnDiagnosis" +} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncConcurrentComputationExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncConcurrentComputationExecutor.scala index cf8b9c00b8..9af394da80 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncConcurrentComputationExecutor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncConcurrentComputationExecutor.scala @@ -20,7 +20,10 @@ package org.apache.linkis.engineconn.computation.executor.async import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.Utils +import org.apache.linkis.engineconn.acessible.executor.info.DefaultNodeHealthyInfoManager import org.apache.linkis.engineconn.acessible.executor.listener.event.TaskResponseErrorEvent +import org.apache.linkis.engineconn.acessible.executor.utils.AccessibleExecutorUtils +import org.apache.linkis.engineconn.acessible.executor.utils.AccessibleExecutorUtils.currentEngineIsUnHealthy import org.apache.linkis.engineconn.common.conf.EngineConnConf import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask import org.apache.linkis.engineconn.computation.executor.execute.{ @@ -29,13 +32,16 @@ import 
org.apache.linkis.engineconn.computation.executor.execute.{ } import org.apache.linkis.engineconn.computation.executor.hook.ComputationExecutorHook import org.apache.linkis.engineconn.core.EngineConnObject +import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.entity.ConcurrentExecutor import org.apache.linkis.engineconn.executor.listener.{ EngineConnSyncListenerBus, ExecutorListenerBusContext } import org.apache.linkis.governance.common.entity.ExecutionNodeStatus -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} +import org.apache.linkis.manager.common.entity.enumeration.{NodeHealthy, NodeStatus} +import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel import org.apache.linkis.protocol.engine.JobProgressInfo import org.apache.linkis.scheduler.executer._ import org.apache.linkis.scheduler.listener.JobListener @@ -48,6 +54,8 @@ import org.apache.commons.lang3.exception.ExceptionUtils import java.util import java.util.concurrent.ConcurrentHashMap +import DataWorkCloudApplication.getApplicationContext + abstract class AsyncConcurrentComputationExecutor(override val outputPrintLimit: Int = 1000) extends ComputationExecutor(outputPrintLimit) with ConcurrentExecutor @@ -97,9 +105,6 @@ abstract class AsyncConcurrentComputationExecutor(override val outputPrintLimit: }) } { e => logger.info("failed to do with hook", e) - engineExecutionContext.appendStdout( - LogUtils.generateWarn(s"failed execute hook: ${ExceptionUtils.getStackTrace(e)}") - ) } if (hookedCode.length > 100) { logger.info(s"hooked after code: ${hookedCode.substring(0, 100)} ....") @@ -207,6 +212,7 @@ abstract class AsyncConcurrentComputationExecutor(override val outputPrintLimit: s"Executor is busy but still got new task ! 
Running task num : ${getRunningTask}" ) } + runningTasks.increase() if (getRunningTask >= getConcurrentLimit) synchronized { if (getRunningTask >= getConcurrentLimit && NodeStatus.isIdle(getStatus)) { logger.info( @@ -215,13 +221,25 @@ abstract class AsyncConcurrentComputationExecutor(override val outputPrintLimit: transition(NodeStatus.Busy) } } - runningTasks.increase() } override def onJobCompleted(job: Job): Unit = { + runningTasks.decrease() job match { case asyncEngineConnJob: AsyncEngineConnJob => + val jobId = JobUtils.getJobIdFromMap(asyncEngineConnJob.getEngineConnTask.getProperties) + LoggerUtils.setJobIdMDC(jobId) + + if (getStatus == NodeStatus.Busy && getConcurrentLimit > getRunningTask) synchronized { + if (getStatus == NodeStatus.Busy && getConcurrentLimit > getRunningTask) { + logger.info( + s"running task($getRunningTask) < concurrent limit $getConcurrentLimit, now to mark engine to Unlock " + ) + transition(NodeStatus.Unlock) + } + } + job.getState match { case Succeed => succeedTasks.increase() @@ -241,22 +259,42 @@ abstract class AsyncConcurrentComputationExecutor(override val outputPrintLimit: } removeJob(asyncEngineConnJob.getEngineConnTask.getTaskId) clearTaskCache(asyncEngineConnJob.getEngineConnTask.getTaskId) - + // execute once should try to shutdown + if ( + asyncEngineConnJob.getEngineConnTask.getLables.exists(_.isInstanceOf[ExecuteOnceLabel]) + ) { + if (!hasTaskRunning()) { + logger.warn( + s"engineConnTask(${asyncEngineConnJob.getEngineConnTask.getTaskId}) is execute once, now to mark engine to Finished" + ) + ExecutorManager.getInstance.getReportExecutor.tryShutdown() + } + } + // unhealthy node should try to shutdown + if (!hasTaskRunning() && currentEngineIsUnHealthy()) { + logger.info( + s"engineConnTask(${asyncEngineConnJob.getEngineConnTask.getTaskId}) is unHealthy, now to mark engine to Finished" + ) + ExecutorManager.getInstance.getReportExecutor.tryShutdown() + } + LoggerUtils.setJobIdMDC(jobId) case _ => } - if (getStatus 
== NodeStatus.Busy && getConcurrentLimit > getRunningTask) synchronized { - if (getStatus == NodeStatus.Busy && getConcurrentLimit > getRunningTask) { - logger.info( - s"running task($getRunningTask) < concurrent limit $getConcurrentLimit, now to mark engine to Unlock " - ) - transition(NodeStatus.Unlock) - } - } } override def hasTaskRunning(): Boolean = { getRunningTask > 0 } + override def transition(toStatus: NodeStatus): Unit = { + if (getRunningTask >= getConcurrentLimit && NodeStatus.Unlock == toStatus) { + logger.info( + s"running task($getRunningTask) > concurrent limit:$getConcurrentLimit, can not to mark EC to Unlock" + ) + return + } + super.transition(toStatus) + } + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala index cb04f364fb..98f04daaa2 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala @@ -20,6 +20,7 @@ package org.apache.linkis.engineconn.computation.executor.bml import org.apache.linkis.bml.client.{BmlClient, BmlClientFactory} import org.apache.linkis.common.exception.ErrorException import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.common.conf.EngineConnConf import org.apache.linkis.engineconn.common.creation.EngineCreationContext import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext import 
org.apache.linkis.engineconn.computation.executor.hook.ComputationExecutorHook @@ -62,6 +63,7 @@ class BmlEnginePreExecuteHook extends ComputationExecutorHook with Logging { ): String = { val props = engineExecutionContext.getProperties if (null != props && props.containsKey(GovernanceConstant.TASK_RESOURCES_STR)) { +// val workDir = ComputationEngineUtils.getCurrentWorkDir val jobId = engineExecutionContext.getJobId props.get(GovernanceConstant.TASK_RESOURCES_STR) match { case resources: util.List[Object] => @@ -71,6 +73,8 @@ class BmlEnginePreExecuteHook extends ComputationExecutorHook with Logging { val resourceId = resource.get(GovernanceConstant.TASK_RESOURCE_ID_STR).toString val version = resource.get(GovernanceConstant.TASK_RESOURCE_VERSION_STR).toString val fullPath = fileName +// if (workDir.endsWith(seperator)) pathType + workDir + fileName +// else pathType + workDir + seperator + fileName val response = Utils.tryCatch { bmlClient.downloadShareResource(processUser, resourceId, version, fullPath, true) } { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala index bcd423fd21..a3f7bb49e3 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala @@ -60,7 +60,7 @@ object ComputationExecutorConf { ).getValue val ENGINE_CONCURRENT_THREAD_NUM = CommonVars( - "wds.linkis.engineconn.concurrent.thread.num", + 
"linkis.engineconn.concurrent.thread.num", 20, "Maximum thread pool of the concurrent EC" ) @@ -130,4 +130,10 @@ object ComputationExecutorConf { val ENGINE_SEND_LOG_TO_ENTRANCE_LIMIT_LENGTH = CommonVars("linkis.ec.send.log.entrance.limit.length", 2000) + val ENGINE_KERBEROS_AUTO_REFRESH_ENABLED = + CommonVars("linkis.ec.kerberos.auto.refresh.enabled", false).getValue + + val CLOSE_RS_OUTPUT_WHEN_RESET_BY_DEFAULT_ENABLED = + CommonVars("linkis.ec.rs.close.when.reset.enabled", true).getValue + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableRegister.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableRegister.scala index 8b9f95d66d..cc899cece4 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableRegister.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableRegister.scala @@ -88,7 +88,7 @@ object CSTableRegister extends Logging { val csColumns = columns.map { column => val csColumn = new CSColumn csColumn.setName(column.columnName) - csColumn.setType(column.dataType.getTypeName) + csColumn.setType(column.dataType.typeName) csColumn.setComment(column.comment) csColumn } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableResultSetWriter.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableResultSetWriter.scala index 891f37f27e..163d7cf57e 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableResultSetWriter.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSTableResultSetWriter.scala @@ -67,7 +67,7 @@ class CSTableResultSetWriter( val csColumns = getMetaData.asInstanceOf[TableMetaData].columns.map { column => val csColumn = new CSColumn csColumn.setName(column.columnName) - csColumn.setType(column.dataType.getTypeName) + csColumn.setType(column.dataType.typeName) csColumn.setComment(column.comment) csColumn } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala index 86ad2a1bf5..592d225a34 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala @@ -21,14 +21,17 @@ import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.engineconn.acessible.executor.entity.AccessibleExecutor +import org.apache.linkis.engineconn.acessible.executor.info.DefaultNodeHealthyInfoManager import org.apache.linkis.engineconn.acessible.executor.listener.event.{ TaskLogUpdateEvent, TaskResponseErrorEvent, TaskStatusChangedEvent } +import 
org.apache.linkis.engineconn.acessible.executor.utils.AccessibleExecutorUtils.currentEngineIsUnHealthy import org.apache.linkis.engineconn.common.conf.{EngineConnConf, EngineConnConstant} import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask +import org.apache.linkis.engineconn.computation.executor.exception.HookExecuteException import org.apache.linkis.engineconn.computation.executor.hook.ComputationExecutorHook import org.apache.linkis.engineconn.computation.executor.metrics.ComputationEngineConnMetrics import org.apache.linkis.engineconn.computation.executor.upstream.event.TaskStatusChangedForUpstreamMonitorEvent @@ -41,7 +44,7 @@ import org.apache.linkis.governance.common.entity.ExecutionNodeStatus import org.apache.linkis.governance.common.paser.CodeParser import org.apache.linkis.governance.common.protocol.task.{EngineConcurrentInfo, RequestTask} import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.manager.common.entity.enumeration.{NodeHealthy, NodeStatus} import org.apache.linkis.manager.label.entity.engine.{ CodeLanguageLabel, EngineType, @@ -61,6 +64,7 @@ import java.util.concurrent.atomic.AtomicInteger import scala.collection.JavaConverters._ +import DataWorkCloudApplication.getApplicationContext import com.google.common.cache.{Cache, CacheBuilder} abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) @@ -71,6 +75,7 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) private val listenerBusContext = ExecutorListenerBusContext.getExecutorListenerBusContext() + // private val taskMap: util.Map[String, EngineConnTask] = new ConcurrentHashMap[String, EngineConnTask](8) private val taskCache: Cache[String, EngineConnTask] = CacheBuilder .newBuilder() 
.expireAfterAccess(EngineConnConf.ENGINE_TASK_EXPIRE_TIME.getValue, TimeUnit.MILLISECONDS) @@ -85,13 +90,21 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) protected val runningTasks: Count = new Count + protected val pendingTasks: Count = new Count + protected val succeedTasks: Count = new Count protected val failedTasks: Count = new Count protected var lastTask: EngineConnTask = _ - private val MAX_TASK_EXECUTE_NUM = ComputationExecutorConf.ENGINE_MAX_TASK_EXECUTE_NUM.getValue + private val MAX_TASK_EXECUTE_NUM = if (null != EngineConnObject.getEngineCreationContext) { + ComputationExecutorConf.ENGINE_MAX_TASK_EXECUTE_NUM.getValue( + EngineConnObject.getEngineCreationContext.getOptions + ) + } else { + ComputationExecutorConf.ENGINE_MAX_TASK_EXECUTE_NUM.getValue + } private val CLOSE_LOCKER = new Object @@ -157,6 +170,8 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) super.close() } + // override def getName: String = ComputationExecutorConf.DEFAULT_COMPUTATION_NAME + protected def ensureOp[A](f: => A): A = if (!isEngineInitialized) { f } else ensureIdle(f) @@ -167,6 +182,11 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) engineConnTask: EngineConnTask, executeResponse: ExecuteResponse ): Unit = { + Utils.tryAndWarn { + ComputationExecutorHook.getComputationExecutorHooks.foreach { hook => + hook.afterExecutorExecute(engineConnTask, executeResponse) + } + } val executorNumber = getSucceedNum + getFailedNum if ( MAX_TASK_EXECUTE_NUM > 0 && runningTasks @@ -175,6 +195,13 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) logger.error(s"Task has reached max execute number $MAX_TASK_EXECUTE_NUM, now tryShutdown. 
") ExecutorManager.getInstance.getReportExecutor.tryShutdown() } + + // unhealthy node should try to shutdown + if (runningTasks.getCount() == 0 && currentEngineIsUnHealthy) { + logger.info("no task running and ECNode is unHealthy, now to mark engine to Finished.") + ExecutorManager.getInstance.getReportExecutor.tryShutdown() + } + } def toExecuteTask( @@ -195,7 +222,15 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) hookedCode = hook.beforeExecutorExecute(engineExecutionContext, engineCreationContext, hookedCode) }) - }(e => logger.info("failed to do with hook", e)) + } { e => + e match { + case hookExecuteException: HookExecuteException => + failedTasks.increase() + logger.error("failed to do with hook", e) + return ErrorExecuteResponse("hook execute failed task will be failed", e) + case _ => logger.info("failed to do with hook", e) + } + } if (hookedCode.length > 100) { logger.info(s"hooked after code: ${hookedCode.substring(0, 100)} ....") } else { @@ -316,6 +351,7 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) Utils.tryAndWarn(afterExecute(engineConnTask, response)) logger.info(s"Finished to execute task ${engineConnTask.getTaskId}") + // lastTask = null response } { LoggerUtils.removeJobIdMDC() @@ -394,7 +430,7 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) EngineConnObject.getEngineCreationContext.getOptions.asScala.foreach({ case (key, value) => // skip log jobId because it corresponding jobid when the ec created if (!ComputationExecutorConf.PRINT_TASK_PARAMS_SKIP_KEYS.getValue.contains(key)) { - sb.append(s"${key}=${value.toString}\n") + sb.append(s"${key}=${value}\n") } }) diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala index f192a45254..9584dd847c 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala @@ -17,9 +17,15 @@ package org.apache.linkis.engineconn.computation.executor.execute +import org.apache.linkis.DataWorkCloudApplication.getApplicationContext +import org.apache.linkis.engineconn.acessible.executor.info.DefaultNodeHealthyInfoManager +import org.apache.linkis.engineconn.acessible.executor.utils.AccessibleExecutorUtils.currentEngineIsUnHealthy +import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask +import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.entity.ConcurrentExecutor -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.manager.common.entity.enumeration.{NodeHealthy, NodeStatus} +import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel import org.apache.linkis.scheduler.executer.ExecuteResponse abstract class ConcurrentComputationExecutor(override val outputPrintLimit: Int = 1000) @@ -42,9 +48,7 @@ abstract class ConcurrentComputationExecutor(override val outputPrintLimit: Int transition(NodeStatus.Busy) } } - logger.info( - s"engineConnTask(${engineConnTask.getTaskId}) running task is ($getRunningTask) status ${getStatus}" - ) + logger.info(s"engineConnTask(${engineConnTask.getTaskId}) running task is ($getRunningTask) 
") val response = super.execute(engineConnTask) if (getStatus == NodeStatus.Busy && getConcurrentLimit > getRunningTask) { EXECUTOR_STATUS_LOCKER.synchronized { @@ -64,10 +68,47 @@ abstract class ConcurrentComputationExecutor(override val outputPrintLimit: Int override def afterExecute( engineConnTask: EngineConnTask, executeResponse: ExecuteResponse - ): Unit = {} + ): Unit = { + // execute once should try to shutdown + if (engineConnTask.getLables.exists(_.isInstanceOf[ExecuteOnceLabel])) { + if (!hasTaskRunning()) { + logger.warn( + s"engineConnTask(${engineConnTask.getTaskId}) is execute once, now to mark engine to Finished" + ) + ExecutorManager.getInstance.getReportExecutor.tryShutdown() + } + } + // unhealthy node should try to shutdown + if (!hasTaskRunning() && currentEngineIsUnHealthy) { + logger.info("no task running and ECNode is unHealthy, now to mark engine to Finished.") + ExecutorManager.getInstance.getReportExecutor.tryShutdown() + } + } override def hasTaskRunning(): Boolean = { getRunningTask > 0 } + override def transition(toStatus: NodeStatus): Unit = { + if (getRunningTask >= getConcurrentLimit && NodeStatus.Unlock == toStatus) { + logger.info( + s"running task($getRunningTask) > concurrent limit:$getConcurrentLimit, can not to mark EC to Unlock" + ) + return + } + super.transition(toStatus) + } + + override def getConcurrentLimit: Int = { + var maxTaskNum = ComputationExecutorConf.ENGINE_CONCURRENT_THREAD_NUM.getValue - 5 + if (maxTaskNum <= 0) { + logger.error( + s"max task num cannot ${maxTaskNum} < 0, should set linkis.engineconn.concurrent.thread.num > 6" + ) + maxTaskNum = 1 + } + logger.info(s"max task num $maxTaskNum") + maxTaskNum + } + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala index 7767af9797..55e2b1248b 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala @@ -21,12 +21,14 @@ import org.apache.linkis.common.io.{FsPath, MetaData, Record} import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetWriter} import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.cs.client.utils.ContextServiceUtils +import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration import org.apache.linkis.engineconn.acessible.executor.listener.event.{ TaskLogUpdateEvent, TaskProgressUpdateEvent, TaskResultCreateEvent, TaskResultSizeCreatedEvent } +import org.apache.linkis.engineconn.acessible.executor.log.LogHelper import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf import org.apache.linkis.engineconn.computation.executor.cs.CSTableResultSetWriter import org.apache.linkis.engineconn.executor.ExecutorExecutionContext @@ -40,7 +42,7 @@ import org.apache.linkis.governance.common.exception.engineconn.EngineConnExecut import org.apache.linkis.protocol.engine.JobProgressInfo import org.apache.linkis.scheduler.executer.{AliasOutputExecuteResponse, OutputExecuteResponse} import org.apache.linkis.storage.{LineMetaData, LineRecord} -import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetWriterFactory} +import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetWriter} import org.apache.linkis.storage.resultset.table.TableResultSet import 
org.apache.commons.io.IOUtils @@ -55,8 +57,7 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String private val resultSetFactory = ResultSetFactory.getInstance - private var defaultResultSetWriter - : org.apache.linkis.common.io.resultset.ResultSetWriter[_ <: MetaData, _ <: Record] = _ + private var defaultResultSetWriter: ResultSetWriter[_ <: MetaData, _ <: Record] = _ private var resultSize = 0 @@ -94,12 +95,7 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String * Note: the writer will be closed at the end of the method * @param resultSetWriter */ - def sendResultSet( - resultSetWriter: org.apache.linkis.common.io.resultset.ResultSetWriter[ - _ <: MetaData, - _ <: Record - ] - ): Unit = { + def sendResultSet(resultSetWriter: ResultSetWriter[_ <: MetaData, _ <: Record]): Unit = { logger.info("Start to send res to entrance") val fileName = new File(resultSetWriter.toFSPath.getPath).getName val index = if (fileName.indexOf(".") < 0) fileName.length else fileName.indexOf(".") @@ -154,13 +150,13 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String ): ResultSet[_ <: MetaData, _ <: Record] = resultSetFactory.getResultSetByType(resultSetType) - override protected def getDefaultResultSetByType: String = resultSetFactory.getResultSetType()(0) + override protected def getDefaultResultSetByType: String = resultSetFactory.getResultSetType(0) def newResultSetWriter( resultSet: ResultSet[_ <: MetaData, _ <: Record], resultSetPath: FsPath, alias: String - ): org.apache.linkis.common.io.resultset.ResultSetWriter[_ <: MetaData, _ <: Record] = { + ): ResultSetWriter[_ <: MetaData, _ <: Record] = { // update by 20200402 resultSet match { case result: TableResultSet => @@ -178,7 +174,7 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String csWriter.setProxyUser(executorUser) csWriter } else { - ResultSetWriterFactory.getResultSetWriter( + 
ResultSetWriter.getResultSetWriter( resultSet, ComputationExecutorConf.ENGINE_RESULT_SET_MAX_CACHE.getValue.toLong, resultSetPath, @@ -186,7 +182,7 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String ) } case _ => - ResultSetWriterFactory.getResultSetWriter( + ResultSetWriter.getResultSetWriter( resultSet, ComputationExecutorConf.ENGINE_RESULT_SET_MAX_CACHE.getValue.toLong, resultSetPath, @@ -200,15 +196,20 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String logger.info(log) } else { var taskLog = log + val limitLength = ComputationExecutorConf.ENGINE_SEND_LOG_TO_ENTRANCE_LIMIT_LENGTH.getValue if ( ComputationExecutorConf.ENGINE_SEND_LOG_TO_ENTRANCE_LIMIT_ENABLED.getValue && - log.length > ComputationExecutorConf.ENGINE_SEND_LOG_TO_ENTRANCE_LIMIT_LENGTH.getValue + log.length > limitLength ) { - taskLog = - s"${log.substring(0, ComputationExecutorConf.ENGINE_SEND_LOG_TO_ENTRANCE_LIMIT_LENGTH.getValue)}..." + taskLog = s"${log.substring(0, limitLength)}..." 
+ logger.info("The log is too long and will be intercepted,log limit length : {}", limitLength) + } + if (!AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM.getValue) { + LogHelper.cacheLog(taskLog) + } else { + val listenerBus = getEngineSyncListenerBus + getJobId.foreach(jId => listenerBus.postToAll(TaskLogUpdateEvent(jId, taskLog))) } - val listenerBus = getEngineSyncListenerBus - getJobId.foreach(jId => listenerBus.postToAll(TaskLogUpdateEvent(jId, taskLog))) } override def close(): Unit = { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala index c23e28a3a3..f9164b2cf3 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala @@ -19,7 +19,9 @@ package org.apache.linkis.engineconn.computation.executor.hook import org.apache.linkis.common.utils.{ClassUtils, Logging, Utils} import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext +import org.apache.linkis.scheduler.executer.ExecuteResponse import scala.collection.JavaConverters.asScalaSetConverter import scala.collection.mutable.ArrayBuffer @@ -36,6 +38,11 @@ trait ComputationExecutorHook { codeBeforeHook: String ): String = codeBeforeHook + def afterExecutorExecute( + engineConnTask: 
EngineConnTask, + executeResponse: ExecuteResponse + ): Unit = {} + } object ComputationExecutorHook extends Logging { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/InitSQLHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/InitSQLHook.scala index 78eb007d2c..0cb33cf3d7 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/InitSQLHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/InitSQLHook.scala @@ -33,7 +33,6 @@ import org.apache.commons.io.FileUtils import org.apache.commons.lang3.StringUtils import java.io.File -import java.nio.charset.StandardCharsets import java.util abstract class InitSQLHook extends EngineConnHook with Logging { @@ -92,7 +91,7 @@ abstract class InitSQLHook extends EngineConnHook with Logging { logger.info("read file: " + path) val file = new File(path) if (file.exists()) { - FileUtils.readFileToString(file, StandardCharsets.UTF_8) + FileUtils.readFileToString(file) } else { logger.info("file: [" + path + "] doesn't exist, ignore it.") "" diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoad.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoad.scala new file mode 100644 index 0000000000..34928d8525 --- /dev/null +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoad.scala @@ -0,0 +1,161 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconn.computation.executor.hook + +import org.apache.linkis.common.conf.Configuration.IS_VIEW_FS_ENV +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf +import org.apache.linkis.engineconn.computation.executor.execute.{ + ComputationExecutor, + EngineExecutionContext +} +import org.apache.linkis.engineconn.core.engineconn.EngineConnManager +import org.apache.linkis.engineconn.core.executor.ExecutorManager +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.RunType.RunType +import org.apache.linkis.rpc.Sender +import org.apache.linkis.udf.UDFClientConfiguration +import org.apache.linkis.udf.api.rpc.{RequestPythonModuleProtocol, ResponsePythonModuleProtocol} +import org.apache.linkis.udf.entity.PythonModuleInfoVO + +import org.apache.commons.lang3.StringUtils + +import java.util + +import 
scala.collection.JavaConverters._ +import scala.collection.mutable + +/** + * The PythonModuleLoad class is designed to load Python modules into the execution environment + * dynamically. This class is not an extension of UDFLoad, but shares a similar philosophy of + * handling dynamic module loading based on user preferences and system configurations. + */ +abstract class PythonModuleLoad extends Logging { + + /** Abstract properties to be defined by the subclass */ + protected val engineType: String + protected val runType: RunType + + protected def getEngineType(): String = engineType + + protected def constructCode(pythonModuleInfo: PythonModuleInfoVO): String + + private def queryPythonModuleRpc( + userName: String, + engineType: String + ): java.util.List[PythonModuleInfoVO] = { + val infoList = Sender + .getSender(UDFClientConfiguration.UDF_SERVICE_NAME.getValue) + .ask(RequestPythonModuleProtocol(userName, engineType)) + .asInstanceOf[ResponsePythonModuleProtocol] + .getModulesInfo() + infoList + } + + protected def getLoadPythonModuleCode: Array[String] = { + val engineCreationContext = + EngineConnManager.getEngineConnManager.getEngineConn.getEngineCreationContext + val user = engineCreationContext.getUser + + var infoList: util.List[PythonModuleInfoVO] = + Utils.tryAndWarn(queryPythonModuleRpc(user, getEngineType())) + if (infoList == null) { + logger.info("rpc get info is empty.") + infoList = new util.ArrayList[PythonModuleInfoVO]() + } + + // Rewrite module paths to the viewfs scheme when the viewfs env is enabled + if (IS_VIEW_FS_ENV.getValue) { + infoList.asScala.foreach { info => + val path = info.getPath + logger.info(s"python path: ${path}") + if (path.startsWith("hdfs") || path.startsWith("viewfs")) { + info.setPath(path.replace("hdfs://", "viewfs://")) + } else { + info.setPath("viewfs://" + path) + } + } + } else { + + infoList.asScala.foreach { info => + val path = info.getPath + logger.info(s"hdfs python path: ${path}") + if (!path.startsWith("hdfs")) { + info.setPath("hdfs://" + path) + } + } + } + + 
logger.info(s"${user} load python modules: ") + infoList.asScala.foreach(l => logger.info(s"module name:${l.getName}, path:${l.getPath}\n")) + + // 创建加载code + val codes: mutable.Buffer[String] = infoList.asScala + .filter { info => StringUtils.isNotEmpty(info.getPath) } + .map(constructCode) + // 打印codes + val str: String = codes.mkString("\n") + logger.info(s"python codes: $str") + codes.toArray + } + + private def executeFunctionCode(codes: Array[String], executor: ComputationExecutor): Unit = { + if (null == codes || null == executor) { + return + } + codes.foreach { code => + logger.info("Submit function registration to engine, code: " + code) + Utils.tryCatch(executor.executeLine(new EngineExecutionContext(executor), code)) { + t: Throwable => + logger.error("Failed to load python module", t) + null + } + } + } + + /** + * Generate and execute the code necessary for loading Python modules. + * + * @param executor + * An object capable of executing code in the current engine context. 
+ */ + protected def loadPythonModules(labels: Array[Label[_]]): Unit = { + + val codes = getLoadPythonModuleCode + logger.info(s"codes length: ${codes.length}") + if (null != codes && codes.nonEmpty) { + val executor = ExecutorManager.getInstance.getExecutorByLabels(labels) + if (executor != null) { + val className = executor.getClass.getName + logger.info(s"executor class: ${className}") + } else { + logger.error(s"Failed to load python, executor is null") + } + + executor match { + case computationExecutor: ComputationExecutor => + executeFunctionCode(codes, computationExecutor) + case _ => + } + } + logger.info(s"Successful to load python, engineType : ${engineType}") + } + +} + +// Note: The actual implementations of abstract members like `constructCode` are provided by concrete subclasses. diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoadEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoadEngineConnHook.scala new file mode 100644 index 0000000000..80eaa888b8 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonModuleLoadEngineConnHook.scala @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconn.computation.executor.hook + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.common.engineconn.EngineConn +import org.apache.linkis.engineconn.common.hook.EngineConnHook +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.CodeLanguageLabel + +abstract class PythonModuleLoadEngineConnHook + extends PythonModuleLoad + with EngineConnHook + with Logging { + + override def afterExecutionExecute( + engineCreationContext: EngineCreationContext, + engineConn: EngineConn + ): Unit = { + Utils.tryAndWarnMsg { + val codeLanguageLabel = new CodeLanguageLabel + codeLanguageLabel.setCodeType(runType.toString) + logger.info(s"engineType: ${engineType}") + val labels = Array[Label[_]](codeLanguageLabel) + loadPythonModules(labels) + }(s"Failed to load Python Modules: ${engineType}") + + } + + override def afterEngineServerStartFailed( + engineCreationContext: EngineCreationContext, + throwable: Throwable + ): Unit = { + logger.error(s"Failed to start Engine Server: ${throwable.getMessage}", throwable) + } + + override def beforeCreateEngineConn(engineCreationContext: EngineCreationContext): Unit = { + logger.info("Preparing to load Python Module...") + } + + override def beforeExecutionExecute( + engineCreationContext: EngineCreationContext, + engineConn: EngineConn + ): Unit = { + logger.info(s"Before executing command on load Python Module.") + } + 
+} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonSparkEngineHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonSparkEngineHook.scala new file mode 100644 index 0000000000..0fe554f93d --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/PythonSparkEngineHook.scala @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconn.computation.executor.hook + +import org.apache.linkis.manager.label.entity.engine.RunType +import org.apache.linkis.manager.label.entity.engine.RunType.RunType +import org.apache.linkis.udf.entity.PythonModuleInfoVO + +/** + * 定义一个用于Spark引擎的Python模块加载与执行挂钩的类 + */ +class PythonSparkEngineHook extends PythonModuleLoadEngineConnHook { + + // 设置engineType属性为"spark",表示此挂钩适用于Spark数据处理引擎 + override val engineType: String = "spark" + + // 设置runType属性为RunType.PYSPARK,表示此挂钩将执行PySpark类型的代码 + override protected val runType: RunType = RunType.PYSPARK + + // 重写constructCode方法,用于根据Python模块信息构造加载模块的代码 + override protected def constructCode(pythonModuleInfo: PythonModuleInfoVO): String = { + // 使用pythonModuleInfo的path属性,构造SparkContext.addPyFile的命令字符串 + // 这个命令在PySpark环境中将模块文件添加到所有worker上,以便在代码中可以使用 + val path: String = pythonModuleInfo.getPath + val loadCode = s"sc.addPyFile('${path}')" + logger.info(s"pythonLoadCode: ${loadCode}") + loadCode + } + +} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala index 6f31bd25e8..91af2811a6 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala @@ -42,7 +42,6 @@ import org.apache.commons.io.{FileUtils, IOUtils} import org.apache.commons.lang3.StringUtils import java.io.File -import java.nio.charset.StandardCharsets import scala.collection.JavaConverters.asScalaBufferConverter 
import scala.collection.mutable.ArrayBuffer @@ -119,7 +118,7 @@ abstract class UDFLoad extends Logging { logger.info("read file: " + path) val file = new File(path) if (file.exists()) { - FileUtils.readFileToString(file, StandardCharsets.UTF_8) + FileUtils.readFileToString(file) } else { logger.info("udf file: [" + path + "] doesn't exist, ignore it.") "" diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UseDatabaseEngineHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UseDatabaseEngineHook.scala index 9ddde065cc..d6887218b7 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UseDatabaseEngineHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/UseDatabaseEngineHook.scala @@ -34,7 +34,7 @@ import org.apache.commons.lang3.StringUtils abstract class UseDatabaseEngineHook extends EngineConnHook with Logging { - private val USE_DEFAULT_DB_ENABLE = CommonVars("linkis.bdp.use.default.db.enable", false) + private val USE_DEFAULT_DB_ENABLE = CommonVars("wds.linkis.bdp.use.default.db.enable", true) override def beforeCreateEngineConn(engineCreationContext: EngineCreationContext): Unit = {} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/executor/ExecuteOnceHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/executor/ExecuteOnceHook.scala index 77616944d2..9469c00685 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/executor/ExecuteOnceHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/executor/ExecuteOnceHook.scala @@ -28,8 +28,11 @@ import org.apache.linkis.engineconn.computation.executor.execute.EngineExecution import org.apache.linkis.engineconn.computation.executor.hook.ComputationExecutorHook import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext +import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel +import java.util + class ExecuteOnceHook extends ComputationExecutorHook with ExecutorLockListener with Logging { private var executeOnce = false @@ -47,6 +50,11 @@ class ExecuteOnceHook extends ComputationExecutorHook with ExecutorLockListener codeBeforeHook: String ): String = { executeOnce = engineExecutionContext.getLabels.exists(_.isInstanceOf[ExecuteOnceLabel]) + val creationLabelList: util.List[Label[_]] = engineCreationContext.getLabels() + if (creationLabelList != null) { + executeOnce = + executeOnce || creationLabelList.toArray().exists(_.isInstanceOf[ExecuteOnceLabel]) + } if (executeOnce && !isRegister) { isRegister = true asyncListenerBusContext.addListener(this) diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/rs/RsOutputStream.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/rs/RsOutputStream.scala index 1f8c491ced..55b88f520b 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/rs/RsOutputStream.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/rs/RsOutputStream.scala @@ -19,7 +19,8 @@ package org.apache.linkis.engineconn.computation.executor.rs import org.apache.linkis.common.io.{MetaData, Record} import org.apache.linkis.common.io.resultset.ResultSetWriter -import org.apache.linkis.common.utils.Logging +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext import org.apache.linkis.storage.LineRecord @@ -45,6 +46,9 @@ class RsOutputStream extends OutputStream with Logging { } def reset(engineExecutionContext: EngineExecutionContext): Unit = { + if (ComputationExecutorConf.CLOSE_RS_OUTPUT_WHEN_RESET_BY_DEFAULT_ENABLED) { + Utils.tryQuietly(close()) + } writer = engineExecutionContext.createDefaultResultSetWriter() writer.addMetaData(null) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala index 9dba95ef66..28df29db31 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala @@ 
-50,6 +50,7 @@ import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.entity.ResourceFetchExecutor import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext import org.apache.linkis.engineconn.executor.listener.event.EngineConnSyncEvent +import org.apache.linkis.engineconn.launch.EngineConnServer import org.apache.linkis.governance.common.constant.ec.ECConstants import org.apache.linkis.governance.common.entity.ExecutionNodeStatus import org.apache.linkis.governance.common.exception.engineconn.{ @@ -58,11 +59,13 @@ import org.apache.linkis.governance.common.exception.engineconn.{ } import org.apache.linkis.governance.common.protocol.task._ import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} +import org.apache.linkis.hadoop.common.utils.KerberosUtils import org.apache.linkis.manager.common.entity.enumeration.NodeStatus import org.apache.linkis.manager.common.protocol.resource.{ ResponseTaskRunningInfo, ResponseTaskYarnResource } +import org.apache.linkis.manager.engineplugin.common.launch.process.LaunchConstants import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.protocol.constants.TaskConstant import org.apache.linkis.protocol.message.RequestProtocol @@ -106,19 +109,12 @@ class TaskExecutionServiceImpl private lazy val executorManager = ExecutorManager.getInstance private val taskExecutedNum = new AtomicInteger(0) private var lastTask: EngineConnTask = _ - private var lastTaskFuture: Future[_] = _ + private var syncLastTaskThread: Thread = _ private var lastTaskDaemonFuture: Future[_] = _ - // for concurrent executor - private var consumerThread: Thread = _ - private var concurrentTaskQueue: BlockingQueue[EngineConnTask] = _ - @Autowired private var lockService: LockService = _ - private val asyncListenerBusContext = - ExecutorListenerBusContext.getExecutorListenerBusContext().getEngineConnAsyncListenerBus - private val syncListenerBus = 
ExecutorListenerBusContext.getExecutorListenerBusContext().getEngineConnSyncListenerBus @@ -133,18 +129,19 @@ class TaskExecutionServiceImpl "ConcurrentEngineConnThreadPool" ) - private val CONCURRENT_TASK_LOCKER = new Object - private val taskAsyncSubmitExecutor: ExecutionContextExecutorService = Utils.newCachedExecutionContext( ComputationExecutorConf.TASK_ASYNC_MAX_THREAD_SIZE, - "TaskExecution-Thread-" + ComputationEngineConstant.TASK_EXECUTION_THREAD ) @PostConstruct def init(): Unit = { LogHelper.setLogListener(this) syncListenerBus.addListener(this) + if (ComputationExecutorConf.ENGINE_KERBEROS_AUTO_REFRESH_ENABLED) { + KerberosUtils.startKerberosRefreshThread() + } } private def sendToEntrance(task: EngineConnTask, msg: RequestProtocol): Unit = { @@ -154,7 +151,8 @@ class TaskExecutionServiceImpl sender = Sender.getSender(task.getCallbackServiceInstance()) sender.send(msg) } else { - logger.warn("SendtoEntrance error, cannot find entrance instance.") + // todo + logger.debug("SendtoEntrance error, cannot find entrance instance.") } } { t => val errorMsg = s"SendToEntrance error. $msg" + t.getCause @@ -166,6 +164,12 @@ } } + /** + * submit to async thread return submit response + * @param requestTask + * @param sender + * @return + */ + @Receiver override def execute(requestTask: RequestTask, sender: Sender): ExecuteResponse = Utils.tryFinally { @@ -186,7 +190,7 @@ if (!lockService.isLockExist(requestTask.getLock)) { logger.error(s"Lock ${requestTask.getLock} not exist, cannot execute.") return ErrorExecuteResponse( - "Lock not exist", + "Lock not exist", new EngineConnExecutorErrorException( EngineConnExecutorErrorCode.INVALID_LOCK, "Lock : " + requestTask.getLock + " not exist(您的锁无效,请重新获取后再提交)." 
@@ -200,7 +204,11 @@ class TaskExecutionServiceImpl ) } - val taskId: Int = taskExecutedNum.incrementAndGet() + val taskId: String = if (StringUtils.isNotBlank(jobId)) { + jobId + } else { + String.valueOf(taskExecutedNum.incrementAndGet()) + } val retryAble: Boolean = { val retry = requestTask.getProperties.getOrDefault( @@ -215,7 +223,7 @@ class TaskExecutionServiceImpl System.getProperties.put(ComputationExecutorConf.JOB_ID_TO_ENV_KEY, jobId) logger.info(s"Received job with id ${jobId}.") } - val task = new CommonEngineConnTask(String.valueOf(taskId), retryAble) + val task = new CommonEngineConnTask(taskId, retryAble) task.setCode(requestTask.getCode) task.setProperties(requestTask.getProperties) task.data(ComputationEngineConstant.LOCK_TYPE_NAME, requestTask.getLock) @@ -276,19 +284,6 @@ class TaskExecutionServiceImpl } } - private def restExecutorLabels(labels: Array[Label[_]]): Array[Label[_]] = { - var newLabels = labels - ExecutorLabelsRestHook.getExecutorLabelsRestHooks.foreach(hooke => - newLabels = hooke.restExecutorLabels(newLabels) - ) - newLabels - } - - // override def taskStatus(taskID: String): ResponseTaskStatus = { - // val task = taskIdCache.get(taskID) - // ResponseTaskStatus(taskID, task.getStatus.id) - // } - private def submitTask( task: CommonEngineConnTask, computationExecutor: ComputationExecutor @@ -308,100 +303,58 @@ class TaskExecutionServiceImpl task: CommonEngineConnTask, computationExecutor: ComputationExecutor ): ExecuteResponse = { - var response: ExecuteResponse = SubmitResponse(task.getTaskId) - Utils.tryCatch { - computationExecutor.execute(task) - } { t => - logger.error(s"Failed to submit task${task.getTaskId} ", t) - response = ErrorExecuteResponse("Failed to submit task", t) - null - } - response + computationExecutor.execute(task) } private def submitSyncTask( task: CommonEngineConnTask, computationExecutor: ComputationExecutor ): ExecuteResponse = { - val runTask = new Runnable { - override def run(): Unit = 
Utils.tryAndWarn { - LogHelper.dropAllRemainLogs() - executeTask(task, computationExecutor) - } - } + LogHelper.dropAllRemainLogs() lastTask = task - lastTaskFuture = Utils.defaultScheduler.submit(runTask) - lastTaskDaemonFuture = openDaemonForTask(task, lastTaskFuture, Utils.defaultScheduler) - SubmitResponse(task.getTaskId) + syncLastTaskThread = Thread.currentThread() + lastTaskDaemonFuture = openDaemonForTask(task, Utils.defaultScheduler) + val res = executeTask(task, computationExecutor) + res } private def submitConcurrentTask( task: CommonEngineConnTask, executor: ConcurrentComputationExecutor ): ExecuteResponse = { - if (null == concurrentTaskQueue) CONCURRENT_TASK_LOCKER.synchronized { - if (null == concurrentTaskQueue) { - concurrentTaskQueue = new LinkedBlockingDeque[EngineConnTask]() - } - } - concurrentTaskQueue.put(task) - if (null == consumerThread) CONCURRENT_TASK_LOCKER.synchronized { - if (null == consumerThread) { - consumerThread = new Thread(createConsumerRunnable(executor)) - consumerThread.setDaemon(true) - consumerThread.setName("ConcurrentTaskQueueFifoConsumerThread") - consumerThread.start() - } - } - SubmitResponse(task.getTaskId) - } - - private def createConsumerRunnable(executor: ComputationExecutor): Thread = { - val consumerRunnable = new Runnable { + val concurrentJob = new Runnable { override def run(): Unit = { - var errCount = 0 - val ERR_COUNT_MAX = 20 - while (true) { - Utils.tryCatch { - if (!executor.isBusy && !executor.isClosed) { - val task = concurrentTaskQueue.take() - val concurrentJob = new Runnable { - override def run(): Unit = { - lastTask = task - Utils.tryCatch { - logger.info(s"Start to run task ${task.getTaskId}") - executeTask(task, executor) - } { case t: Throwable => - errCount += 1 - logger.error(s"Execute task ${task.getTaskId} failed :", t) - if (errCount > ERR_COUNT_MAX) { - logger.error( - s"Executor run failed for ${errCount} times over ERROR_COUNT_MAX : ${ERR_COUNT_MAX}, will shutdown." 
- ) - executor.transition(NodeStatus.ShuttingDown) - } - } - } - } - cachedThreadPool.submit(concurrentJob) - } - Thread.sleep(20) - } { case t: Throwable => - logger.error(s"consumerThread failed :", t) - } + Utils.tryCatch { + val jobId = JobUtils.getJobIdFromMap(task.getProperties) + LoggerUtils.setJobIdMDC(jobId) + logger.info(s"Start to run task ${task.getTaskId}") + executeTask(task, executor) + } { case t: Throwable => + logger.warn("Failed to execute task ", t) + sendToEntrance( + task, + ResponseTaskError(task.getTaskId, ExceptionUtils.getRootCauseMessage(t)) + ) + sendToEntrance(task, ResponseTaskStatus(task.getTaskId, ExecutionNodeStatus.Failed)) + LoggerUtils.removeJobIdMDC() + null } } } - new Thread(consumerRunnable) + Utils.tryCatch(cachedThreadPool.submit(concurrentJob)) { case e: Exception => + logger.error(s"Failed to submit task ${task.getTaskId}", e) + throw e + } + SubmitResponse(task.getTaskId) } - private def executeTask(task: EngineConnTask, executor: ComputationExecutor): Unit = + private def executeTask(task: EngineConnTask, executor: ComputationExecutor): ExecuteResponse = Utils.tryFinally { val jobId = JobUtils.getJobIdFromMap(task.getProperties) LoggerUtils.setJobIdMDC(jobId) executor.execute(task) - clearCache(task.getTaskId) } { + clearCache(task.getTaskId) LoggerUtils.removeJobIdMDC() } @@ -414,59 +367,54 @@ class TaskExecutionServiceImpl * scheduler * @return */ - private def openDaemonForTask( - task: EngineConnTask, - taskFuture: Future[_], - scheduler: ExecutorService - ): Future[_] = { + private def openDaemonForTask(task: EngineConnTask, scheduler: ExecutorService): Future[_] = { val sleepInterval = ComputationExecutorConf.ENGINE_PROGRESS_FETCH_INTERVAL.getValue scheduler.submit(new Runnable { override def run(): Unit = { + logger.info(s"start daemon thread ${task.getTaskId}, ${task.getStatus}") Utils.tryQuietly(Thread.sleep(TimeUnit.MILLISECONDS.convert(1, TimeUnit.SECONDS))) - while (null != taskFuture && !taskFuture.isDone) 
{ - if (!ExecutionNodeStatus.isCompleted(task.getStatus)) { - Utils.tryAndWarn { - val progressResponse = Utils.tryCatch(taskProgress(task.getTaskId)) { - case e: Exception => - logger.info("Failed to get progress", e) - null - } - val resourceResponse = Utils.tryCatch(buildResourceMap(task)) { case e: Exception => - logger.info("Failed to get resource", e) + while (!ExecutionNodeStatus.isCompleted(task.getStatus)) { + Utils.tryAndWarn { + val progressResponse = Utils.tryCatch(taskProgress(task.getTaskId)) { + case e: Exception => + logger.info("Failed to get progress", e) null - } - val extraInfoMap = Utils.tryCatch(buildExtraInfoMap(task)) { case e: Exception => - logger.info("Failed to get extra info ", e) - null - } - val resourceMap = - if (null != resourceResponse) resourceResponse.getResourceMap else null - - /** - * It is guaranteed that there must be progress the progress must be greater than or - * equal to 0.1 - */ - val newProgressResponse = if (null == progressResponse) { - ResponseTaskProgress(task.getTaskId, 0.1f, null) - } else if (progressResponse.progress < 0.1f) { - ResponseTaskProgress(task.getTaskId, 0.1f, progressResponse.progressInfo) - } else { - progressResponse - } - val respRunningInfo: ResponseTaskRunningInfo = new ResponseTaskRunningInfo( - newProgressResponse.execId, - newProgressResponse.progress, - newProgressResponse.progressInfo, - resourceMap, - extraInfoMap - ) - sendToEntrance(task, respRunningInfo) } + val resourceResponse = Utils.tryCatch(buildResourceMap(task)) { case e: Exception => + logger.info("Failed to get resource", e) + null + } + val extraInfoMap = Utils.tryCatch(buildExtraInfoMap(task)) { case e: Exception => + logger.info("Failed to get extra info ", e) + null + } + val resourceMap = if (null != resourceResponse) resourceResponse.resourceMap else null + + /** + * It is guaranteed that there must be progress the progress must be greater than or + * equal to 0.1 + */ + val newProgressResponse = if (null == 
progressResponse) { + ResponseTaskProgress(task.getTaskId, 0.1f, null) + } else if (progressResponse.progress < 0.1f) { + ResponseTaskProgress(task.getTaskId, 0.1f, progressResponse.progressInfo) + } else { + progressResponse + } + val respRunningInfo: ResponseTaskRunningInfo = ResponseTaskRunningInfo( + newProgressResponse.execId, + newProgressResponse.progress, + newProgressResponse.progressInfo, + resourceMap, + extraInfoMap + ) + sendToEntrance(task, respRunningInfo) } Utils.tryQuietly( Thread.sleep(TimeUnit.MILLISECONDS.convert(sleepInterval, TimeUnit.SECONDS)) ) } + logger.info(s"daemon thread exit ${task.getTaskId}, ${task.getStatus}") } }) } @@ -495,7 +443,7 @@ class TaskExecutionServiceImpl taskYarnResource(task.getTaskId) match { case responseTaskYarnResource: ResponseTaskYarnResource => if ( - responseTaskYarnResource.getResourceMap != null && !responseTaskYarnResource.getResourceMap.isEmpty + responseTaskYarnResource.resourceMap != null && !responseTaskYarnResource.resourceMap.isEmpty ) { responseTaskYarnResource } else { @@ -512,7 +460,7 @@ class TaskExecutionServiceImpl executor match { case executor: ResourceFetchExecutor => val resourceWithApplicationId = executor.FetchResource - new ResponseTaskYarnResource(taskID, resourceWithApplicationId) + ResponseTaskYarnResource(taskID, resourceWithApplicationId) case _ => null } } @@ -553,16 +501,20 @@ class TaskExecutionServiceImpl override def killTask(taskID: String): Unit = { val executor = taskIdCache.getIfPresent(taskID) if (null != executor) { - executor.killTask(taskID) + Utils.tryAndWarn(executor.killTask(taskID)) logger.info(s"TaskId : ${taskID} was killed by user.") } else { logger.error(s"Kill failed, got invalid executor : null for taskId : ${taskID}") } if (null != lastTask && lastTask.getTaskId.equalsIgnoreCase(taskID)) { - if (null != lastTaskFuture && !lastTaskFuture.isDone) { - Utils.tryAndWarn { - lastTaskFuture.cancel(true) - } + if (null != syncLastTaskThread) { + logger.info(s"try to 
interrupt thread:${taskID}") + Utils.tryAndWarn(syncLastTaskThread.interrupt()) + logger.info(s"thread isInterrupted:${taskID}") + } else { + logger.info(s"skip to force stop thread:${taskID}") + } + if (null != lastTaskDaemonFuture && !lastTaskDaemonFuture.isDone) { Utils.tryAndWarn { // Close the daemon also lastTaskDaemonFuture.cancel(true) @@ -630,7 +582,7 @@ class TaskExecutionServiceImpl if (null != task) { sendToEntrance(task, ResponseTaskLog(logUpdateEvent.taskId, logUpdateEvent.log)) } else { - logger.warn("Task cannot null! logupdateEvent: " + logUpdateEvent.taskId) + logger.error("Task cannot null! logupdateEvent: " + logUpdateEvent.taskId) } } else if (null != lastTask) { val executor = executorManager.getReportExecutor @@ -656,7 +608,6 @@ class TaskExecutionServiceImpl val task = getTaskByTaskId(taskStatusChangedEvent.taskId) if (null != task) { if (ExecutionNodeStatus.isCompleted(taskStatusChangedEvent.toStatus)) { - lastTask = task LogHelper.pushAllRemainLogs() } val toStatus = taskStatusChangedEvent.toStatus @@ -688,9 +639,9 @@ class TaskExecutionServiceImpl val resourceResponse = buildResourceMap(task) val extraInfoMap = buildExtraInfoMap(task) - val resourceMap = if (null != resourceResponse) resourceResponse.getResourceMap else null + val resourceMap = if (null != resourceResponse) resourceResponse.resourceMap else null - val respRunningInfo: ResponseTaskRunningInfo = new ResponseTaskRunningInfo( + val respRunningInfo: ResponseTaskRunningInfo = ResponseTaskRunningInfo( taskProgressUpdateEvent.taskId, taskProgressUpdateEvent.progress, taskProgressUpdateEvent.progressInfo, @@ -732,7 +683,7 @@ class TaskExecutionServiceImpl if (null != executor) { executor.getTaskById(taskId) } else { - logger.warn(s"Executor of taskId : $taskId is not cached.") + logger.error(s"Executor of taskId : $taskId is not cached.") null } } diff --git 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala index 1f13380c01..c7d78ef9eb 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala @@ -57,7 +57,7 @@ class ECTaskEntranceMonitor def unregister(taskID: String): Unit = { if (!wrapperMap.containsKey(taskID)) { - logger.error("attempted to unregister non-existing EngineConnTask!! task-id: " + taskID) + logger.warn("attempted to unregister non-existing EngineConnTask!! 
task-id: " + taskID) } wrapperMap.remove(taskID) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala index f2b894ef91..3cc1fdfe82 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala @@ -18,6 +18,8 @@ package org.apache.linkis.engineconn.computation.executor.upstream import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration +import org.apache.linkis.engineconn.acessible.executor.service.ExecutorHeartbeatServiceHolder import org.apache.linkis.engineconn.common.exception.EngineConnException import org.apache.linkis.engineconn.computation.executor.upstream.access.{ ConnectionInfoAccess, @@ -29,6 +31,8 @@ import org.apache.linkis.engineconn.computation.executor.upstream.handler.{ } import org.apache.linkis.engineconn.computation.executor.upstream.wrapper.ConnectionInfoWrapper import org.apache.linkis.engineconn.computation.executor.utlis.ComputationErrorCode +import org.apache.linkis.engineconn.core.executor.ExecutorManager +import org.apache.linkis.engineconn.executor.entity.ConcurrentExecutor import org.apache.commons.lang3.concurrent.BasicThreadFactory @@ -112,6 +116,23 @@ abstract class SingleThreadUpstreamConnectionMonitor( "requesting connection info: " + util.Arrays 
.toString(Collections.list(wrapperMap.keys).toArray()) ) + if (AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM.getValue) { + val executor = ExecutorManager.getInstance.getReportExecutor + executor match { + case concurrentExecutor: ConcurrentExecutor => + if (toBeRequested.size() > (concurrentExecutor.getConcurrentLimit + 20)) { + logger.warn( + s"Executor running task has exceed the limit ${toBeRequested.size()}, executor id ${concurrentExecutor.getId}" + ) + ExecutorHeartbeatServiceHolder + .getDefaultHeartbeatService() + .setSelfUnhealthy( + s"running task has exceed the limit: ${concurrentExecutor.getConcurrentLimit}" + ) + } + case _ => + } + } val infoAccessRequest = generateInfoAccessRequest(toBeRequested) val connectionInfoList = infoAccess.getUpstreamInfo(infoAccessRequest) logger.info( diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala index 0acc47260e..44a80cff62 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala @@ -66,8 +66,7 @@ class ECTaskEntranceMonitorService logger.info("registering new task: " + event.taskId) eCTaskEntranceMonitor.register(event.task, event.executor) } else if ( - fromStatus == ExecutionNodeStatus.Running && - (toStatus == ExecutionNodeStatus.Succeed || toStatus == ExecutionNodeStatus.Failed || toStatus == 
ExecutionNodeStatus.Cancelled || toStatus == ExecutionNodeStatus.Timeout) + !ExecutionNodeStatus.isCompleted(fromStatus) && ExecutionNodeStatus.isCompleted(toStatus) ) { logger.info("unRegistering task: " + event.taskId) eCTaskEntranceMonitor.unregister(event.task.getTaskId) diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationEngineConstant.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationEngineConstant.scala index 2a03b405cb..4e50fc0cf0 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationEngineConstant.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationEngineConstant.scala @@ -27,4 +27,6 @@ object ComputationEngineConstant { def CS_HOOK_ORDER: Int = -1 + val TASK_EXECUTION_THREAD = "TaskExecution-Thread-" + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala index 513eac0bd0..a20358b57c 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala @@ -81,13 +81,9 @@ object EngineConnConf { val HIVE_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX = 
CommonVars("wds.linkis.hive.engine.yarn.app.id.parse.regex", "(application_\\d{13}_\\d+)") - val SEATUNNEL_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX = - CommonVars("wds.linkis.seatunnel.engine.yarn.app.id.parse.regex", "(application_\\d{13}_\\d+)") - val JOB_YARN_TASK_URL = CommonVars("linkis.job.task.yarn.url", ""); val JOB_YARN_CLUSTER_TASK_URL = CommonVars("linkis.job.task.yarn.cluster.url", ""); - def getWorkHome: String = System.getenv(ENGINE_CONN_LOCAL_PATH_PWD_KEY.getValue) def getEngineTmpDir: String = System.getenv(ENGINE_CONN_LOCAL_TMP_DIR.getValue) diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConstant.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConstant.scala index cdc14592d8..fec2756f9f 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConstant.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConstant.scala @@ -27,5 +27,5 @@ object EngineConnConstant { var hiveLogReg = "The url to track the job: http://.*?/proxy/(application_[0-9]+_[0-9]+)/" - val YARN_LOG_URL = "INFO yarn application url:" + val YARN_LOG_URL = "Yarn application url:" } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/util/EngineConnUtils.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/util/EngineConnUtils.scala index aa69bdae8c..d2247a6d2e 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/util/EngineConnUtils.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/util/EngineConnUtils.scala @@ -17,7 +17,7 @@ package org.apache.linkis.engineconn.core.util -import com.google.gson.{Gson, GsonBuilder, ToNumberPolicy} +import com.google.gson.{GsonBuilder, ToNumberPolicy} object EngineConnUtils { diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/AbstractLogCache.java similarity index 50% rename from linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.java rename to linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/AbstractLogCache.java index f8e01a682c..a1d74fadc6 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/script/compaction/CommonScriptCompaction.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/AbstractLogCache.java @@ -15,33 +15,24 @@ * limitations under the License. 
*/ -package org.apache.linkis.storage.script.compaction; +package org.apache.linkis.engineconn.acessible.executor.log; -import org.apache.linkis.storage.script.Compaction; -import org.apache.linkis.storage.script.Variable; +import org.apache.linkis.common.log.LogUtils; -public abstract class CommonScriptCompaction implements Compaction { +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.core.LogEvent; - @Override - public String compact(Variable variable) { - switch (variable.getSortParent()) { - case "variable": - return prefix() + " " + variable.getKey() + "=" + variable.getValue(); - default: - return prefixConf() - + " " - + variable.getSortParent() - + " " - + variable.getSort() - + " " - + variable.getKey() - + "=" - + variable.getValue(); +public abstract class AbstractLogCache implements LogCache { + protected String generateLog(LogEvent event) { + if (event.getLevel() == Level.INFO) { + return LogUtils.generateInfo(event.getMessage().toString()); + } else if (event.getLevel() == Level.WARN) { + return LogUtils.generateWarn(event.getMessage().toString()); + } else if (event.getLevel() == Level.ERROR) { + return LogUtils.generateERROR(event.getMessage().toString()); + } else if (event.getLevel() == Level.FATAL) { + return LogUtils.generateSystemError(event.getMessage().toString()); } - } - - @Override - public String getAnnotationSymbol() { - return prefix().split("@")[0]; + return ""; } } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/MountLogCache.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/MountLogCache.java index e158bdc86b..e80298cef1 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/MountLogCache.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/MountLogCache.java @@ -27,7 +27,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class MountLogCache implements LogCache { +public class MountLogCache extends AbstractLogCache { private static final Logger logger = LoggerFactory.getLogger(MountLogCache.class); diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/SendAppender.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/SendAppender.java index b27464285d..05976bb2c7 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/SendAppender.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/SendAppender.java @@ -20,16 +20,18 @@ import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration; import org.apache.linkis.engineconn.common.conf.EngineConnConf; import org.apache.linkis.engineconn.common.conf.EngineConnConstant; +import org.apache.linkis.engineconn.common.creation.EngineCreationContext; +import org.apache.linkis.engineconn.core.EngineConnObject; import org.apache.linkis.engineconn.executor.listener.EngineConnSyncListenerBus; import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext; +import 
org.apache.linkis.manager.label.constant.LabelKeyConstant; +import org.apache.linkis.manager.label.entity.Label; -import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.core.Filter; import org.apache.logging.log4j.core.Layout; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.appender.AbstractAppender; -import org.apache.logging.log4j.core.config.Property; import org.apache.logging.log4j.core.config.plugins.Plugin; import org.apache.logging.log4j.core.config.plugins.PluginAttribute; import org.apache.logging.log4j.core.config.plugins.PluginElement; @@ -70,9 +72,12 @@ public SendAppender( final Filter filter, final Layout layout, final boolean ignoreExceptions) { - super(name, filter, layout, ignoreExceptions, Property.EMPTY_ARRAY); + super(name, filter, layout, ignoreExceptions); this.logCache = LogHelper.logCache(); + // SendThread thread = new SendThread(); logger.info("SendAppender init success"); + // TIMER.schedule(thread, 2000, (Integer) + // AccessibleExecutorConfiguration.ENGINECONN_LOG_SEND_TIME_INTERVAL().getValue()); } @Override @@ -96,7 +101,7 @@ public void append(LogEvent event) { } } if (!flag) { - logStr = matchLog(logStr); + // logStr = matchLog(logStr); logCache.cacheLog(logStr); } } else { @@ -120,15 +125,26 @@ public static SendAppender createAppender( return new SendAppender(name, filter, layout, ignoreExceptions); } + /** + * * + * + *

Match the hive log, if it matches the yarn log, print the log and replace it + */ public String matchLog(String logLine) { - String yarnUrl = EngineConnConf.JOB_YARN_TASK_URL().getValue(); - if (StringUtils.isNotBlank(yarnUrl)) { - Matcher hiveMatcher = Pattern.compile(EngineConnConstant.hiveLogReg()).matcher(logLine); - if (hiveMatcher.find()) { - logLine = - hiveMatcher.replaceAll( - EngineConnConstant.YARN_LOG_URL() + yarnUrl + hiveMatcher.group(1)); + Matcher hiveMatcher = Pattern.compile(EngineConnConstant.hiveLogReg()).matcher(logLine); + if (hiveMatcher.find()) { + String yarnUrl = EngineConnConf.JOB_YARN_TASK_URL().getValue(); + EngineCreationContext engineContext = EngineConnObject.getEngineCreationContext(); + if (null != engineContext) { + for (Label label : engineContext.getLabels()) { + if (label.getLabelKey().equals(LabelKeyConstant.YARN_CLUSTER_KEY)) { + yarnUrl = EngineConnConf.JOB_YARN_CLUSTER_TASK_URL().getValue(); + } + } } + logLine = + hiveMatcher.replaceAll( + EngineConnConstant.YARN_LOG_URL() + yarnUrl + hiveMatcher.group(1)); } return logLine; } diff --git a/linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/listener/test/TestContextValue.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/TimeLogCache.java similarity index 68% rename from linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/listener/test/TestContextValue.java rename to linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/TimeLogCache.java index 8c4d4d1447..2ba3efc63d 100644 --- a/linkis-public-enhancements/linkis-pes-common/src/test/java/org/apache/linkis/cs/listener/test/TestContextValue.java +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/log/TimeLogCache.java @@ -15,30 +15,27 @@ * limitations under the License. */ -package org.apache.linkis.cs.listener.test; +package org.apache.linkis.engineconn.acessible.executor.log; -import org.apache.linkis.cs.common.entity.source.ContextValue; +import java.util.List; -public class TestContextValue implements ContextValue { - private Object value; - - private String keywords; +/** Description: Cache with time as storage unit(以时间作为存储单位的缓存方式) */ +public class TimeLogCache extends AbstractLogCache { + @Override + public void cacheLog(String log) {} @Override - public String getKeywords() { + public List getLog(int num) { return null; } @Override - public void setKeywords(String keywords) {} - - @Override - public Object getValue() { - return this.value; + public List getRemain() { + return null; } @Override - public void setValue(Object value) { - this.value = value; + public int size() { + return 0; } } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.java deleted file mode 100644 index 66e1c575f0..0000000000 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineconn.acessible.executor.operator.impl; - -import org.apache.linkis.engineconn.common.exception.EngineConnException; -import org.apache.linkis.engineconn.core.executor.ExecutorManager$; -import org.apache.linkis.engineconn.core.executor.LabelExecutorManager; -import org.apache.linkis.engineconn.executor.entity.Executor; -import org.apache.linkis.engineconn.executor.entity.YarnExecutor; -import org.apache.linkis.governance.common.constant.ec.ECConstants; -import org.apache.linkis.manager.common.operator.Operator; - -import java.util.HashMap; -import java.util.Map; - -public class EngineConnApplicationInfoOperator implements Operator { - - public static final String OPERATOR_NAME = "engineConnYarnApplication"; - - @Override - public String[] getNames() { - return new String[] {OPERATOR_NAME}; - } - - @Override - public Map apply(Map parameters) { - LabelExecutorManager instance = ExecutorManager$.MODULE$.getInstance(); - Executor reportExecutor = instance.getReportExecutor(); - if (reportExecutor instanceof YarnExecutor) { - YarnExecutor yarnExecutor = (YarnExecutor) reportExecutor; - Map result = new HashMap<>(); - result.put(ECConstants.YARN_APPID_NAME_KEY(), yarnExecutor.getApplicationId()); - result.put(ECConstants.YARN_APP_URL_KEY(), 
yarnExecutor.getApplicationURL()); - result.put(ECConstants.QUEUE(), yarnExecutor.getQueue()); - result.put(ECConstants.YARN_MODE_KEY(), yarnExecutor.getYarnMode()); - return result; - } else { - throw new EngineConnException( - 20301, "EngineConn is not a yarn application, cannot fetch applicaiton info."); - } - } -} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala index 6e6b8b5986..e99f5f21c1 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala @@ -242,7 +242,7 @@ class AccessibleEngineConnExecution extends EngineConnExecution with Logging { case resourceExecutor: ResourceExecutor => ManagerService.getManagerService .reportUsedResource( - new ResourceUsedProtocol( + ResourceUsedProtocol( Sender.getThisServiceInstance, resourceExecutor.getCurrentNodeResource(), engineCreationContext.getTicketId diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/info/NodeHealthyInfoManager.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/info/NodeHealthyInfoManager.scala index 98f88508e8..6b96b6d4ed 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/info/NodeHealthyInfoManager.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/info/NodeHealthyInfoManager.scala @@ -48,7 +48,7 @@ class DefaultNodeHealthyInfoManager extends NodeHealthyInfoManager with Logging val nodeHealthyInfo = new NodeHealthyInfo nodeHealthyInfo.setMsg("") - /** If it is actively set by the manager, then the manager setting shall prevail */ + /** 如果是manager主动设置的,则以manager设置的为准 */ val newHealthy: NodeHealthy = if (this.setByManager) { this.healthy } else { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala index 260e675a1d..a7169697ae 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala @@ -44,10 +44,17 @@ object LogHelper extends Logging { def setLogListener(logListener: LogListener): Unit = this.logListener = logListener + def cacheLog(log: String): Unit = { + logCache.cacheLog(log) + } + def pushAllRemainLogs(): Unit = { + // logger.info(s"start to push all remain logs") Thread.sleep(30) + // logCache.synchronized{ if (logListener == null) { logger.warn("logListener is null, can not push remain logs") + // return } else { var logs: 
util.List[String] = null diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala new file mode 100644 index 0000000000..c7635615e0 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconn.acessible.executor.operator.impl + +import org.apache.linkis.engineconn.acessible.executor.service.OperateService +import org.apache.linkis.engineconn.common.exception.EngineConnException +import org.apache.linkis.engineconn.core.executor.ExecutorManager +import org.apache.linkis.engineconn.executor.entity.YarnExecutor +import org.apache.linkis.governance.common.constant.ec.ECConstants._ +import org.apache.linkis.manager.common.operator.Operator + +class EngineConnApplicationInfoOperator extends Operator { + + override def getNames: Array[String] = Array(EngineConnApplicationInfoOperator.OPERATOR_NAME) + + override def apply(implicit parameters: Map[String, Any]): Map[String, Any] = { + ExecutorManager.getInstance.getReportExecutor match { + case yarnExecutor: YarnExecutor => + Map( + YARN_APPID_NAME_KEY -> yarnExecutor.getApplicationId, + YARN_APP_URL_KEY -> yarnExecutor.getApplicationURL, + QUEUE -> yarnExecutor.getQueue, + YARN_MODE_KEY -> yarnExecutor.getYarnMode + ) + case _ => + throw EngineConnException( + 20301, + "EngineConn is not a yarn application, cannot fetch application info." 
+ ) + } + } + +} + +object EngineConnApplicationInfoOperator { + val OPERATOR_NAME = "engineConnYarnApplication" +} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala index 7d4dc3b08a..fe3d731b7d 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala @@ -57,8 +57,7 @@ class DefaultManagerService extends ManagerService with Logging { logger.info("engineType labels is empty, Not reported") return } - val labelReportRequest = - new LabelReportRequest(reportLabel.asJava, Sender.getThisServiceInstance) + val labelReportRequest = LabelReportRequest(reportLabel.asJava, Sender.getThisServiceInstance) getManagerSender.send(labelReportRequest) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala index 20399711bd..c0ef50636d 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala @@ -30,8 +30,6 @@ import org.apache.commons.lang3.exception.ExceptionUtils import org.springframework.stereotype.Service -import java.util - import scala.collection.JavaConverters.mapAsScalaMapConverter @Service @@ -42,36 +40,25 @@ class DefaultOperateService extends OperateService with Logging { engineOperateRequest: EngineOperateRequest ): EngineOperateResponse = { var response: EngineOperateResponse = null - val parameters = { - val map = new util.HashMap[String, Object]() - engineOperateRequest.getParameters.asScala.foreach(entry => map.put(entry._1, entry._2)) - map - } - val operator = Utils.tryCatch(OperatorFactory.apply().getOperatorRequest(parameters)) { t => - logger.error(s"Get operator failed, parameters is ${engineOperateRequest.getParameters}.", t) - response = new EngineOperateResponse( - new util.HashMap[String, Object](), - true, - ExceptionUtils.getRootCauseMessage(t) - ) + + val parameters = engineOperateRequest.parameters.asScala.toMap + val operator = Utils.tryCatch(OperatorFactory().getOperatorRequest(parameters)) { t => + logger.error(s"Get operator failed, parameters is ${engineOperateRequest.parameters}.", t) + response = EngineOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) doPostHook(engineOperateRequest, response) return response } logger.info( - s"Try to execute operator ${operator.getClass.getSimpleName} with parameters ${engineOperateRequest.getParameters}." + s"Try to execute operator ${operator.getClass.getSimpleName} with parameters ${engineOperateRequest.parameters}." 
) val result = Utils.tryCatch(operator(parameters)) { t => logger.error(s"Execute ${operator.getClass.getSimpleName} failed.", t) - response = new EngineOperateResponse( - new util.HashMap[String, Object](), - true, - ExceptionUtils.getRootCauseMessage(t) - ) + response = EngineOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) doPostHook(engineOperateRequest, response) return response } logger.info(s"End to execute operator ${operator.getClass.getSimpleName}.") - response = new EngineOperateResponse(result) + response = EngineOperateResponse(result) doPostHook(engineOperateRequest, response) response } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala index b5bbc26f92..026234e938 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala @@ -81,12 +81,12 @@ class EngineConnTimedLockService extends LockService with Logging { @throws[EngineConnExecutorErrorException] override def tryLock(requestEngineLock: RequestEngineLock): Option[String] = synchronized { if (null != engineConnLock && engineConnLock.isAcquired()) return None - this.lockType = requestEngineLock.getLockType + this.lockType = requestEngineLock.lockType lockType match { case EngineLockType.Always => timedLock(-1) case EngineLockType.Timed => - 
timedLock(requestEngineLock.getTimeout) + timedLock(requestEngineLock.timeout) case o: Any => logger.error("Invalid lockType : " + BDPJettyServerHelper.gson.toJson(o)) return Some(null) @@ -172,11 +172,11 @@ class EngineConnTimedLockService extends LockService with Logging { @Receiver override def requestUnLock(requestEngineUnlock: RequestEngineUnlock): ResponseEngineUnlock = { - if (StringUtils.isBlank(requestEngineUnlock.getLock)) { + if (StringUtils.isBlank(requestEngineUnlock.lock)) { logger.error("Invalid requestEngineUnlock: ") - new ResponseEngineUnlock(false) + ResponseEngineUnlock(false) } else { - new ResponseEngineUnlock(unlock(requestEngineUnlock.getLock)) + ResponseEngineUnlock(unlock(requestEngineUnlock.lock)) } } @@ -221,7 +221,7 @@ class EngineConnConcurrentLockService extends LockService { @Receiver override def requestUnLock(requestEngineUnlock: RequestEngineUnlock): ResponseEngineUnlock = - new ResponseEngineUnlock(true) + ResponseEngineUnlock(true) override def onAddLock(addLockEvent: ExecutorLockEvent): Unit = {} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/LockService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/LockService.scala index 28e4720953..0860076e40 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/LockService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/LockService.scala @@ -49,15 +49,14 @@ trait LockService extends ExecutorLockListener with Logging { // Engine can be locked if (!StringUtils.isBlank(lockStr)) { // lock 
success - response = - new ResponseEngineLock(true, lockStr, s"Lock for ${requestEngineLock.getTimeout} ms") + response = ResponseEngineLock(true, lockStr, s"Lock for ${requestEngineLock.timeout} ms") } else { // lock failed - response = new ResponseEngineLock(false, lockStr, "lock str is blank") + response = ResponseEngineLock(false, lockStr, "lock str is blank") } case None => // Engine is busy - response = new ResponseEngineLock(false, null, "Engine is busy.") + response = ResponseEngineLock(false, null, "Engine is busy.") } logger.info( "RequestLock : " + BDPJettyServerHelper.gson.toJson( diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/utils/AccessableExecutorUtils.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/utils/AccessableExecutorUtils.scala index 27dd91cebe..9b4a3ebc28 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/utils/AccessableExecutorUtils.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/utils/AccessableExecutorUtils.scala @@ -26,6 +26,7 @@ object AccessibleExecutorUtils { val manager: DefaultNodeHealthyInfoManager = getApplicationContext.getBean(classOf[DefaultNodeHealthyInfoManager]) + /** 当前引擎是否不健康 不健康返回 true */ def currentEngineIsUnHealthy(): Boolean = { manager != null && manager.getNodeHealthy() == NodeHealthy.UnHealthy } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala index 07cfa51d0a..dfe1137084 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala @@ -29,10 +29,13 @@ import org.apache.linkis.engineconn.common.conf.EngineConnConf import org.apache.linkis.engineconn.common.creation.EngineCreationContext import org.apache.linkis.engineconn.common.engineconn.EngineConn import org.apache.linkis.engineconn.common.hook.EngineConnHook +import org.apache.linkis.engineconn.core.EngineConnObject import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.core.hook.ShutdownHook import org.apache.linkis.manager.common.entity.enumeration.NodeStatus import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallback +import org.apache.linkis.manager.label.constant.LabelValueConstant +import org.apache.linkis.manager.label.utils.LabelUtil import org.apache.linkis.rpc.Sender import org.apache.linkis.server.conf.ServerConfiguration @@ -60,7 +63,15 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { val newMap = map.++(parser.getSpringConfMap) newMap.put("spring.mvc.servlet.path", ServerConfiguration.BDP_SERVER_RESTFUL_URI.getValue) DataWorkCloudApplication.main(DWCArgumentsParser.formatSpringOptions(newMap.toMap)) + val context = EngineConnObject.getEngineCreationContext + val label = LabelUtil.getEngingeConnRuntimeModeLabel(context.getLabels()) + if (null != label && label.getModeValue.equals(LabelValueConstant.YARN_CLUSTER_VALUE)) { + logger.info("cluster mode call 
back will be invoke in beforeExecutionExecute") + } else { + val engineConnPidCallBack = new EngineConnIdentifierCallback() + Utils.tryAndError(engineConnPidCallBack.callback()) + } logger.info("<--------------------SpringBoot App init succeed-------------------->") } @@ -68,8 +79,14 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { engineCreationContext: EngineCreationContext, engineConn: EngineConn ): Unit = { - val engineConnIdentifierCallback = new EngineConnIdentifierCallback() - Utils.tryAndError(engineConnIdentifierCallback.callback()) + val context = EngineConnObject.getEngineCreationContext + + val label = LabelUtil.getEngingeConnRuntimeModeLabel(context.getLabels()) + if (null != label && label.getModeValue.equals(LabelValueConstant.YARN_CLUSTER_VALUE)) { + logger.info("cluster mode call back be invoke") + val engineConnPidCallBack = new EngineConnIdentifierCallback() + Utils.tryAndError(engineConnPidCallBack.callback()) + } } override def afterExecutionExecute( @@ -85,7 +102,7 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { val prefixMsg = Sender.getThisServiceInstance + s": log dir: ${EngineConnConf.getLogDir}," Utils.tryAndError( engineConnAfterStartCallback.callback( - new EngineConnStatusCallback( + EngineConnStatusCallback( Sender.getThisServiceInstance, engineCreationContext.getTicketId, NodeStatus.Failed, @@ -125,7 +142,7 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { val engineConnAfterStartCallback = new EngineConnAfterStartCallback Utils.tryAndError( engineConnAfterStartCallback.callback( - new EngineConnStatusCallback( + EngineConnStatusCallback( Sender.getThisServiceInstance, engineCreationContext.getTicketId, getNodeStatusOfStartSuccess(engineCreationContext, engineConn), diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala index efd74e9077..d1eb83d391 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala @@ -35,7 +35,7 @@ abstract class AbstractEngineConnStartUpCallback() extends EngineConnCallback wi def callback(protocol: RequestProtocol): Unit = { protocol match { case protocol: EngineConnStatusCallback => - if (protocol.getStatus().equals(NodeStatus.Failed)) { + if (protocol.status.equals(NodeStatus.Failed)) { logger.error(s"EngineConn Start Failed protocol will send to LM: ${protocol}") } else { logger.info(s"protocol will send to lm: ${protocol}") diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnIdentifierCallback.scala similarity index 99% rename from linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala rename to linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnIdentifierCallback.scala index 71f71f1999..8b9e3ad36e 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnIdentifierCallback.scala @@ -30,6 +30,7 @@ import java.lang.management.ManagementFactory class EngineConnIdentifierCallback extends AbstractEngineConnStartUpCallback { override def callback(): Unit = { + var identifier = ManagementFactory.getRuntimeMXBean.getName.split("@")(0) val instance = Sender.getThisServiceInstance val context = EngineConnObject.getEngineCreationContext diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnTimedCallback.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnTimedCallback.scala new file mode 100644 index 0000000000..f323bf2b2f --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnTimedCallback.scala @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconn.callback.service + +trait EngineConnTimedCallback extends EngineConnCallback {} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/ExecutorExecutionContext.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/ExecutorExecutionContext.scala index a7675287e0..7f70e21e21 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/ExecutorExecutionContext.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/ExecutorExecutionContext.scala @@ -20,11 +20,11 @@ package org.apache.linkis.engineconn.executor import org.apache.linkis.common.io.{FsPath, MetaData, Record} import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetWriter} import org.apache.linkis.common.utils.Utils -import org.apache.linkis.governance.common.conf.GovernanceCommonConf +import org.apache.linkis.engineconn.executor.conf.EngineConnExecutorConfiguration +import org.apache.linkis.governance.common.utils.GovernanceUtils import org.apache.linkis.manager.label.entity.Label import org.apache.commons.lang3.StringUtils -import org.apache.commons.lang3.time.DateFormatUtils import java.util.concurrent.atomic.AtomicInteger @@ -60,9 +60,12 @@ trait 
ExecutorExecutionContext { def setLabels(labels: Array[Label[_]]): Unit = this.labels = labels protected def getDefaultStorePath: String = { - val path = GovernanceCommonConf.RESULT_SET_STORE_PATH.getValue - val pathPrefix = (if (path.endsWith("/")) path else path + "/") + Utils.getJvmUser + "/" + - DateFormatUtils.format(System.currentTimeMillis(), "yyyyMMdd") + "/" + val path = if (EngineConnExecutorConfiguration.LINKIS_RES_DEFAULT_ENABLED) { + GovernanceUtils.getResultParentPath(GovernanceUtils.LINKIS_DEFAULT_RES_CREATOR) + } else { + "hdfs:///apps-data/" + Utils.getJvmUser + } + val pathPrefix = (if (path.endsWith("/")) path else path + "/") + Utils.getJvmUser + "/" getJobId.map(pathPrefix + _ + "/" + System.nanoTime).getOrElse(pathPrefix + System.nanoTime) } @@ -81,11 +84,11 @@ trait ExecutorExecutionContext { protected def getDefaultResultSetByType: String def createDefaultResultSetWriter(): ResultSetWriter[_ <: MetaData, _ <: Record] = { - createResultSetWriter(getResultSetByType(getDefaultResultSetByType)) // todo check + createResultSetWriter(getResultSetByType(getDefaultResultSetByType)) } def createDefaultResultSetWriter(alias: String): ResultSetWriter[_ <: MetaData, _ <: Record] = - createResultSetWriter(getResultSetByType(getDefaultResultSetByType), alias) // todo check + createResultSetWriter(getResultSetByType(getDefaultResultSetByType), alias) def createResultSetWriter(resultSetType: String): ResultSetWriter[_ <: MetaData, _ <: Record] = createResultSetWriter(getResultSetByType(resultSetType), null) diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/conf/EngineConnExecutorConfiguration.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/conf/EngineConnExecutorConfiguration.scala index 813022ceb8..f847b9c34f 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/conf/EngineConnExecutorConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/conf/EngineConnExecutorConfiguration.scala @@ -66,4 +66,7 @@ object EngineConnExecutorConfiguration { val DEFAULT_EXECUTOR_NAME = CommonVars("wds.linkis.engineconn.executor.default.name", "ComputationExecutor") + val LINKIS_RES_DEFAULT_ENABLED = + CommonVars("wds.linkis.engineconn.res.default.enabled", true).getValue + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java index 034022f169..24c8b904cd 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java @@ -30,11 +30,9 @@ public enum EngineconnCoreErrorCodeSummary implements LinkisErrorCode { CANNOT_DEFAULT_EF(20000, "Cannot find default ExecutorFactory(找不到默认的 ExecutorFactory)"), ETL_NOT_EXISTS(20000, "EngineTypeLabel does not exist(EngineTypeLabel 不存在)"), UCL_NOT_EXISTS(20000, "UserCreatorLabel does not exist(UserCreatorLabel 不存在)"), - CANNOT_HOME_PATH_EC( - 20001, "Cannot find the home path of engineConn at: {0}(找不到 engineConn 的 home 路径,该路径为:{0})"), + CANNOT_HOME_PATH_EC(20001, "Cannot find the home path of engineConn(找不到 engineConn 的 home 路径)"), 
CANNOT_HOME_PATH_DIST( - 20001, - "Could not find the home path for engineconn dist at: {0}(找不到 engineconn dist 的 home 路径,该路径为:{0})"), + 20001, "Cannot find the home path:{0} of engineconn dist(找不到 engineconn dist 的 home 路径)"), DIST_IS_EMPTY( 20001, "The dist of EngineConn is empty,engineConnType is:{0}(EngineConn 的 dist 为空,engineConnType为:{0})"), diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorCode.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorCode.scala new file mode 100644 index 0000000000..5f1537525c --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorCode.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.engineplugin.common.exception + +object EngineConnPluginErrorCode { + + def INVALID_RUNTYPE: Int = 70101 + + def INVALID_LABELS: Int = 70102 + +} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorException.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorException.scala index d7ab70908a..41cf435459 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorException.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/exception/EngineConnPluginErrorException.scala @@ -17,9 +17,6 @@ package org.apache.linkis.manager.engineplugin.common.exception -import org.apache.linkis.common.exception.{ErrorException, ExceptionLevel, LinkisRuntimeException} +import org.apache.linkis.common.exception.ErrorException -class EngineConnPluginErrorException(code: Int, msg: String) - extends LinkisRuntimeException(code, msg) { - override def getLevel: ExceptionLevel = ExceptionLevel.ERROR -} +class EngineConnPluginErrorException(code: Int, msg: String) extends ErrorException(code, msg) {} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala index aada8caedc..02565a394b 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala @@ -34,6 +34,9 @@ class UserNodeResource extends NodeResource { private var leftResource: Resource = _ private var createTime: Date = _ private var updateTime: Date = _ + private var maxApps: Int = _ + private var numPendingApps: Int = _ + private var numActiveApps: Int = _ def getUser: String = user @@ -87,4 +90,23 @@ class UserNodeResource extends NodeResource { override def getId: Integer = id override def setId(id: Integer): Unit = this.id = id + + override def getMaxApps: Integer = maxApps + + override def setMaxApps(maxApps: Integer): Unit = { + this.maxApps = maxApps + } + + override def getNumPendingApps: Integer = numPendingApps + + override def setNumPendingApps(numPendingApps: Integer): Unit = { + this.numPendingApps = numPendingApps + } + + override def getNumActiveApps: Integer = numActiveApps + + override def setNumActiveApps(numActiveApps: Integer): Unit = { + this.numActiveApps = numActiveApps + } + } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/ParserUtils.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/ParserUtils.java index 69f15ea864..86af74d5c8 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/ParserUtils.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/parser/ParserUtils.java @@ -66,7 +66,7 @@ public static void generateLogPath(JobRequest jobRequest, Map pa String creator = LabelUtil.getUserCreator(jobRequest.getLabels())._2; String umUser = jobRequest.getExecuteUser(); FsPath 
lopPrefixPath = new FsPath(logPathPrefix); - if (StorageUtils.HDFS.equals(lopPrefixPath.getFsType())) { + if (StorageUtils.HDFS().equals(lopPrefixPath.getFsType())) { String commonLogPath = logPathPrefix + "/" + "log" + "/" + dateString + "/" + creator; logPath = commonLogPath + "/" + umUser + "/" + jobRequest.getId() + ".log"; CommonLogPathUtils.buildCommonPath(commonLogPath); diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java index af7134c5b6..57d2ed04d0 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java @@ -515,7 +515,7 @@ private void buildYarnResource( metricsVo.put(TaskConstant.JOB_YARNRESOURCE, resoureList); Optional cores = resourceMap.values().stream() - .map(resource -> resource.getQueueCores()) + .map(resource -> resource.queueCores()) .reduce((x, y) -> x + y); Optional memory = resourceMap.values().stream() diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/LogPathCreateInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/LogPathCreateInterceptor.scala index 1b093bedd9..5ed8d88fe0 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/LogPathCreateInterceptor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/LogPathCreateInterceptor.scala @@ -24,6 +24,8 @@ import org.apache.linkis.entrance.interceptor.exception.LogPathCreateException import org.apache.linkis.entrance.parser.ParserUtils 
import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.commons.lang3.exception.ExceptionUtils + /** * Description:Log path generation interceptor, used to set the path log of the task(日志路径生成拦截器, * 用于设置task的路径日志) @@ -41,14 +43,14 @@ class LogPathCreateInterceptor extends EntranceInterceptor with Logging { case e: ErrorException => val exception: LogPathCreateException = LogPathCreateException( 20075, - "Failed to get logPath(获取logPath失败),reason: " + e.getMessage + "Failed to get logPath(获取logPath失败),reason msg: " + e.getMessage ) exception.initCause(e) exception case t: Throwable => val exception: LogPathCreateException = LogPathCreateException( 20075, - "Failed to get logPath(获取logPath失败), reason: " + t.getCause + "Failed to get logPath(获取logPath失败), reason: " + ExceptionUtils.getStackTrace(t) ) exception.initCause(t) exception diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml b/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml index 36076024f2..d6dee78ed2 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml @@ -129,6 +129,12 @@ kubernetes-model-core ${kubernetes-client.version} + + + org.apache.linkis + linkis-ps-common-lock + ${project.version} + diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/EngineConnPluginLoaderConf.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/EngineConnPluginLoaderConf.java deleted file mode 100644 index 59d9959431..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/EngineConnPluginLoaderConf.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.loader; - -import org.apache.linkis.common.conf.CommonVars; -import org.apache.linkis.common.conf.Configuration; - -public class EngineConnPluginLoaderConf { - - public static final CommonVars ENGINE_PLUGIN_LOADER_DEFAULT_USER = - CommonVars.apply("wds.linkis.engineconn.plugin.loader.defaultUser", "hadoop"); - - public static final CommonVars ENGINE_PLUGIN_STORE_PATH = - CommonVars.apply( - "wds.linkis.engineconn.plugin.loader.store.path", - CommonVars.apply( - "ENGINE_CONN_HOME", - Configuration.getLinkisHome() + "/lib/linkis-engineconn-plugins") - .getValue()); - - public static final CommonVars ENGINE_PLUGIN_PROPERTIES_NAME = - CommonVars.apply("wds.linkis.engineconn.plugin.loader.properties.name", "plugins.properties"); - - public static final CommonVars ENGINE_PLUGIN_LOADER_CACHE_REFRESH_INTERVAL = - CommonVars.apply("wds.linkis.engineconn.plugin.loader.cache.refresh-interval", "300"); - - public static final CommonVars DOWNLOAD_TEMP_DIR_PREFIX = - CommonVars.apply("wds.linkis.engineconn.plugin.loader.download.tmpdir.prefix", ".BML_TMP_"); -} diff --git 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/CacheablesEngineConnPluginLoader.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/CacheablesEngineConnPluginLoader.java index 059f984fa4..ef72664460 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/CacheablesEngineConnPluginLoader.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/CacheablesEngineConnPluginLoader.java @@ -59,7 +59,8 @@ public CacheablesEngineConnPluginLoader() { @Override public long interval() { return Long.parseLong( - EngineConnPluginLoaderConf.ENGINE_PLUGIN_LOADER_CACHE_REFRESH_INTERVAL.getValue()); + EngineConnPluginLoaderConf.ENGINE_PLUGIN_LOADER_CACHE_REFRESH_INTERVAL() + .getValue()); } @Override diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java index e3ae5ccde1..0e54ed8c4e 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java @@ -17,12 +17,13 @@ package org.apache.linkis.engineplugin.loader.loaders; +import org.apache.linkis.common.exception.ErrorException; import org.apache.linkis.engineplugin.loader.EngineConnPluginLoaderConf; import 
org.apache.linkis.engineplugin.loader.classloader.EngineConnPluginClassLoader; import org.apache.linkis.engineplugin.loader.loaders.resource.LocalEngineConnPluginResourceLoader; import org.apache.linkis.engineplugin.loader.loaders.resource.PluginResource; import org.apache.linkis.engineplugin.loader.utils.EngineConnPluginUtils; -import org.apache.linkis.manager.am.exception.AMErrorException; +import org.apache.linkis.engineplugin.loader.utils.ExceptionHelper; import org.apache.linkis.manager.engineplugin.common.EngineConnPlugin; import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginLoadException; import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginNotFoundException; @@ -57,16 +58,17 @@ public class DefaultEngineConnPluginLoader extends CacheablesEngineConnPluginLoa private static final String PLUGIN_DIR = "plugin"; - public DefaultEngineConnPluginLoader() { + public DefaultEngineConnPluginLoader() throws ErrorException { // Check store path (is necessary) - String storePath = EngineConnPluginLoaderConf.ENGINE_PLUGIN_STORE_PATH.getValue(); + String storePath = EngineConnPluginLoaderConf.ENGINE_PLUGIN_STORE_PATH().getValue(); LOG.info("DefaultEngineConnPluginLoader, storePath:" + storePath); if (StringUtils.isBlank(storePath)) { - throw new AMErrorException( + ExceptionHelper.dealErrorException( 70061, "You should defined [" - + EngineConnPluginLoaderConf.ENGINE_PLUGIN_STORE_PATH.key() - + "] in properties file"); + + EngineConnPluginLoaderConf.ENGINE_PLUGIN_STORE_PATH().key() + + "] in properties file", + null); } // The path can be uri try { @@ -78,14 +80,17 @@ public DefaultEngineConnPluginLoader() { } catch (URISyntaxException e) { // Ignore } catch (IllegalArgumentException e) { - throw new AMErrorException( + ExceptionHelper.dealErrorException( 70061, - "You should defined [" - + EngineConnPluginLoaderConf.ENGINE_PLUGIN_STORE_PATH.key() - + "] in properties file"); + "The value:[" + + storePath + + "] of [" + 
+ EngineConnPluginLoaderConf.ENGINE_PLUGIN_STORE_PATH().key() + + "] is incorrect", + e); } this.rootStorePath = storePath; - this.pluginPropsName = EngineConnPluginLoaderConf.ENGINE_PLUGIN_PROPERTIES_NAME.getValue(); + this.pluginPropsName = EngineConnPluginLoaderConf.ENGINE_PLUGIN_PROPERTIES_NAME().getValue(); // Prepare inner loaders // resourceLoaders.add(new BmlEngineConnPluginResourceLoader()); resourceLoaders.add(new LocalEngineConnPluginResourceLoader()); diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/resource/BmlEngineConnPluginResourceLoader.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/resource/BmlEngineConnPluginResourceLoader.java index 23607a7063..8b8a071480 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/resource/BmlEngineConnPluginResourceLoader.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/resource/BmlEngineConnPluginResourceLoader.java @@ -58,13 +58,13 @@ public class BmlEngineConnPluginResourceLoader implements EngineConnPluginsResou private String downloadTmpDir; public BmlEngineConnPluginResourceLoader() { - this(EngineConnPluginLoaderConf.ENGINE_PLUGIN_LOADER_DEFAULT_USER.getValue(), null); + this(EngineConnPluginLoaderConf.ENGINE_PLUGIN_LOADER_DEFAULT_USER().getValue(), null); } public BmlEngineConnPluginResourceLoader(String clientUser, Map clientProps) { this.clientUser = clientUser; this.bmlClient = BmlClientFactory.createBmlClient(clientUser, clientProps); - this.downloadTmpDir = EngineConnPluginLoaderConf.DOWNLOAD_TEMP_DIR_PREFIX.getValue(); + this.downloadTmpDir = EngineConnPluginLoaderConf.DOWNLOAD_TEMP_DIR_PREFIX().getValue(); } @Override diff --git 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java index 5e71dadc11..2bfcd00aca 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java @@ -93,7 +93,7 @@ private static List getJarsUrlsOfPathRecurse(String path, List classPa parentFile.listFiles( (file) -> { String name = file.getName(); - return !file.isHidden() + return !name.startsWith(".") && (file.isDirectory() || name.endsWith(JAR_SUF_NAME) || name.endsWith(CLASS_SUF_NAME)); diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/conf/EngineConnPluginConfiguration.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/conf/EngineConnPluginConfiguration.java deleted file mode 100644 index 395471e78b..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/conf/EngineConnPluginConfiguration.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.server.conf; - -import org.apache.linkis.common.conf.CommonVars; -import org.apache.linkis.common.conf.Configuration; - -public class EngineConnPluginConfiguration { - - public static final CommonVars ENGINE_CONN_HOME = - CommonVars.apply( - "wds.linkis.engineconn.home", - CommonVars.apply( - "ENGINE_CONN_HOME", - Configuration.getLinkisHome() + "/lib/linkis-engineconn-plugins") - .getValue()); - - public static final CommonVars ENGINE_CONN_DIST_LOAD_ENABLE = - CommonVars.apply("wds.linkis.engineconn.dist.load.enable", true); - - public static final CommonVars ENABLED_BML_UPLOAD_FAILED_EXIT = - CommonVars.apply("wds.linkis.engineconn.bml.upload.failed.enable", true); - - // for third party eg appconn/datax, if all update, can set to false then to remove - public static final CommonVars EC_BML_VERSION_MAY_WITH_PREFIX_V = - CommonVars.apply("linkis.engineconn.bml.version.may.with.prefix", true); -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/loader/EngineConnPluginsLoaderFactory.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/loader/EngineConnPluginsLoaderFactory.java deleted file mode 100644 index 280c35d660..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/loader/EngineConnPluginsLoaderFactory.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * 
Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.server.loader; - -import org.apache.linkis.engineplugin.loader.loaders.DefaultEngineConnPluginLoader; - -public class EngineConnPluginsLoaderFactory { - - private static final org.apache.linkis.engineplugin.loader.loaders.EngineConnPluginsLoader - engineConnPluginsLoader = new DefaultEngineConnPluginLoader(); - - public static org.apache.linkis.engineplugin.loader.loaders.EngineConnPluginsLoader - getEngineConnPluginsLoader() { - return engineConnPluginsLoader; - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.java deleted file mode 100644 index 2c5d743205..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.server.localize; - -import org.apache.linkis.engineplugin.server.conf.EngineConnPluginConfiguration; -import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginErrorException; -import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; - -import org.apache.commons.lang3.StringUtils; - -import java.io.File; -import java.nio.file.Paths; -import java.text.MessageFormat; -import java.util.Arrays; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.manager.engineplugin.errorcode.EngineconnCoreErrorCodeSummary.*; - -public abstract class AbstractEngineConnBmlResourceGenerator - implements EngineConnBmlResourceGenerator { - - private static final Logger logger = - LoggerFactory.getLogger(AbstractEngineConnBmlResourceGenerator.class); - - public AbstractEngineConnBmlResourceGenerator() { - if (!new File(getEngineConnsHome()).exists()) { - throw new EngineConnPluginErrorException( - CANNOT_HOME_PATH_EC.getErrorCode(), - MessageFormat.format(CANNOT_HOME_PATH_EC.getErrorDesc(), getEngineConnsHome())); - } - } - - public String getEngineConnsHome() { - return 
EngineConnPluginConfiguration.ENGINE_CONN_HOME.getValue(); - } - - protected String getEngineConnDistHome(EngineTypeLabel engineConnTypeLabel) { - return getEngineConnDistHome( - engineConnTypeLabel.getEngineType(), engineConnTypeLabel.getVersion()); - } - - protected String getEngineConnDistHome(String engineConnType, String version) { - String engineConnDistHome = - Paths.get(getEngineConnsHome(), engineConnType, "dist").toFile().getPath(); - checkEngineConnDistHome(engineConnDistHome); - if (StringUtils.isBlank(version) - || EngineConnBmlResourceGenerator.NO_VERSION_MARK.equals(version)) { - return engineConnDistHome; - } - String engineConnPackageHome = Paths.get(engineConnDistHome, version).toFile().getPath(); - logger.info("getEngineConnDistHome, engineConnPackageHome path:" + engineConnPackageHome); - File engineConnPackageHomeFile = new File(engineConnPackageHome); - if (!engineConnPackageHomeFile.exists()) { - if (!version.startsWith("v") - && (boolean) EngineConnPluginConfiguration.EC_BML_VERSION_MAY_WITH_PREFIX_V.getValue()) { - String versionOld = "v" + version; - String engineConnPackageHomeOld = - Paths.get(engineConnDistHome, versionOld).toFile().getPath(); - logger.info( - "try to getEngineConnDistHome with prefix v, engineConnPackageHome path:" - + engineConnPackageHomeOld); - File engineConnPackageHomeFileOld = new File(engineConnPackageHomeOld); - if (!engineConnPackageHomeFileOld.exists()) { - throw new EngineConnPluginErrorException( - ENGINE_VERSION_NOT_FOUND.getErrorCode(), - MessageFormat.format( - ENGINE_VERSION_NOT_FOUND.getErrorDesc(), version, engineConnType)); - } else { - return engineConnPackageHomeOld; - } - } else { - throw new EngineConnPluginErrorException( - ENGINE_VERSION_NOT_FOUND.getErrorCode(), - MessageFormat.format(ENGINE_VERSION_NOT_FOUND.getErrorDesc(), version, engineConnType)); - } - } else { - return engineConnPackageHome; - } - } - - private void checkEngineConnDistHome(String engineConnPackageHomePath) { - File 
engineConnPackageHomeFile = new File(engineConnPackageHomePath); - checkEngineConnDistHome(engineConnPackageHomeFile); - } - - private void checkEngineConnDistHome(File engineConnPackageHome) { - if (!engineConnPackageHome.exists()) { - throw new EngineConnPluginErrorException( - CANNOT_HOME_PATH_DIST.getErrorCode(), - MessageFormat.format( - CANNOT_HOME_PATH_DIST.getErrorDesc(), engineConnPackageHome.getPath())); - } - } - - protected String[] getEngineConnDistHomeList(String engineConnType) { - String engineConnDistHome = - Paths.get(getEngineConnsHome(), engineConnType, "dist").toFile().getPath(); - File engineConnDistHomeFile = new File(engineConnDistHome); - checkEngineConnDistHome(engineConnDistHomeFile); - File[] children = engineConnDistHomeFile.listFiles(); - if (children.length == 0) { - throw new EngineConnPluginErrorException( - DIST_IS_EMPTY.getErrorCode(), - MessageFormat.format(DIST_IS_EMPTY.getErrorDesc(), engineConnType)); - } else { - return Arrays.stream(children).map(File::getPath).toArray(String[]::new); - } - } - - @Override - public String[] getEngineConnTypeListFromDisk() { - return Arrays.stream(new File(getEngineConnsHome()).listFiles()) - .filter(file -> !file.isHidden() && file.isDirectory()) - .map(file -> file.getName()) - .toArray(String[]::new); - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/DefaultEngineConnBmlResourceGenerator.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/DefaultEngineConnBmlResourceGenerator.java deleted file mode 100644 index c8ebc50633..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/DefaultEngineConnBmlResourceGenerator.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) 
under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.server.localize; - -import org.apache.linkis.common.utils.ZipUtils; -import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginErrorException; -import org.apache.linkis.manager.engineplugin.errorcode.EngineconnCoreErrorCodeSummary; - -import java.io.File; -import java.text.MessageFormat; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.manager.engineplugin.errorcode.EngineconnCoreErrorCodeSummary.NO_PERMISSION_FILE; - -public class DefaultEngineConnBmlResourceGenerator extends AbstractEngineConnBmlResourceGenerator { - - private static final Logger logger = - LoggerFactory.getLogger(DefaultEngineConnBmlResourceGenerator.class); - - public DefaultEngineConnBmlResourceGenerator() {} - - @Override - public Map generate(String engineConnType) { - String[] engineConnDistHomes = getEngineConnDistHomeList(engineConnType); - Map resultMap = new HashMap<>(); - for (String path : engineConnDistHomes) { - - File versionFile = new File(path); - logger.info("generate, versionFile:" + path); - if (!versionFile.isDirectory()) { - logger.warn("File is not 
dir {},skip to upload", path); - continue; - } - String key = versionFile.getName(); - - try { - EngineConnLocalizeResource[] engineConnLocalizeResources = - generateDir(versionFile.getPath()); - resultMap.put(key, engineConnLocalizeResources); - } catch (Throwable t) { - logger.error("Generate dir : " + path + " error, msg : " + t.getMessage(), t); - throw t; - } - } - - return resultMap; - } - - @Override - public EngineConnLocalizeResource[] generate(String engineConnType, String version) { - String path = getEngineConnDistHome(engineConnType, version); - return generateDir(path); - } - - private EngineConnLocalizeResource[] generateDir(String path) { - File distFile = new File(path); - if (!distFile.isDirectory()) { - logger.warn("File is not dir {},skip to upload", path); - throw new EngineConnPluginErrorException( - EngineconnCoreErrorCodeSummary.DIST_IRREGULAR_EXIST.getErrorCode(), - path + " is not dir, to delete this file then retry"); - } - logger.info("generateDir, distFile:" + path); - File[] validFiles = - distFile.listFiles( - f -> - !f.getName().endsWith(".zip") - || !new File(path, f.getName().replace(".zip", "")).exists()); - - return Arrays.stream(validFiles) - .map( - file -> { - if (file.isFile()) { - return new EngineConnLocalizeResourceImpl( - file.getPath(), file.getName(), file.lastModified(), file.length()); - } else { - File newFile = new File(path, file.getName() + ".zip"); - if (newFile.exists() && !newFile.delete()) { - throw new EngineConnPluginErrorException( - NO_PERMISSION_FILE.getErrorCode(), - MessageFormat.format(NO_PERMISSION_FILE.getErrorDesc(), newFile)); - } - - ZipUtils.fileToZip(file.getPath(), path, file.getName() + ".zip"); - // If it is a folder, the last update time here is the last update time of the - // folder, not the last update time of - // ZIP.(如果是文件夹,这里的最后更新时间,采用文件夹的最后更新时间,而不是ZIP的最后更新时间.) 
- return new EngineConnLocalizeResourceImpl( - newFile.getPath(), newFile.getName(), file.lastModified(), newFile.length()); - } - }) - .toArray(EngineConnLocalizeResource[]::new); - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/EngineConnLocalizeResourceImpl.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/EngineConnLocalizeResourceImpl.java deleted file mode 100644 index 4ca366e8bf..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/EngineConnLocalizeResourceImpl.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.engineplugin.server.localize; - -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.InputStream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class EngineConnLocalizeResourceImpl implements EngineConnLocalizeResource { - private static final Logger logger = - LoggerFactory.getLogger(EngineConnLocalizeResourceImpl.class); - - private final String filePath; - private final String fileName; - private final long lastModified; - private final long fileSize; - - public EngineConnLocalizeResourceImpl( - String filePath, String fileName, long lastModified, long fileSize) { - this.filePath = filePath; - this.fileName = fileName; - this.lastModified = lastModified; - this.fileSize = fileSize; - } - - @Override - public InputStream getFileInputStream() { - try { - return new FileInputStream(filePath); - } catch (FileNotFoundException e) { - logger.warn("getFileInputStream failed filePath:[{}]", filePath, e); - } - return null; - } - - public String filePath() { - return filePath; - } - - public String fileName() { - return fileName; - } - - public long lastModified() { - return lastModified; - } - - public long fileSize() { - return fileSize; - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnLaunchService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnLaunchService.java deleted file mode 100644 index 807daa2c97..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnLaunchService.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.server.service; - -import org.apache.linkis.engineplugin.server.loader.EngineConnPluginsLoaderFactory; -import org.apache.linkis.manager.am.exception.AMErrorCode; -import org.apache.linkis.manager.am.util.LinkisUtils; -import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginErrorException; -import org.apache.linkis.manager.engineplugin.common.launch.EngineConnLaunchBuilder; -import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnBuildRequest; -import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnLaunchRequest; -import org.apache.linkis.manager.engineplugin.common.launch.process.EngineConnResourceGenerator; -import org.apache.linkis.manager.engineplugin.common.launch.process.JavaProcessEngineConnLaunchBuilder; -import org.apache.linkis.manager.engineplugin.common.loader.entity.EngineConnPluginInstance; -import org.apache.linkis.manager.engineplugin.errorcode.EngineconnCoreErrorCodeSummary; -import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; -import org.apache.linkis.rpc.message.annotation.Receiver; - -import org.apache.commons.lang3.exception.ExceptionUtils; - -import org.springframework.beans.factory.annotation.Autowired; -import 
org.springframework.stereotype.Component; - -import java.util.Optional; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@Component -public class DefaultEngineConnLaunchService implements EngineConnLaunchService { - - private static final Logger logger = - LoggerFactory.getLogger(DefaultEngineConnLaunchService.class); - - @Autowired private EngineConnResourceGenerator engineConnResourceGenerator; - - private EngineConnLaunchBuilder getEngineLaunchBuilder( - EngineTypeLabel engineTypeLabel, EngineConnBuildRequest engineBuildRequest) { - final EngineConnPluginInstance engineConnPluginInstance; - try { - engineConnPluginInstance = - EngineConnPluginsLoaderFactory.getEngineConnPluginsLoader() - .getEngineConnPlugin(engineTypeLabel); - } catch (Exception e) { - throw new EngineConnPluginErrorException( - AMErrorCode.NOT_EXISTS_ENGINE_CONN.getErrorCode(), - AMErrorCode.NOT_EXISTS_ENGINE_CONN.getErrorDesc()); - } - final EngineConnLaunchBuilder builder = - engineConnPluginInstance.plugin().getEngineConnLaunchBuilder(); - if (builder instanceof JavaProcessEngineConnLaunchBuilder) { - ((JavaProcessEngineConnLaunchBuilder) builder) - .setEngineConnResourceGenerator(engineConnResourceGenerator); - } - builder.setBuildRequest(engineBuildRequest); - return builder; - } - - @Override - @Receiver - public EngineConnLaunchRequest createEngineConnLaunchRequest( - EngineConnBuildRequest engineBuildRequest) { - final Optional engineTypeOption = - engineBuildRequest.labels().stream() - .filter(label -> label instanceof EngineTypeLabel) - .map(label -> (EngineTypeLabel) label) - .findFirst(); - - if (!engineTypeOption.isPresent()) { - throw new EngineConnPluginErrorException( - EngineconnCoreErrorCodeSummary.ETL_REQUESTED.getErrorCode(), - EngineconnCoreErrorCodeSummary.ETL_REQUESTED.getErrorDesc()); - } - - final EngineTypeLabel engineTypeLabel = engineTypeOption.get(); - return LinkisUtils.tryCatch( - () -> getEngineLaunchBuilder(engineTypeLabel, 
engineBuildRequest).buildEngineConn(), - (Throwable t) -> { - logger.error( - String.format( - "Failed to createEngineConnLaunchRequest(%s)", engineBuildRequest.ticketId()), - t); - throw new EngineConnPluginErrorException( - EngineconnCoreErrorCodeSummary.FAILED_CREATE_ELR.getErrorCode(), - String.format( - "%s, %s", - EngineconnCoreErrorCodeSummary.FAILED_CREATE_ELR.getErrorDesc(), - ExceptionUtils.getRootCauseMessage(t))); - }); - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceFactoryService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceFactoryService.java deleted file mode 100644 index 2c0496c071..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceFactoryService.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.engineplugin.server.service; - -import org.apache.linkis.engineplugin.server.loader.EngineConnPluginsLoaderFactory; -import org.apache.linkis.manager.am.exception.AMErrorCode; -import org.apache.linkis.manager.am.exception.AMErrorException; -import org.apache.linkis.manager.common.entity.resource.NodeResource; -import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginErrorException; -import org.apache.linkis.manager.engineplugin.common.loader.entity.EngineConnPluginInstance; -import org.apache.linkis.manager.engineplugin.common.resource.EngineResourceFactory; -import org.apache.linkis.manager.engineplugin.common.resource.EngineResourceRequest; -import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; -import org.apache.linkis.rpc.message.annotation.Receiver; - -import org.springframework.stereotype.Component; - -import java.util.Optional; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.linkis.manager.engineplugin.errorcode.EngineconnCoreErrorCodeSummary.ETL_REQUESTED; - -@Component -public class DefaultEngineConnResourceFactoryService implements EngineConnResourceFactoryService { - - private static final Logger logger = - LoggerFactory.getLogger(DefaultEngineConnResourceFactoryService.class); - - @Override - public EngineResourceFactory getResourceFactoryBy(EngineTypeLabel engineType) { - final EngineConnPluginInstance engineConnPluginInstance; - try { - engineConnPluginInstance = - EngineConnPluginsLoaderFactory.getEngineConnPluginsLoader() - .getEngineConnPlugin(engineType); - } catch (Exception e) { - logger.warn("getResourceFactory failed engineType:{}", engineType, e); - throw new AMErrorException( - AMErrorCode.NOT_EXISTS_ENGINE_CONN.getErrorCode(), - AMErrorCode.NOT_EXISTS_ENGINE_CONN.getErrorDesc()); - } - return engineConnPluginInstance.plugin().getEngineResourceFactory(); - } - - @Override - @Receiver - public NodeResource 
createEngineResource(final EngineResourceRequest engineResourceRequest) { - logger.info(String.format("To invoke createEngineResource %s", engineResourceRequest)); - final Optional engineTypeOption = - engineResourceRequest.labels().stream() - .filter(label -> label instanceof EngineTypeLabel) - .map(label -> (EngineTypeLabel) label) - .findFirst(); - - if (!engineTypeOption.isPresent()) { - throw new EngineConnPluginErrorException( - ETL_REQUESTED.getErrorCode(), ETL_REQUESTED.getErrorDesc()); - } - - final EngineTypeLabel engineTypeLabel = engineTypeOption.get(); - return getResourceFactoryBy(engineTypeLabel).createEngineResource(engineResourceRequest); - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceService.java deleted file mode 100644 index a33b1afde8..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceService.java +++ /dev/null @@ -1,351 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.server.service; - -import org.apache.linkis.bml.client.BmlClient; -import org.apache.linkis.bml.client.BmlClientFactory; -import org.apache.linkis.bml.protocol.BmlUpdateResponse; -import org.apache.linkis.bml.protocol.BmlUploadResponse; -import org.apache.linkis.common.utils.Utils; -import org.apache.linkis.engineplugin.server.conf.EngineConnPluginConfiguration; -import org.apache.linkis.engineplugin.server.dao.EngineConnBmlResourceDao; -import org.apache.linkis.engineplugin.server.entity.EngineConnBmlResource; -import org.apache.linkis.engineplugin.server.localize.EngineConnBmlResourceGenerator; -import org.apache.linkis.engineplugin.server.localize.EngineConnLocalizeResource; -import org.apache.linkis.manager.common.protocol.bml.BmlResource; -import org.apache.linkis.manager.common.protocol.bml.BmlResource.BmlResourceVisibility; -import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginErrorException; -import org.apache.linkis.manager.engineplugin.common.launch.process.EngineConnResource; -import org.apache.linkis.manager.engineplugin.common.launch.process.LaunchConstants; -import org.apache.linkis.manager.engineplugin.errorcode.EngineconnCoreErrorCodeSummary; -import org.apache.linkis.rpc.message.annotation.Receiver; - -import org.apache.commons.lang3.StringUtils; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import javax.annotation.PostConstruct; - -import java.text.MessageFormat; -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; -import java.util.stream.Stream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static 
org.apache.linkis.manager.engineplugin.errorcode.EngineconnCoreErrorCodeSummary.EN_PLUGIN_MATERIAL_SOURCE_EXCEPTION; - -@Component -public class DefaultEngineConnResourceService extends EngineConnResourceService { - private static final Logger logger = - LoggerFactory.getLogger(DefaultEngineConnResourceService.class); - - @Autowired private EngineConnBmlResourceGenerator engineConnBmlResourceGenerator; - - @Autowired private EngineConnBmlResourceDao engineConnBmlResourceDao; - - private final BmlClient bmlClient = BmlClientFactory.createBmlClient(); - private boolean isRefreshing = false; - - @PostConstruct - @Override - public void init() { - if ((boolean) EngineConnPluginConfiguration.ENGINE_CONN_DIST_LOAD_ENABLE.getValue()) { - logger.info("Start to refresh all engineconn plugins when inited."); - refreshAll(false, false); - } - } - - private BmlResource uploadToBml(final EngineConnLocalizeResource localizeResource) { - final BmlUploadResponse response = - bmlClient.uploadResource( - Utils.getJvmUser(), localizeResource.fileName(), localizeResource.getFileInputStream()); - final BmlResource bmlResource = new BmlResource(); - bmlResource.setResourceId(response.resourceId()); - bmlResource.setVersion(response.version()); - return bmlResource; - } - - private BmlResource uploadToBml( - final EngineConnLocalizeResource localizeResource, final String resourceId) { - final BmlUpdateResponse response = - bmlClient.updateResource( - Utils.getJvmUser(), - resourceId, - localizeResource.fileName(), - localizeResource.getFileInputStream()); - final BmlResource bmlResource = new BmlResource(); - bmlResource.setResourceId(response.resourceId()); - bmlResource.setVersion(response.version()); - return bmlResource; - } - - @Override - public void refreshAll(boolean iswait, boolean force) { - if (!isRefreshing) { - synchronized (this) { - if (!isRefreshing) { - - final Runnable refreshTask = - new Runnable() { - @Override - public void run() { - isRefreshing = true; - 
logger.info("Try to initialize the dist resources of all EngineConns. "); - String[] engineConnTypeList = - engineConnBmlResourceGenerator.getEngineConnTypeListFromDisk(); - for (String engineConnType : engineConnTypeList) { - try { - logger.info( - "Try to initialize all versions of {}EngineConn.", engineConnType); - Map version2Localize = - engineConnBmlResourceGenerator.generate(engineConnType); - for (Map.Entry entry : - version2Localize.entrySet()) { - logger.info( - "Try to initialize {}EngineConn-{}.", engineConnType, entry.getKey()); - refresh(entry.getValue(), engineConnType, entry.getKey(), force); - } - - } catch (Exception t) { - if (!iswait - && EngineConnPluginConfiguration.ENABLED_BML_UPLOAD_FAILED_EXIT - .getValue()) { - logger.error("Failed to upload engine conn to bml, now exit!", t); - System.exit(1); - } - logger.error("Failed to upload engine conn to bml", t); - } - } - isRefreshing = false; - } - }; - Future future = Utils.defaultScheduler().submit(refreshTask); - - if (iswait) { - try { - future.get(); - } catch (InterruptedException | ExecutionException e) { - logger.info("DefaultEngineConnResourceService refreshTask execution failed", e); - } - } else { - logger.info("DefaultEngineConnResourceService IsRefreshing EngineConns..."); - } - } - } - } - } - - @Receiver - public boolean refreshAll(final RefreshAllEngineConnResourceRequest engineConnRefreshAllRequest) { - logger.info("Start to refresh all engineconn plugins."); - refreshAll(true, false); - return true; - } - - @Receiver - @Override - public boolean refresh( - final RefreshEngineConnResourceRequest engineConnRefreshRequest, final boolean force) { - final String engineConnType = engineConnRefreshRequest.getEngineConnType(); - final String version = engineConnRefreshRequest.getVersion(); - if ("*".equals(version) || StringUtils.isEmpty(version)) { - logger.info("Try to refresh all versions of {}EngineConn.", engineConnType); - Map version2Localize = - 
engineConnBmlResourceGenerator.generate(engineConnType); - for (Map.Entry entry : version2Localize.entrySet()) { - logger.info("Try to initialize {}EngineConn-{}.", engineConnType, entry.getKey()); - refresh(entry.getValue(), engineConnType, entry.getKey(), force); - } - - } else { - logger.info("Try to refresh {}EngineConn-{}.", engineConnType, version); - EngineConnLocalizeResource[] localize = - engineConnBmlResourceGenerator.generate(engineConnType, version); - refresh(localize, engineConnType, version, force); - } - return true; - } - - private void refresh( - final EngineConnLocalizeResource[] localize, - final String engineConnType, - final String version, - final boolean force) { - final List engineConnBmlResources = - engineConnBmlResourceDao.getAllEngineConnBmlResource(engineConnType, version); - - if (Stream.of(localize) - .filter( - localizeResource -> - StringUtils.equals( - LaunchConstants.ENGINE_CONN_CONF_DIR_NAME() + ".zip", - localizeResource.fileName()) - || StringUtils.equals( - LaunchConstants.ENGINE_CONN_LIB_DIR_NAME() + ".zip", - localizeResource.fileName())) - .count() - < 2) { - - throw new EngineConnPluginErrorException( - EngineconnCoreErrorCodeSummary.LIB_CONF_DIR_NECESSARY.getErrorCode(), - MessageFormat.format( - EngineconnCoreErrorCodeSummary.LIB_CONF_DIR_NECESSARY.getErrorDesc(), - engineConnType)); - } - - for (EngineConnLocalizeResource localizeResource : localize) { - - Optional resource = - engineConnBmlResources.stream() - .filter(r -> r.getFileName().equals(localizeResource.fileName())) - .findFirst(); - if (!resource.isPresent()) { - logger.info( - "Ready to upload a new bmlResource for {}EngineConn-{}. 
path: {}", - engineConnType, - version, - localizeResource.fileName()); - final BmlResource bmlResource = uploadToBml(localizeResource); - final EngineConnBmlResource engineConnBmlResource = new EngineConnBmlResource(); - engineConnBmlResource.setBmlResourceId(bmlResource.getResourceId()); - engineConnBmlResource.setBmlResourceVersion(bmlResource.getVersion()); - engineConnBmlResource.setCreateTime(new Date()); - engineConnBmlResource.setLastUpdateTime(new Date()); - engineConnBmlResource.setEngineConnType(engineConnType); - engineConnBmlResource.setFileName(localizeResource.fileName()); - engineConnBmlResource.setFileSize(localizeResource.fileSize()); - engineConnBmlResource.setLastModified(localizeResource.lastModified()); - engineConnBmlResource.setVersion(version); - engineConnBmlResourceDao.save(engineConnBmlResource); - } else { - boolean isChanged = - resource.get().getFileSize() != localizeResource.fileSize() - || resource.get().getLastModified() != localizeResource.lastModified(); - - if (isChanged || (!isChanged && force)) { - if (!isChanged && force) { - logger.info( - "The file has no change in {}EngineConn-{}, path: {}, but force to refresh", - engineConnType, - version, - localizeResource.fileName()); - } - logger.info( - "Ready to upload a refreshed bmlResource for {}EngineConn-{}. 
path: {}", - engineConnType, - version, - localizeResource.fileName()); - final EngineConnBmlResource engineConnBmlResource = resource.get(); - final BmlResource bmlResource = - uploadToBml(localizeResource, engineConnBmlResource.getBmlResourceId()); - engineConnBmlResource.setBmlResourceVersion(bmlResource.getVersion()); - engineConnBmlResource.setLastUpdateTime(new Date()); - engineConnBmlResource.setFileSize(localizeResource.fileSize()); - engineConnBmlResource.setLastModified(localizeResource.lastModified()); - engineConnBmlResourceDao.update(engineConnBmlResource); - } else { - logger.info( - "The file has no change in {}EngineConn-{}, path: {}", - engineConnType, - version, - localizeResource.fileName()); - } - } - } - } - - @Receiver - @Override - public EngineConnResource getEngineConnBMLResources( - final GetEngineConnResourceRequest engineConnBMLResourceRequest) { - final String engineConnType = engineConnBMLResourceRequest.getEngineConnType(); - final String version = engineConnBMLResourceRequest.getVersion(); - - List engineConnBmlResources = - engineConnBmlResourceDao.getAllEngineConnBmlResource(engineConnType, version); - if (engineConnBmlResources.size() == 0 - && (boolean) EngineConnPluginConfiguration.EC_BML_VERSION_MAY_WITH_PREFIX_V.getValue()) { - logger.info("Try to get engine conn bml resource with prefex v"); - engineConnBmlResourceDao.getAllEngineConnBmlResource(engineConnType, "v" + version); - } - - Optional confBmlResourceMap = - engineConnBmlResources.stream() - .filter( - r -> r.getFileName().equals(LaunchConstants.ENGINE_CONN_CONF_DIR_NAME() + ".zip")) - .map(this::parseToBmlResource) - .findFirst(); - Optional libBmlResourceMap = - engineConnBmlResources.stream() - .filter( - r -> r.getFileName().equals(LaunchConstants.ENGINE_CONN_LIB_DIR_NAME() + ".zip")) - .map(this::parseToBmlResource) - .findFirst(); - - if (!confBmlResourceMap.isPresent() || !libBmlResourceMap.isPresent()) { - throw new EngineConnPluginErrorException( - 
EN_PLUGIN_MATERIAL_SOURCE_EXCEPTION.getErrorCode(), - EN_PLUGIN_MATERIAL_SOURCE_EXCEPTION.getErrorDesc()); - } - final BmlResource confBmlResource = confBmlResourceMap.get(); - final BmlResource libBmlResource = libBmlResourceMap.get(); - BmlResource[] otherBmlResources = - engineConnBmlResources.stream() - .filter( - r -> - !r.getFileName().equals(LaunchConstants.ENGINE_CONN_CONF_DIR_NAME() + ".zip") - || r.getFileName() - .equals(LaunchConstants.ENGINE_CONN_LIB_DIR_NAME() + ".zip")) - .map(this::parseToBmlResource) - .toArray(BmlResource[]::new); - - return new EngineConnResource() { - @Override - public BmlResource getConfBmlResource() { - return confBmlResource; - } - - @Override - public BmlResource getLibBmlResource() { - return libBmlResource; - } - - @Override - public BmlResource[] getOtherBmlResources() { - return otherBmlResources; - } - }; - } - - private BmlResource parseToBmlResource(final EngineConnBmlResource engineConnBmlResource) { - final BmlResource bmlResource = new BmlResource(); - bmlResource.setFileName(engineConnBmlResource.getFileName()); - bmlResource.setOwner(Utils.getJvmUser()); - bmlResource.setResourceId(engineConnBmlResource.getBmlResourceId()); - bmlResource.setVersion(engineConnBmlResource.getBmlResourceVersion()); - bmlResource.setVisibility(BmlResourceVisibility.Public); - return bmlResource; - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceRequest.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceRequest.java deleted file mode 100644 index 62f7aea661..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceRequest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.server.service; - -import org.apache.linkis.protocol.message.RequestMethod; -import org.apache.linkis.protocol.message.RequestProtocol; - -public abstract class EngineConnResourceRequest implements RequestProtocol, RequestMethod { - - private String engineConnType; - private String version; - - private boolean force; - - public String getEngineConnType() { - return engineConnType; - } - - public void setEngineConnType(String engineConnType) { - this.engineConnType = engineConnType; - } - - public String getVersion() { - return version; - } - - public void setVersion(String version) { - this.version = version; - } - - public boolean getForce() { - return force; - } - - public void setForce(boolean force) { - this.force = force; - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceService.java deleted file mode 100644 index 5f2fb6cf69..0000000000 --- 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/EngineConnResourceService.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.engineplugin.server.service; - -import org.apache.linkis.manager.engineplugin.common.launch.process.EngineConnResource; -import org.apache.linkis.manager.engineplugin.common.launch.process.EngineConnResourceGenerator; -import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; - -public abstract class EngineConnResourceService implements EngineConnResourceGenerator { - - public abstract void init(); - - public abstract void refreshAll(boolean wait, boolean force); - - public abstract boolean refresh( - RefreshEngineConnResourceRequest engineConnRefreshRequest, boolean force); - - public abstract EngineConnResource getEngineConnBMLResources( - GetEngineConnResourceRequest engineConnBMLResourceRequest); - - @Override - public EngineConnResource getEngineConnBMLResources(EngineTypeLabel engineTypeLabel) { - GetEngineConnResourceRequest engineConnBMLResourceRequest = new GetEngineConnResourceRequest(); - engineConnBMLResourceRequest.setEngineConnType(engineTypeLabel.getEngineType()); - engineConnBMLResourceRequest.setVersion(engineTypeLabel.getVersion()); - return getEngineConnBMLResources(engineConnBMLResourceRequest); - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/GetEngineConnResourceRequest.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/GetEngineConnResourceRequest.java deleted file mode 100644 index 046a0dd7ec..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/GetEngineConnResourceRequest.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.server.service; - -public class GetEngineConnResourceRequest extends EngineConnResourceRequest { - @Override - public String method() { - return "/enginePlugin/engineConn/getResource"; - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/RefreshAllEngineConnResourceRequest.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/RefreshAllEngineConnResourceRequest.java deleted file mode 100644 index 6bd41d2599..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/RefreshAllEngineConnResourceRequest.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.server.service; - -import org.apache.linkis.protocol.message.RequestMethod; -import org.apache.linkis.protocol.message.RequestProtocol; - -public class RefreshAllEngineConnResourceRequest implements RequestProtocol, RequestMethod { - @Override - public String method() { - return "/enginePlugin/engineConn/refreshAll"; - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/RefreshEngineConnResourceRequest.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/RefreshEngineConnResourceRequest.java deleted file mode 100644 index f518a72059..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/RefreshEngineConnResourceRequest.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.engineplugin.server.service; - -public class RefreshEngineConnResourceRequest extends EngineConnResourceRequest { - @Override - public String method() { - return "/enginePlugin/engineConn/refresh"; - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java index 603641fa78..803151d534 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java @@ -21,7 +21,6 @@ import org.apache.linkis.bml.client.BmlClientFactory; import org.apache.linkis.bml.protocol.BmlResourceVersionsResponse; import org.apache.linkis.bml.protocol.Version; -import org.apache.linkis.common.utils.SecurityUtils; import org.apache.linkis.common.utils.ZipUtils; import org.apache.linkis.engineplugin.server.dao.EngineConnBmlResourceDao; import org.apache.linkis.engineplugin.server.entity.EngineConnBmlResource; @@ -29,8 +28,6 @@ import org.apache.linkis.engineplugin.server.restful.EnginePluginRestful; import 
org.apache.linkis.engineplugin.server.service.EnginePluginAdminService; import org.apache.linkis.engineplugin.vo.EnginePluginBMLVo; -import org.apache.linkis.manager.am.exception.AMErrorCode; -import org.apache.linkis.manager.am.exception.AMErrorException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -40,7 +37,6 @@ import java.io.FileOutputStream; import java.io.InputStream; import java.io.OutputStream; -import java.text.MessageFormat; import java.util.List; import com.github.pagehelper.PageHelper; @@ -83,11 +79,6 @@ public List getTypeList() { @Override public void deleteEnginePluginBML(String ecType, String version, String username) { List allEngineConnBmlResource = null; - if (ecType != null && SecurityUtils.containsRelativePath(ecType)) { - throw new AMErrorException( - AMErrorCode.EC_PLUGIN_ERROR.getErrorCode(), - MessageFormat.format(AMErrorCode.EC_PLUGIN_ERROR.getErrorDesc(), ecType)); - } try { allEngineConnBmlResource = engineConnBmlResourceDao.getAllEngineConnBmlResource(ecType, version); @@ -97,20 +88,13 @@ public void deleteEnginePluginBML(String ecType, String version, String username engineConnBmlResourceDao.delete(engineConnBmlResource); }); String engineConnsHome = defaultEngineConnBmlResourceGenerator.getEngineConnsHome(); - File file = new File(engineConnsHome + "/" + ecType); - if (file.exists()) { deleteDir(file); log.info("file {} delete success", ecType); } } catch (Exception e) { - log.warn( - "deleteEnginePluginBML failed ecType:[{}] version:[{}] username:[{}]", - ecType, - version, - username, - e); + e.printStackTrace(); } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java index 9fc63ebcc8..064d61a6fb 100644 --- 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java @@ -24,5 +24,6 @@ public class LinkisManagerApplication { public static void main(String[] args) throws ReflectiveOperationException { LinkisBaseServerApp.main(args); + // DataWorkCloudApplication.main(args); } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/AMConfiguration.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/AMConfiguration.java index 80b433b3f2..5164542445 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/AMConfiguration.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/AMConfiguration.java @@ -23,10 +23,54 @@ import org.apache.linkis.manager.common.entity.enumeration.MaintainType; import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; import java.util.Optional; public class AMConfiguration { + // The configuration key for the YARN queue name. + public static final String YARN_QUEUE_NAME_CONFIG_KEY = "wds.linkis.rm.yarnqueue"; + + // Identifier for cross-queue tasks. + public static final String CROSS_QUEUE = "crossQueue"; + + // Identifier for across-cluster tasks. + public static final String ACROSS_CLUSTER_TASK = "acrossClusterTask"; + + // Identifier for priority clusters. + public static final String PRIORITY_CLUSTER = "priorityCluster"; + + // Target identifier for distinguishing target clusters. 
+ public static final String PRIORITY_CLUSTER_TARGET = "bdp"; + + // Origin identifier for distinguishing source clusters. + public static final String PRIORITY_CLUSTER_ORIGIN = "bdap"; + + // Configuration key for the target cluster CPU threshold. + public static final String TARGET_CPU_THRESHOLD = "targetCPUThreshold"; + + // Configuration key for the target cluster memory threshold. + public static final String TARGET_MEMORY_THRESHOLD = "targetMemoryThreshold"; + + // Configuration key for the target cluster CPU percentage threshold. + public static final String TARGET_CPU_PERCENTAGE_THRESHOLD = "targetCPUPercentageThreshold"; + + // Configuration key for the target cluster memory percentage threshold. + public static final String TARGET_MEMORY_PERCENTAGE_THRESHOLD = "targetMemoryPercentageThreshold"; + + // Configuration key for the origin cluster CPU percentage threshold. + public static final String ORIGIN_CPU_PERCENTAGE_THRESHOLD = "originCPUPercentageThreshold"; + + // Configuration key for the origin cluster memory percentage threshold. 
+ public static final String ORIGIN_MEMORY_PERCENTAGE_THRESHOLD = "originMemoryPercentageThreshold"; + + public static final double ACROSS_CLUSTER_TOTAL_MEMORY_PERCENTAGE_THRESHOLD = + CommonVars.apply("linkis.yarn.across.cluster.memory.threshold", 0.8).getValue(); + + public static final double ACROSS_CLUSTER_TOTAL_CPU_PERCENTAGE_THRESHOLD = + CommonVars.apply("linkis.yarn.across.cluster.cpu.threshold", 0.8).getValue(); + public static final CommonVars ECM_ADMIN_OPERATIONS = CommonVars.apply("wds.linkis.governance.admin.operations", ""); @@ -39,50 +83,35 @@ public class AMConfiguration { public static final CommonVars ENGINE_REUSE_MAX_TIME = CommonVars.apply("wds.linkis.manager.am.engine.reuse.max.time", new TimeType("5m")); - public static final CommonVars ENGINE_REUSE_COUNT_LIMIT = - CommonVars.apply("wds.linkis.manager.am.engine.reuse.count.limit", 2); - - public static final CommonVars NODE_STATUS_HEARTBEAT_TIME = - CommonVars.apply("wds.linkis.manager.am.node.heartbeat", new TimeType("3m")); - - public static final CommonVars NODE_HEARTBEAT_MAX_UPDATE_TIME = - CommonVars.apply("wds.linkis.manager.am.node.heartbeat", new TimeType("5m")); + public static final Integer ENGINE_REUSE_COUNT_LIMIT = + CommonVars.apply("wds.linkis.manager.am.engine.reuse.count.limit", 2).getValue(); public static final CommonVars DEFAULT_NODE_OWNER = CommonVars.apply("wds.linkis.manager.am.default.node.owner", "hadoop"); - public static final CommonVars STOP_ENGINE_WAIT = - CommonVars.apply("wds.linkis.manager.am.stop.engine.wait", new TimeType("5m")); - - public static final CommonVars STOP_EM_WAIT = - CommonVars.apply("wds.linkis.manager.am.stop.em.wait", new TimeType("5m")); - - public static final CommonVars EM_LABEL_INIT_WAIT = - CommonVars.apply("wds.linkis.manager.am.em.label.init.wait", new TimeType("5m")); - public static final CommonVars EM_NEW_WAIT_MILLS = CommonVars.apply("wds.linkis.manager.am.em.new.wait.mills", 1000 * 60L); - public static final CommonVars 
ENGINECONN_DEBUG_ENABLED = - CommonVars.apply("wds.linkis.engineconn.debug.mode.enable", false); - public static final CommonVars MULTI_USER_ENGINE_TYPES = CommonVars.apply( "wds.linkis.multi.user.engine.types", - "jdbc,es,presto,io_file,appconn,openlookeng,trino,nebula,hbase,doris"); + "jdbc,es,presto,io_file,appconn,openlookeng,trino,jobserver,nebula,hbase,doris"); public static final CommonVars ALLOW_BATCH_KILL_ENGINE_TYPES = CommonVars.apply("wds.linkis.allow.batch.kill.engine.types", "spark,hive,python"); + public static final CommonVars UNALLOW_BATCH_KILL_ENGINE_TYPES = + CommonVars.apply("wds.linkis.allow.batch.kill.engine.types", "trino,appconn,io_file"); public static final CommonVars MULTI_USER_ENGINE_USER = CommonVars.apply("wds.linkis.multi.user.engine.user", getDefaultMultiEngineUser()); public static final CommonVars ENGINE_LOCKER_MAX_TIME = CommonVars.apply("wds.linkis.manager.am.engine.locker.max.time", 1000 * 60 * 5); - public static final CommonVars AM_CAN_RETRY_LOGS = + public static final String AM_CAN_RETRY_LOGS = CommonVars.apply( - "wds.linkis.manager.am.can.retry.logs", "already in use;Cannot allocate memory"); + "wds.linkis.manager.am.can.retry.logs", "already in use;Cannot allocate memory") + .getValue(); public static final int ASK_ENGINE_ASYNC_MAX_THREAD_SIZE = CommonVars.apply("wds.linkis.ecm.launch.max.thread.size", 200).getValue(); @@ -102,11 +131,56 @@ public class AMConfiguration { public static final Boolean NODE_SELECT_HOTSPOT_EXCLUSION_RULE = CommonVars.apply("linkis.node.select.hotspot.exclusion.rule.enable", true).getValue(); + public static final boolean EC_REUSE_WITH_RESOURCE_RULE_ENABLE = + CommonVars.apply("linkis.ec.reuse.with.resource.rule.enable", false).getValue(); + + public static final String EC_REUSE_WITH_RESOURCE_WITH_ECS = + CommonVars.apply("linkis.ec.reuse.with.resource.with.ecs", "spark,hive,shell,python") + .getValue(); + + public static final String SUPPORT_CLUSTER_RULE_EC_TYPES = + 
CommonVars.apply("linkis.support.cluster.rule.ec.types", "").getValue(); + + public static final boolean HIVE_CLUSTER_EC_EXECUTE_ONCE_RULE_ENABLE = + CommonVars.apply("linkis.hive.cluster.ec.execute.once.rule.enable", true).getValue(); + + public static final String LONG_LIVED_LABEL = + CommonVars.apply("linkis.label.node.long.lived.label.keys", "tenant|yarnCluster").getValue(); + + public static final String TMP_LIVED_LABEL = + CommonVars.apply("linkis.label.node.tmp.lived.label.keys", "taskId").getValue(); + + public static final boolean COMBINED_WITHOUT_YARN_DEFAULT = + CommonVars.apply("linkis.combined.without.yarn.default", true).getValue(); + + public static final Map AM_ENGINE_ASK_MAX_NUMBER = new HashMap<>(); + + static { + String keyValue = + CommonVars.apply("linkis.am.engine.ask.max.number", "appconn=5,trino=10").getValue(); + String[] keyValuePairs = keyValue.split(","); + for (String pair : keyValuePairs) { + String[] array = pair.split("="); + if (array.length != 2) { + throw new IllegalArgumentException( + "linkis.am.engine.ask.max.number value is illegal, value is " + pair); + } else { + AM_ENGINE_ASK_MAX_NUMBER.put(array[0], Integer.parseInt(array[1])); + } + } + } + + public static final boolean AM_ECM_RESET_RESOURCE = + CommonVars.apply("linkis.am.ecm.reset.resource.enable", true).getValue(); + + public static final boolean AM_USER_RESET_RESOURCE = + CommonVars.apply("linkis.am.user.reset.resource.enable", true).getValue(); + public static String getDefaultMultiEngineUser() { String jvmUser = Utils.getJvmUser(); return String.format( - "{jdbc:\"%s\", es: \"%s\", presto:\"%s\", appconn:\"%s\", openlookeng:\"%s\", trino:\"%s\", nebula:\"%s\",doris:\"%s\", hbase:\"%s\",io_file:\"root\"}", - jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser); + "{jdbc:\"%s\", es: \"%s\", presto:\"%s\", appconn:\"%s\", openlookeng:\"%s\", trino:\"%s\", nebula:\"%s\",doris:\"%s\", hbase:\"%s\", jobserver:\"%s\",io_file:\"root\"}", + 
jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser, jvmUser); } public static boolean isMultiUserEngine(String engineType) { @@ -123,4 +197,14 @@ public static boolean isAllowKilledEngineType(String engineType) { Arrays.stream(allowBatchKillEngine).filter(e -> e.equalsIgnoreCase(engineType)).findFirst(); return findResult.isPresent(); } + + public static boolean isUnAllowKilledEngineType(String engineType) { + String[] unAllowBatchKillEngine = + AMConfiguration.UNALLOW_BATCH_KILL_ENGINE_TYPES.getValue().split(","); + Optional findResult = + Arrays.stream(unAllowBatchKillEngine) + .filter(e -> e.equalsIgnoreCase(engineType)) + .findFirst(); + return findResult.isPresent(); + } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ApplicationManagerSpringConfiguration.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ApplicationManagerSpringConfiguration.java deleted file mode 100644 index 96d28a185b..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ApplicationManagerSpringConfiguration.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.manager.am.conf; - -import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -@Configuration -public class ApplicationManagerSpringConfiguration { - - @ConditionalOnMissingBean - @Bean - public EngineConnConfigurationService getDefaultEngineConnConfigurationService() { - return new DefaultEngineConnConfigurationService(); - } -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ConfigurationMapCache.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ConfigurationMapCache.java index 039704351e..fa9843d955 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ConfigurationMapCache.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ConfigurationMapCache.java @@ -18,7 +18,9 @@ package org.apache.linkis.manager.am.conf; import org.apache.linkis.common.conf.Configuration; -import org.apache.linkis.governance.common.protocol.conf.*; +import org.apache.linkis.governance.common.protocol.conf.RequestQueryEngineConfigWithGlobalConfig; +import org.apache.linkis.governance.common.protocol.conf.RequestQueryGlobalConfig; +import 
org.apache.linkis.governance.common.protocol.conf.ResponseQueryConfig; import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; import org.apache.linkis.protocol.CacheableProtocol; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/DefaultEngineConnConfigurationService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/DefaultEngineConnConfigurationService.java index 3575491087..1492c6569f 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/DefaultEngineConnConfigurationService.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/DefaultEngineConnConfigurationService.java @@ -17,11 +17,13 @@ package org.apache.linkis.manager.am.conf; -import org.apache.linkis.manager.am.util.LinkisUtils; +import org.apache.linkis.common.utils.LinkisUtils; import org.apache.linkis.manager.label.entity.Label; import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; +import org.springframework.stereotype.Component; + import java.util.HashMap; import java.util.List; import java.util.Map; @@ -32,6 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +@Component class DefaultEngineConnConfigurationService implements EngineConnConfigurationService { private static final Logger logger = diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ManagerMonitorConf.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ManagerMonitorConf.java index 
c3a35f7921..bce581a2e9 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ManagerMonitorConf.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/conf/ManagerMonitorConf.java @@ -28,12 +28,6 @@ public class ManagerMonitorConf { public static final CommonVars NODE_HEARTBEAT_MAX_UPDATE_TIME = CommonVars.apply("wds.linkis.manager.am.node.heartbeat", new TimeType("12m")); - public static final CommonVars ENGINE_KILL_TIMEOUT = - CommonVars.apply("wds.linkis.manager.am.engine.kill.timeout", new TimeType("2m")); - - public static final CommonVars EM_KILL_TIMEOUT = - CommonVars.apply("wds.linkis.manager.am.em.kill.timeout", new TimeType("2m")); - public static final CommonVars MANAGER_MONITOR_ASYNC_POLL_SIZE = CommonVars.apply("wds.linkis.manager.monitor.async.poll.size", 5); @@ -42,4 +36,7 @@ public class ManagerMonitorConf { public static final CommonVars ECM_HEARTBEAT_MAX_UPDATE_TIME = CommonVars.apply("wds.linkis.manager.am.ecm.heartbeat", new TimeType("5m")); + + public static final CommonVars ACROSS_QUEUES_RESOURCE_SHOW_SWITCH_ON = + CommonVars.apply("wds.linkis.manager.across.resource.show.switch.on", false); } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/DefaultMetricsConverter.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/DefaultMetricsConverter.java index 4736433ada..c7620157a1 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/DefaultMetricsConverter.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/DefaultMetricsConverter.java @@ -23,7 +23,6 @@ import 
org.apache.linkis.manager.common.entity.metrics.NodeOverLoadInfo; import org.apache.linkis.manager.common.entity.metrics.NodeTaskInfo; import org.apache.linkis.manager.common.entity.node.AMNode; -import org.apache.linkis.manager.service.common.metrics.MetricsConverter; import org.apache.linkis.server.BDPJettyServerHelper; import org.apache.commons.lang3.StringUtils; @@ -55,7 +54,7 @@ public NodeTaskInfo parseTaskInfo(NodeMetrics nodeMetrics) { return taskInfo; } } catch (IOException e) { - logger.error("parse task info failed", e); + logger.warn("parse task info failed", e); } } return null; @@ -68,7 +67,7 @@ public NodeHealthyInfo parseHealthyInfo(NodeMetrics nodeMetrics) { try { return BDPJettyServerHelper.jacksonJson().readValue(healthyInfo, NodeHealthyInfo.class); } catch (IOException e) { - logger.error("parse healthy info failed", e); + logger.warn("parse healthy info failed", e); } } return null; @@ -81,7 +80,7 @@ public NodeOverLoadInfo parseOverLoadInfo(NodeMetrics nodeMetrics) { try { return BDPJettyServerHelper.jacksonJson().readValue(overLoad, NodeOverLoadInfo.class); } catch (IOException e) { - logger.error("parse over load info failed", e); + logger.warn("parse over load info failed", e); } } return null; @@ -97,7 +96,7 @@ public String convertTaskInfo(NodeTaskInfo nodeTaskInfo) { try { return BDPJettyServerHelper.jacksonJson().writeValueAsString(nodeTaskInfo); } catch (JsonProcessingException e) { - logger.error("convert task info failed", e); + logger.warn("convert task info failed", e); } return null; } @@ -107,7 +106,7 @@ public String convertHealthyInfo(NodeHealthyInfo nodeHealthyInfo) { try { return BDPJettyServerHelper.jacksonJson().writeValueAsString(nodeHealthyInfo); } catch (JsonProcessingException e) { - logger.error("convert healthy info failed", e); + logger.warn("convert healthy info failed", e); } return null; } @@ -117,7 +116,7 @@ public String convertOverLoadInfo(NodeOverLoadInfo nodeOverLoadInfo) { try { return 
BDPJettyServerHelper.jacksonJson().writeValueAsString(nodeOverLoadInfo); } catch (JsonProcessingException e) { - logger.error("convert over load info failed", e); + logger.warn("convert over load info failed", e); } return null; } @@ -129,7 +128,9 @@ public int convertStatus(NodeStatus nodeStatus) { @Override public AMNode fillMetricsToNode(AMNode amNode, NodeMetrics metrics) { - if (metrics == null) return amNode; + if (metrics == null) { + return amNode; + } amNode.setNodeStatus(parseStatus(metrics)); amNode.setNodeTaskInfo(parseTaskInfo(metrics)); amNode.setNodeHealthyInfo(parseHealthyInfo(metrics)); diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/metrics/MetricsConverter.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/MetricsConverter.java similarity index 97% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/metrics/MetricsConverter.java rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/MetricsConverter.java index acc46510cd..e84b577f45 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/metrics/MetricsConverter.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/converter/MetricsConverter.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.service.common.metrics; +package org.apache.linkis.manager.am.converter; import org.apache.linkis.common.ServiceInstance; import org.apache.linkis.manager.common.entity.enumeration.NodeHealthy; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorCode.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorCode.java index cc8997d857..c05768739c 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorCode.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorCode.java @@ -32,8 +32,7 @@ public enum AMErrorCode implements LinkisErrorCode { ASK_ENGINE_ERROR_RETRY(210005, "Ask engine error, retry(请求引擎失败,重试)"), - EC_OPERATE_ERROR(210006, "Failed to execute operation(引擎操作失败)"), - EC_PLUGIN_ERROR(210007, "ECType {0} contains RelativePath"); + EC_OPERATE_ERROR(210006, "Failed to execute operation(引擎操作失败)"); private final int errorCode; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorException.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorException.java index 4f3badbb9a..727fcc3133 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorException.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorException.java @@ -17,10 +17,9 @@ package org.apache.linkis.manager.am.exception; -import 
org.apache.linkis.common.exception.ExceptionLevel; -import org.apache.linkis.common.exception.LinkisRuntimeException; +import org.apache.linkis.common.exception.ErrorException; -public class AMErrorException extends LinkisRuntimeException { +public class AMErrorException extends ErrorException { public AMErrorException(int errCode, String desc) { super(errCode, desc); @@ -30,9 +29,4 @@ public AMErrorException(int errCode, String desc, Throwable t) { this(errCode, desc); this.initCause(t); } - - @Override - public ExceptionLevel getLevel() { - return ExceptionLevel.ERROR; - } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelChecker.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelChecker.java index a0928db981..13e45832c8 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelChecker.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelChecker.java @@ -21,7 +21,6 @@ import org.apache.linkis.manager.label.entity.em.EMInstanceLabel; import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; -import org.apache.linkis.manager.service.common.label.LabelChecker; import org.springframework.stereotype.Component; @@ -45,7 +44,6 @@ public boolean checkEMLabel(List> labelList) { @Override public boolean checkCorrespondingLabel(List> labelList, Class... 
clazz) { - // TODO: 是否需要做子类的判断 List> classes = Arrays.asList(clazz); return labelList.stream() .filter(Objects::nonNull) diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelFilter.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelFilter.java index 5fa8e7db12..8820bc0be8 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelFilter.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/AMLabelFilter.java @@ -18,7 +18,9 @@ package org.apache.linkis.manager.am.label; import org.apache.linkis.governance.common.conf.GovernanceCommonConf; -import org.apache.linkis.manager.label.entity.*; +import org.apache.linkis.manager.label.entity.EMNodeLabel; +import org.apache.linkis.manager.label.entity.EngineNodeLabel; +import org.apache.linkis.manager.label.entity.Label; import org.apache.linkis.manager.label.entity.node.AliasServiceInstanceLabel; import org.apache.linkis.manager.service.common.label.LabelFilter; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/label/LabelChecker.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/LabelChecker.java similarity index 95% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/label/LabelChecker.java rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/LabelChecker.java index 87dab1f333..0c1e27d086 100644 --- 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/label/LabelChecker.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/LabelChecker.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.manager.service.common.label; +package org.apache.linkis.manager.am.label; import org.apache.linkis.manager.label.entity.Label; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/MultiUserEngineReuseLabelChooser.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/MultiUserEngineReuseLabelChooser.java index 4fbe6894f4..b8ed766072 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/MultiUserEngineReuseLabelChooser.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/label/MultiUserEngineReuseLabelChooser.java @@ -38,8 +38,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static scala.collection.JavaConverters.*; - @Component public class MultiUserEngineReuseLabelChooser implements EngineReuseLabelChooser { private static final Logger logger = diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/locker/DefaultEngineNodeLocker.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/locker/DefaultEngineNodeLocker.java index 00adf1492b..fe93a03411 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/locker/DefaultEngineNodeLocker.java +++ 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/locker/DefaultEngineNodeLocker.java @@ -17,13 +17,13 @@ package org.apache.linkis.manager.am.locker; +import org.apache.linkis.manager.am.pointer.NodePointerBuilder; import org.apache.linkis.manager.common.entity.node.AMEngineNode; import org.apache.linkis.manager.common.entity.node.EngineNode; import org.apache.linkis.manager.common.protocol.RequestEngineLock; import org.apache.linkis.manager.common.protocol.RequestEngineUnlock; import org.apache.linkis.manager.common.protocol.RequestManagerUnlock; import org.apache.linkis.manager.common.protocol.engine.EngineLockType; -import org.apache.linkis.manager.service.common.pointer.NodePointerBuilder; import org.apache.linkis.rpc.message.annotation.Receiver; import org.springframework.beans.factory.annotation.Autowired; @@ -58,14 +58,14 @@ public void releaseLock(RequestManagerUnlock requestManagerUnlock) { logger.info( String.format( "client%s Start to unlock engine %s", - requestManagerUnlock.getClientInstance(), requestManagerUnlock.getEngineInstance())); + requestManagerUnlock.clientInstance(), requestManagerUnlock.engineInstance())); AMEngineNode engineNode = new AMEngineNode(); - engineNode.setServiceInstance(requestManagerUnlock.getEngineInstance()); - releaseLock(engineNode, requestManagerUnlock.getLock()); + engineNode.setServiceInstance(requestManagerUnlock.engineInstance()); + releaseLock(engineNode, requestManagerUnlock.lock()); logger.info( String.format( "client%s Finished to unlock engine %s", - requestManagerUnlock.getClientInstance(), requestManagerUnlock.getEngineInstance())); + requestManagerUnlock.clientInstance(), requestManagerUnlock.engineInstance())); } catch (Exception e) { logger.error("release lock failed", e); } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEMNodeManager.java 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEMNodeManager.java index cf66e88e5d..691aa635a4 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEMNodeManager.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEMNodeManager.java @@ -18,6 +18,8 @@ package org.apache.linkis.manager.am.manager; import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.manager.am.converter.MetricsConverter; +import org.apache.linkis.manager.am.pointer.NodePointerBuilder; import org.apache.linkis.manager.common.entity.metrics.NodeMetrics; import org.apache.linkis.manager.common.entity.node.*; import org.apache.linkis.manager.common.entity.persistence.PersistenceNodeEntity; @@ -30,8 +32,6 @@ import org.apache.linkis.manager.persistence.NodeMetricManagerPersistence; import org.apache.linkis.manager.rm.ResourceInfo; import org.apache.linkis.manager.rm.service.ResourceManager; -import org.apache.linkis.manager.service.common.metrics.MetricsConverter; -import org.apache.linkis.manager.service.common.pointer.NodePointerBuilder; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -150,7 +150,7 @@ public EMNode[] getEMNodes(ScoreServiceInstance[] scoreServiceInstances) { .filter(metrics -> metrics.getServiceInstance().equals(emNode.getServiceInstance())) .findFirst(); Optional optionRMNode = - resourceInfo.getResourceInfo().stream() + resourceInfo.resourceInfo().stream() .filter(rmNode -> rmNode.getServiceInstance().equals(emNode.getServiceInstance())) .findFirst(); @@ -171,7 +171,7 @@ public EMNode getEM(ServiceInstance serviceInstance) { emNode.setOwner(node.getOwner()); emNode.setServiceInstance(node.getServiceInstance()); if (node instanceof 
PersistenceNodeEntity) { - emNode.setStartTime(((PersistenceNodeEntity) node).getStartTime()); + emNode.setStartTime(node.getStartTime()); } emNode.setMark(emNode.getMark()); metricsConverter.fillMetricsToNode(emNode, nodeMetricManagerPersistence.getNodeMetrics(emNode)); @@ -197,7 +197,7 @@ public void pauseEM(ServiceInstance serviceInstance) {} /** * 1. request engineManager to launch engine * - * @param engineBuildRequest + * @param engineConnLaunchRequest * @param emNode * @return */ diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.java index 14d548ef77..02b143d5cd 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.java @@ -20,11 +20,12 @@ import org.apache.linkis.common.ServiceInstance; import org.apache.linkis.common.exception.LinkisRetryException; import org.apache.linkis.manager.am.conf.AMConfiguration; +import org.apache.linkis.manager.am.converter.MetricsConverter; import org.apache.linkis.manager.am.exception.AMErrorCode; import org.apache.linkis.manager.am.exception.AMErrorException; import org.apache.linkis.manager.am.locker.EngineNodeLocker; -import org.apache.linkis.manager.am.utils.DefaultRetryHandler; -import org.apache.linkis.manager.am.utils.RetryHandler; +import org.apache.linkis.manager.am.pointer.EngineNodePointer; +import org.apache.linkis.manager.am.pointer.NodePointerBuilder; import org.apache.linkis.manager.common.constant.AMConstant; import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; import 
org.apache.linkis.manager.common.entity.metrics.NodeMetrics; @@ -41,11 +42,12 @@ import org.apache.linkis.manager.persistence.NodeMetricManagerPersistence; import org.apache.linkis.manager.rm.ResourceInfo; import org.apache.linkis.manager.rm.service.ResourceManager; -import org.apache.linkis.manager.service.common.metrics.MetricsConverter; -import org.apache.linkis.manager.service.common.pointer.EngineNodePointer; -import org.apache.linkis.manager.service.common.pointer.NodePointerBuilder; + +import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.retry.annotation.Backoff; +import org.springframework.retry.annotation.Retryable; import org.springframework.stereotype.Service; import java.lang.reflect.UndeclaredThrowableException; @@ -104,9 +106,12 @@ public List listEngines(String user) { return nodes; } + @Retryable( + value = {feign.RetryableException.class, UndeclaredThrowableException.class}, + maxAttempts = 5, + backoff = @Backoff(delay = 10000)) @Override public EngineNode getEngineNodeInfo(EngineNode engineNode) { - /** Change the EngineNode to correspond to real-time requests?(修改为实时请求对应的EngineNode) */ EngineNodePointer engine = nodePointerBuilder.buildEngineNodePointer(engineNode); NodeHeartbeatMsg heartMsg = engine.getNodeHeartbeatMsg(); engineNode.setNodeHealthyInfo(heartMsg.getHealthyInfo()); @@ -155,7 +160,7 @@ public EngineNode switchEngine(EngineNode engineNode) { @Override public EngineNode reuseEngine(EngineNode engineNode) { EngineNode node = getEngineNodeInfo(engineNode); - if (!NodeStatus.isAvailable(node.getNodeStatus())) { + if (node == null || !NodeStatus.isAvailable(node.getNodeStatus())) { return null; } if (!NodeStatus.isLocked(node.getNodeStatus())) { @@ -183,22 +188,8 @@ public EngineNode reuseEngine(EngineNode engineNode) { */ @Override public EngineNode useEngine(EngineNode engineNode, long timeout) { - RetryHandler retryHandler = new DefaultRetryHandler(); - 
retryHandler.addRetryException(feign.RetryableException.class); - retryHandler.addRetryException(UndeclaredThrowableException.class); // wait until engine to be available - EngineNode node = retryHandler.retry(() -> getEngineNodeInfo(engineNode), "getEngineNodeInfo"); - long retryEndTime = System.currentTimeMillis() + 60 * 1000; - while ((node == null || !NodeStatus.isAvailable(node.getNodeStatus())) - && System.currentTimeMillis() < retryEndTime) { - node = retryHandler.retry(() -> getEngineNodeInfo(engineNode), "getEngineNodeInfo"); - try { - Thread.sleep(5 * 1000); - } catch (InterruptedException e) { - // ignore - } - } - + EngineNode node = getEngineNodeInfo(engineNode); if (node == null || !NodeStatus.isAvailable(node.getNodeStatus())) { return null; } @@ -216,8 +207,13 @@ public EngineNode useEngine(EngineNode engineNode, long timeout) { } } + @Override + public EngineNode useEngine(EngineNode engineNode) { + return useEngine(engineNode, AMConfiguration.ENGINE_LOCKER_MAX_TIME.getValue()); + } + /** - * Get detailed engine information from the persistence //TODO 是否增加owner到node + * Get detailed engine information from the persistence * * @param scoreServiceInstances * @return @@ -227,8 +223,9 @@ public EngineNode[] getEngineNodes(ScoreServiceInstance[] scoreServiceInstances) if (scoreServiceInstances == null || scoreServiceInstances.length == 0) { return null; } + List scoreServiceInstancesList = Arrays.asList(scoreServiceInstances); EngineNode[] engineNodes = - Arrays.stream(scoreServiceInstances) + scoreServiceInstancesList.stream() .map( scoreServiceInstance -> { AMEngineNode engineNode = new AMEngineNode(); @@ -237,42 +234,48 @@ public EngineNode[] getEngineNodes(ScoreServiceInstance[] scoreServiceInstances) return engineNode; }) .toArray(EngineNode[]::new); - // 1. 
add nodeMetrics 2 add RM info - ServiceInstance[] serviceInstances = - Arrays.stream(scoreServiceInstances) + + List serviceInstancesList = + scoreServiceInstancesList.stream() .map(ScoreServiceInstance::getServiceInstance) - .toArray(ServiceInstance[]::new); - ResourceInfo resourceInfo = resourceManager.getResourceInfo(serviceInstances); - - List nodeMetrics = - nodeMetricManagerPersistence.getNodeMetrics( - Arrays.stream(engineNodes).collect(Collectors.toList())); - Arrays.stream(engineNodes) - .forEach( - engineNode -> { - Optional optionMetrics = - nodeMetrics.stream() - .filter( - nodeMetric -> - nodeMetric - .getServiceInstance() - .equals(engineNode.getServiceInstance())) - .findFirst(); - - Optional optionRMNode = - resourceInfo.getResourceInfo().stream() - .filter( - resourceNode -> - resourceNode - .getServiceInstance() - .equals(engineNode.getServiceInstance())) - .findFirst(); - - optionMetrics.ifPresent( - metrics -> metricsConverter.fillMetricsToNode(engineNode, metrics)); - optionRMNode.ifPresent( - rmNode -> engineNode.setNodeResource(rmNode.getNodeResource())); - }); + .collect(Collectors.toList()); + + try { + ResourceInfo resourceInfo = + resourceManager.getResourceInfo(serviceInstancesList.toArray(new ServiceInstance[0])); + + if (serviceInstancesList.isEmpty()) { + throw new LinkisRetryException( + AMConstant.ENGINE_ERROR_CODE, "Service instances cannot be empty."); + } + + List nodeMetrics = + nodeMetricManagerPersistence.getNodeMetrics(Arrays.asList(engineNodes)); + + for (EngineNode engineNode : engineNodes) { + Optional optionMetrics = + nodeMetrics.stream() + .filter( + nodeMetric -> + nodeMetric.getServiceInstance().equals(engineNode.getServiceInstance())) + .findFirst(); + + Optional optionRMNode = + resourceInfo.resourceInfo().stream() + .filter( + resourceNode -> + resourceNode.getServiceInstance().equals(engineNode.getServiceInstance())) + .findFirst(); + + optionMetrics.ifPresent(metrics -> 
metricsConverter.fillMetricsToNode(engineNode, metrics)); + optionRMNode.ifPresent(rmNode -> engineNode.setNodeResource(rmNode.getNodeResource())); + } + } catch (Exception e) { + LinkisRetryException linkisRetryException = + new LinkisRetryException(AMConstant.ENGINE_ERROR_CODE, "Failed to process data."); + linkisRetryException.initCause(e); + throw linkisRetryException; + } return engineNodes; } @@ -349,14 +352,16 @@ public EngineNode getEngineNodeInfo(ServiceInstance serviceInstance) { AMErrorCode.NOT_EXISTS_ENGINE_CONN.getErrorCode(), AMErrorCode.NOT_EXISTS_ENGINE_CONN.getErrorDesc()); } - + NodeMetrics nodeMetric = nodeMetricManagerPersistence.getNodeMetrics(engineNode); if (engineNode.getNodeStatus() == null) { - NodeMetrics nodeMetric = nodeMetricManagerPersistence.getNodeMetrics(engineNode); - if (Objects.nonNull(nodeMetric) && Objects.nonNull(nodeMetric.getStatus())) { + if (null != nodeMetric && null != nodeMetric.getStatus()) { engineNode.setNodeStatus(NodeStatus.values()[nodeMetric.getStatus()]); } else { engineNode.setNodeStatus(NodeStatus.Starting); } + if (null != nodeMetric && StringUtils.isNotBlank(nodeMetric.getHeartBeatMsg())) { + engineNode.setEcMetrics(nodeMetric.getHeartBeatMsg()); + } } return engineNode; } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EMNodeManager.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EMNodeManager.java index 8cb8362ecc..cf540036a3 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EMNodeManager.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EMNodeManager.java @@ -20,6 +20,7 @@ import org.apache.linkis.common.ServiceInstance; import 
org.apache.linkis.manager.common.entity.node.EMNode; import org.apache.linkis.manager.common.entity.node.EngineNode; +import org.apache.linkis.manager.common.entity.node.Node; import org.apache.linkis.manager.common.entity.node.ScoreServiceInstance; import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest; import org.apache.linkis.manager.common.protocol.em.ECMOperateResponse; @@ -32,11 +33,11 @@ public interface EMNodeManager { void emRegister(EMNode emNode); - List listEngines(EMNode emNode); + List listEngines(EMNode emNode); - List listUserEngines(EMNode emNode, String user); + List listUserEngines(EMNode emNode, String user); - List listUserNodes(String user); + List listUserNodes(String user); /** * Get detailed em information from the persistence diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EngineNodeManager.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EngineNodeManager.java index ce79d79c7e..7c3f64efee 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EngineNodeManager.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/manager/EngineNodeManager.java @@ -65,5 +65,7 @@ void updateEngineStatus( EngineNode useEngine(EngineNode engineNode, long timeout); + EngineNode useEngine(EngineNode engineNode); + EngineOperateResponse executeOperation(EngineNode engineNode, EngineOperateRequest request); } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/AbstractNodePointer.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/AbstractNodePointer.java index 
80390d0883..fc0a1fca24 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/AbstractNodePointer.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/AbstractNodePointer.java @@ -17,12 +17,10 @@ package org.apache.linkis.manager.am.pointer; -import org.apache.linkis.common.exception.WarnException; import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; import org.apache.linkis.manager.common.entity.node.Node; import org.apache.linkis.manager.common.protocol.node.*; import org.apache.linkis.manager.label.entity.Label; -import org.apache.linkis.manager.service.common.pointer.NodePointer; import org.apache.linkis.rpc.Sender; public abstract class AbstractNodePointer implements NodePointer { @@ -39,12 +37,8 @@ protected Sender getSender() { @Override public NodeStatus getNodeStatus() { Sender sender = getSender(); - try { - ResponseNodeStatus responseStatus = (ResponseNodeStatus) sender.ask(new RequestNodeStatus()); - return responseStatus.getNodeStatus(); - } catch (WarnException e) { - throw e; - } + ResponseNodeStatus responseStatus = (ResponseNodeStatus) sender.ask(new RequestNodeStatus()); + return responseStatus.getNodeStatus(); } /** @@ -55,12 +49,8 @@ public NodeStatus getNodeStatus() { @Override public NodeHeartbeatMsg getNodeHeartbeatMsg() { Sender sender = getSender(); - try { - NodeHeartbeatMsg heartbeatMsg = (NodeHeartbeatMsg) sender.ask(new NodeHeartbeatRequest()); - return heartbeatMsg; - } catch (WarnException e) { - throw e; - } + NodeHeartbeatMsg heartbeatMsg = (NodeHeartbeatMsg) sender.ask(new NodeHeartbeatRequest()); + return heartbeatMsg; } /** diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEMNodPointer.java 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEMNodPointer.java index 1458680c98..07097fcb0b 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEMNodPointer.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEMNodPointer.java @@ -27,7 +27,6 @@ import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest; import org.apache.linkis.manager.common.protocol.engine.EngineStopResponse; import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnLaunchRequest; -import org.apache.linkis.manager.service.common.pointer.EMNodPointer; import org.apache.linkis.server.BDPJettyServerHelper; import org.slf4j.Logger; @@ -73,28 +72,39 @@ public EngineNode createEngine(EngineConnLaunchRequest engineConnLaunchRequest) @Override public void stopEngine(EngineStopRequest engineStopRequest) { - Object result = getSender().ask(engineStopRequest); - if (result instanceof EngineStopResponse) { - EngineStopResponse engineStopResponse = (EngineStopResponse) result; - if (!engineStopResponse.getStopStatus()) { - logger.info( - "Kill engine : " - + engineStopRequest.getServiceInstance().toString() - + " failed, because " - + engineStopResponse.getMsg() - + " . Will ask engine to suicide."); + try { + Object result = getSender().ask(engineStopRequest); + if (result instanceof EngineStopResponse) { + EngineStopResponse engineStopResponse = (EngineStopResponse) result; + if (!engineStopResponse.getStopStatus()) { + logger.info( + "Kill engine : " + + engineStopRequest.getServiceInstance().toString() + + " failed, because " + + engineStopResponse.getMsg() + + " . 
Will ask engine to suicide."); + } else { + logger.info( + "Succeed to kill engine " + engineStopRequest.getServiceInstance().toString() + "."); + } } else { - logger.info( - "Succeed to kill engine " + engineStopRequest.getServiceInstance().toString() + "."); + logger.warn( + "Ask em : " + + getNode().getServiceInstance().toString() + + " to kill engine : " + + engineStopRequest.getServiceInstance().toString() + + " failed, response is : " + + BDPJettyServerHelper.gson().toJson(result) + + "."); } - } else { + } catch (Exception e) { logger.warn( "Ask em : " + getNode().getServiceInstance().toString() + " to kill engine : " + engineStopRequest.getServiceInstance().toString() - + " failed, response is : " - + BDPJettyServerHelper.gson().toJson(result) + + " failed, exception is : " + + e.getMessage() + "."); } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEngineNodPointer.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEngineNodPointer.java index 21f86f83d1..cdbbcbbf09 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEngineNodPointer.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultEngineNodPointer.java @@ -24,7 +24,6 @@ import org.apache.linkis.manager.common.protocol.ResponseEngineLock; import org.apache.linkis.manager.common.protocol.engine.EngineOperateRequest; import org.apache.linkis.manager.common.protocol.engine.EngineOperateResponse; -import org.apache.linkis.manager.service.common.pointer.EngineNodePointer; import java.util.Optional; @@ -50,14 +49,11 @@ public Optional lockEngine(RequestEngineLock requestEngineLock) { Object result = getSender().ask(requestEngineLock); if (result 
instanceof ResponseEngineLock) { ResponseEngineLock responseEngineLock = (ResponseEngineLock) result; - if (responseEngineLock.getLockStatus()) { - return Optional.of(responseEngineLock.getLock()); + if (responseEngineLock.lockStatus()) { + return Optional.of(responseEngineLock.lock()); } else { logger.info( - "Failed to get locker," - + node.getServiceInstance() - + ": " - + responseEngineLock.getMsg()); + "Failed to get locker," + node.getServiceInstance() + ": " + responseEngineLock.msg()); return Optional.empty(); } } else { diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultNodePointerBuilder.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultNodePointerBuilder.java index 03e0bc8087..58ec1ae940 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultNodePointerBuilder.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/DefaultNodePointerBuilder.java @@ -19,9 +19,6 @@ import org.apache.linkis.manager.common.entity.node.EMNode; import org.apache.linkis.manager.common.entity.node.EngineNode; -import org.apache.linkis.manager.service.common.pointer.EMNodPointer; -import org.apache.linkis.manager.service.common.pointer.EngineNodePointer; -import org.apache.linkis.manager.service.common.pointer.NodePointerBuilder; import org.springframework.stereotype.Component; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/EMNodPointer.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EMNodPointer.java similarity index 96% rename from 
linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/EMNodPointer.java rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EMNodPointer.java index 6574563003..a85bf6eaca 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/EMNodPointer.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EMNodPointer.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.manager.service.common.pointer; +package org.apache.linkis.manager.am.pointer; import org.apache.linkis.manager.common.entity.node.EngineNode; import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/EngineNodePointer.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EngineNodePointer.java similarity index 96% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/EngineNodePointer.java rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EngineNodePointer.java index cc24b746af..8be00a09c2 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/EngineNodePointer.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/EngineNodePointer.java @@ -15,7 +15,7 @@ * limitations under 
the License. */ -package org.apache.linkis.manager.service.common.pointer; +package org.apache.linkis.manager.am.pointer; import org.apache.linkis.manager.common.protocol.RequestEngineLock; import org.apache.linkis.manager.common.protocol.RequestEngineUnlock; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/NodePointer.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointer.java similarity index 96% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/NodePointer.java rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointer.java index ca27bf8194..e5d519873e 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/NodePointer.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointer.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.service.common.pointer; +package org.apache.linkis.manager.am.pointer; import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; import org.apache.linkis.manager.common.entity.node.Node; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/NodePointerBuilder.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointerBuilder.java similarity index 94% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/NodePointerBuilder.java rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointerBuilder.java index e993a724b1..649e142399 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/service/common/pointer/NodePointerBuilder.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/pointer/NodePointerBuilder.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.service.common.pointer; +package org.apache.linkis.manager.am.pointer; import org.apache.linkis.manager.common.entity.node.EMNode; import org.apache.linkis.manager.common.entity.node.EngineNode; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java index c3c14a8094..70caae1a8d 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java @@ -71,6 +71,7 @@ public class ECResourceInfoRestfulApi { public Message getECInfo( HttpServletRequest req, @RequestParam(value = "ticketid") String ticketid) throws AMErrorException { + logger.info("ticket: {} get ec info", ticketid); ECResourceInfoRecord ecResourceInfoRecord = ecResourceInfoService.getECResourceInfoRecord(ticketid); String userName = ModuleUserUtils.getOperationUser(req, "getECInfo ticketid:" + ticketid); @@ -87,7 +88,7 @@ public Message getECInfo( @ApiImplicitParams({ @ApiImplicitParam(name = "ticketid", required = true, dataType = "String", value = "ticket id") }) - @RequestMapping(path = "/delete/{ticketid}", method = RequestMethod.DELETE) + @RequestMapping(path = "/delete/{ticketid}", method = RequestMethod.DELETE) public Message deleteECInfo(HttpServletRequest req, @PathVariable("ticketid") String ticketid) throws AMErrorException { ECResourceInfoRecord ecResourceInfoRecord = @@ -113,6 +114,7 @@ public Message deleteECInfo(HttpServletRequest req, @PathVariable("ticketid") St @ApiImplicitParam(name = "startDate", dataType = "String", value = "start 
date"), @ApiImplicitParam(name = "endDate", dataType = "String", value = "end date"), @ApiImplicitParam(name = "engineType", dataType = "String", value = "engine type"), + @ApiImplicitParam(name = "status", dataType = "String", value = "engine status"), @ApiImplicitParam(name = "pageNow", dataType = "String", value = "page now"), @ApiImplicitParam(name = "pageSize", dataType = "String", value = "page size") }) @@ -131,6 +133,7 @@ public Message queryEcrHistory( defaultValue = "#{new java.util.Date()}") Date endDate, @RequestParam(value = "engineType", required = false) String engineType, + @RequestParam(value = "status", required = false) String status, @RequestParam(value = "pageNow", required = false, defaultValue = "1") Integer pageNow, @RequestParam(value = "pageSize", required = false, defaultValue = "20") Integer pageSize) { String username = SecurityFilter.getLoginUsername(req); @@ -138,6 +141,7 @@ public Message queryEcrHistory( instance = ECResourceInfoUtils.strCheckAndDef(instance, null); String creatorUser = ECResourceInfoUtils.strCheckAndDef(creator, null); engineType = ECResourceInfoUtils.strCheckAndDef(engineType, null); + status = ECResourceInfoUtils.strCheckAndDef(status, null); if (null != creatorUser && !ECResourceInfoUtils.checkNameValid(creatorUser)) { return Message.error("Invalid creator : " + creatorUser); } @@ -148,7 +152,7 @@ public Message queryEcrHistory( calendar.set(Calendar.SECOND, 0); startDate = calendar.getTime(); } - if (Configuration.isAdmin(username)) { + if (Configuration.isJobHistoryAdmin(username)) { username = null; if (StringUtils.isNotBlank(creatorUser)) { username = creatorUser; @@ -161,7 +165,7 @@ public Message queryEcrHistory( try { queryTasks = ecResourceInfoService.getECResourceInfoRecordList( - instance, endDate, startDate, username, engineType); + instance, endDate, startDate, username, engineType, status); queryTasks.forEach( info -> { ECResourceInfoRecordVo ecrHistroryListVo = new ECResourceInfoRecordVo(); @@ 
-188,25 +192,51 @@ public Message queryEcrHistory( @ApiImplicitParam(name = "creators", dataType = "Array", required = true, value = "creators"), @ApiImplicitParam(name = "engineTypes", dataType = "Array", value = "engine type"), @ApiImplicitParam(name = "statuss", dataType = "Array", value = "statuss"), + @ApiImplicitParam(name = "queueName", dataType = "String", value = "queueName"), + @ApiImplicitParam(name = "ecInstances", dataType = "Array", value = "ecInstances"), + @ApiImplicitParam(name = "crossCluster", dataType = "String", value = "crossCluster"), }) @RequestMapping(path = "/ecList", method = RequestMethod.POST) public Message queryEcList(HttpServletRequest req, @RequestBody JsonNode jsonNode) { + String username = ModuleUserUtils.getOperationUser(req, "ecList"); + String token = ModuleUserUtils.getToken(req); + // check special admin token + if (StringUtils.isNotBlank(token)) { + if (!Configuration.isAdminToken(token)) { + logger.warn("Token:{} has no permission to query ecList.", token); + return Message.error("Token:" + token + " has no permission to query ecList."); + } + } else if (!Configuration.isAdmin(username)) { + logger.warn("User:{} has no permission to query ecList.", username); + return Message.error("User:" + username + " has no permission to query ecList."); + } JsonNode creatorsParam = jsonNode.get("creators"); JsonNode engineTypesParam = jsonNode.get("engineTypes"); JsonNode statussParam = jsonNode.get("statuss"); + JsonNode queueNameParam = jsonNode.get("queueName"); + JsonNode ecInstancesParam = jsonNode.get("ecInstances"); + JsonNode crossClusterParam = jsonNode.get("crossCluster"); - if (creatorsParam == null || creatorsParam.isNull() || creatorsParam.size() == 0) { - return Message.error("creators is null in the parameters of the request(请求参数中【creators】为空)"); - } + // if (creatorsParam == null || creatorsParam.isNull() || creatorsParam.size() == 0) { + // return Message.error("creators is null in the parameters of the + // 
request(请求参数中【creators】为空)"); + // } List creatorUserList = new ArrayList<>(); - try { - creatorUserList = - JsonUtils.jackson() - .readValue(creatorsParam.toString(), new TypeReference>() {}); - } catch (JsonProcessingException e) { - return Message.error("parameters:creators parsing failed(请求参数【creators】解析失败)"); + if (creatorsParam != null && !creatorsParam.isNull()) { + try { + creatorUserList = + JsonUtils.jackson() + .readValue(creatorsParam.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + return Message.error("parameters:creators parsing failed(请求参数【creators】解析失败)"); + } + for (String creatorUser : creatorUserList) { + if (null != creatorUser && !ECResourceInfoUtils.checkNameValid(creatorUser)) { + return Message.error("Invalid creator: " + creatorUser); + } + } } List engineTypeList = new ArrayList<>(); @@ -230,34 +260,52 @@ public Message queryEcList(HttpServletRequest req, @RequestBody JsonNode jsonNod return Message.error("parameters:statuss parsing failed(请求参数【statuss】解析失败)"); } } - - String username = ModuleUserUtils.getOperationUser(req, "ecList"); - - String token = ModuleUserUtils.getToken(req); - // check special admin token - if (StringUtils.isNotBlank(token)) { - if (!Configuration.isAdminToken(token)) { - return Message.error("Token has no permission to query ecList."); + String queueName = ""; + if (queueNameParam != null && !queueNameParam.isNull()) { + try { + queueName = + JsonUtils.jackson() + .readValue(queueNameParam.toString(), new TypeReference() {}); + } catch (JsonProcessingException e) { + return Message.error("parameters:queueName parsing failed(请求参数【queueName】解析失败)"); } - } else if (!Configuration.isAdmin(username)) { - logger.warn("User:{} has no permission to query ecList.", username); - return Message.error("User:" + username + " has no permission to query ecList."); } - - for (String creatorUser : creatorUserList) { - if (null != creatorUser && !ECResourceInfoUtils.checkNameValid(creatorUser)) { - 
return Message.error("Invalid creator: " + creatorUser); + List ecInstancesList = new ArrayList<>(); + if (ecInstancesParam != null && !ecInstancesParam.isNull()) { + try { + ecInstancesList = + JsonUtils.jackson() + .readValue(ecInstancesParam.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + return Message.error("parameters:instanceName parsing failed(请求参数【ecInstances】解析失败)"); + } + } + Boolean isCrossCluster = null; + if (crossClusterParam != null && !crossClusterParam.isNull()) { + try { + isCrossCluster = + JsonUtils.jackson() + .readValue(crossClusterParam.toString(), new TypeReference() {}); + } catch (JsonProcessingException e) { + return Message.error("parameters:crossCluster parsing failed(请求参数【crossCluster】解析失败)"); } } - logger.info( - "request parameters creatorUserList:[{}], engineTypeList:[{}], statusStrList:[{}]", + "request parameters creatorUserList:[{}], engineTypeList:[{}], statusStrList:[{}], queueName:{}, instanceNameList:{}", String.join(",", creatorUserList), String.join(",", engineTypeList), - String.join(",", statusStrList)); + String.join(",", statusStrList), + queueNameParam, + String.join(",", ecInstancesList)); List> list = - ecResourceInfoService.getECResourceInfoList(creatorUserList, engineTypeList, statusStrList); + ecResourceInfoService.getECResourceInfoList( + creatorUserList, + engineTypeList, + statusStrList, + queueName, + ecInstancesList, + isCrossCluster); return Message.ok().data("ecList", list); } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java index 4d5cb480d3..414a11bf13 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java +++ 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java @@ -28,22 +28,35 @@ import org.apache.linkis.manager.am.service.ECResourceInfoService; import org.apache.linkis.manager.am.service.em.ECMOperateService; import org.apache.linkis.manager.am.service.em.EMInfoService; +import org.apache.linkis.manager.am.service.engine.DefaultEngineCreateService; import org.apache.linkis.manager.am.utils.AMUtils; +import org.apache.linkis.manager.am.vo.CanCreateECRes; import org.apache.linkis.manager.am.vo.EMNodeVo; import org.apache.linkis.manager.common.entity.enumeration.NodeHealthy; import org.apache.linkis.manager.common.entity.metrics.NodeHealthyInfo; import org.apache.linkis.manager.common.entity.node.EMNode; import org.apache.linkis.manager.common.entity.node.EngineNode; import org.apache.linkis.manager.common.entity.persistence.ECResourceInfoRecord; -import org.apache.linkis.manager.common.protocol.OperateRequest; +import org.apache.linkis.manager.common.exception.RMErrorException; +import org.apache.linkis.manager.common.protocol.OperateRequest$; import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest; +import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest$; import org.apache.linkis.manager.common.protocol.em.ECMOperateResponse; +import org.apache.linkis.manager.common.protocol.engine.EngineCreateRequest; +import org.apache.linkis.manager.exception.PersistenceErrorException; import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactory; import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext; import org.apache.linkis.manager.label.entity.Label; import org.apache.linkis.manager.label.entity.UserModifiable; +import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; +import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; import org.apache.linkis.manager.label.exception.LabelErrorException; 
import org.apache.linkis.manager.label.service.NodeLabelService; +import org.apache.linkis.manager.label.utils.EngineTypeLabelCreator; +import org.apache.linkis.manager.persistence.LabelManagerPersistence; +import org.apache.linkis.manager.persistence.NodeMetricManagerPersistence; +import org.apache.linkis.manager.persistence.ResourceManagerPersistence; +import org.apache.linkis.manager.rm.external.service.ExternalResourceService; import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; @@ -58,6 +71,7 @@ import javax.servlet.http.HttpServletRequest; +import java.text.MessageFormat; import java.util.*; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -93,6 +107,17 @@ public class EMRestfulApi { @Autowired private ECMOperateService ecmOperateService; @Autowired private ECResourceInfoService ecResourceInfoService; + + @Autowired private ResourceManagerPersistence resourceManagerPersistence; + + @Autowired private LabelManagerPersistence labelManagerPersistence; + + @Autowired private ExternalResourceService externalResourceService; + + @Autowired private DefaultEngineCreateService defaultEngineCreateService; + + @Autowired private NodeMetricManagerPersistence nodeMetricManagerPersistence; + private LabelBuilderFactory stdLabelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory(); @@ -148,7 +173,7 @@ public Message listAllEMs( stream = stream.filter( em -> { - List> labels = em.getLabels(); + List