提交 7ac5d815 authored 作者: songchuancai's avatar songchuancai

Initial commit

上级
HELP.md
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**
!**/src/test/**
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
### VS Code ###
.vscode/
/*
* Copyright 2007-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.net.*;
import java.io.*;
import java.nio.channels.*;
import java.util.Properties;
public class MavenWrapperDownloader {

    private static final String WRAPPER_VERSION = "0.5.6";

    /**
     * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
     */
    private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
            + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";

    /**
     * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
     * use instead of the default one.
     */
    private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
            ".mvn/wrapper/maven-wrapper.properties";

    /**
     * Path where the maven-wrapper.jar will be saved to.
     */
    private static final String MAVEN_WRAPPER_JAR_PATH =
            ".mvn/wrapper/maven-wrapper.jar";

    /**
     * Name of the property which should be used to override the default download url for the wrapper.
     */
    private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";

    /**
     * Downloads the maven-wrapper.jar into &lt;baseDir&gt;/.mvn/wrapper/.
     *
     * @param args args[0] must be the project base directory
     */
    public static void main(String args[]) {
        System.out.println("- Downloader started");
        if (args.length < 1) {
            // fail with a clear message instead of an ArrayIndexOutOfBoundsException
            System.out.println("- ERROR missing required argument: base directory");
            System.exit(1);
        }
        File baseDirectory = new File(args[0]);
        System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());

        // If the maven-wrapper.properties exists, read it and check if it contains a custom
        // wrapperUrl parameter.
        File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
        String url = DEFAULT_DOWNLOAD_URL;
        if (mavenWrapperPropertyFile.exists()) {
            // try-with-resources guarantees the stream is closed on all paths
            try (FileInputStream mavenWrapperPropertyFileInputStream =
                         new FileInputStream(mavenWrapperPropertyFile)) {
                Properties mavenWrapperProperties = new Properties();
                mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
                url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
            } catch (IOException e) {
                System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
            }
        }
        System.out.println("- Downloading from: " + url);

        File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
        if (!outputFile.getParentFile().exists()) {
            if (!outputFile.getParentFile().mkdirs()) {
                System.out.println(
                        "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
            }
        }
        System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
        try {
            downloadFileFromURL(url, outputFile);
            System.out.println("Done");
            System.exit(0);
        } catch (Throwable e) {
            System.out.println("- Error downloading");
            e.printStackTrace();
            System.exit(1);
        }
    }

    /**
     * Streams the given URL into the destination file, using the optional
     * MVNW_USERNAME/MVNW_PASSWORD environment variables for HTTP basic auth.
     *
     * @param urlString   source URL
     * @param destination target file (overwritten if it exists)
     * @throws Exception on any network or file error
     */
    private static void downloadFileFromURL(String urlString, File destination) throws Exception {
        if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
            String username = System.getenv("MVNW_USERNAME");
            char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
            Authenticator.setDefault(new Authenticator() {
                @Override
                protected PasswordAuthentication getPasswordAuthentication() {
                    return new PasswordAuthentication(username, password);
                }
            });
        }
        URL website = new URL(urlString);
        // try-with-resources: the original leaked both the channel and the
        // FileOutputStream when transferFrom threw
        try (ReadableByteChannel rbc = Channels.newChannel(website.openStream());
             FileOutputStream fos = new FileOutputStream(destination)) {
            fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
        }
    }
}
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip
wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar
# 工程简介
# 延伸阅读
差异被折叠。
@REM ----------------------------------------------------------------------------
@REM Licensed to the Apache Software Foundation (ASF) under one
@REM or more contributor license agreements. See the NOTICE file
@REM distributed with this work for additional information
@REM regarding copyright ownership. The ASF licenses this file
@REM to you under the Apache License, Version 2.0 (the
@REM "License"); you may not use this file except in compliance
@REM with the License. You may obtain a copy of the License at
@REM
@REM https://www.apache.org/licenses/LICENSE-2.0
@REM
@REM Unless required by applicable law or agreed to in writing,
@REM software distributed under the License is distributed on an
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@REM KIND, either express or implied. See the License for the
@REM specific language governing permissions and limitations
@REM under the License.
@REM ----------------------------------------------------------------------------
@REM ----------------------------------------------------------------------------
@REM Maven Start Up Batch script
@REM
@REM Required ENV vars:
@REM JAVA_HOME - location of a JDK home dir
@REM
@REM Optional ENV vars
@REM M2_HOME - location of maven2's installed home dir
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
@REM e.g. to debug Maven itself, use
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
@REM ----------------------------------------------------------------------------
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
@echo off
@REM set title of command window
title %0
@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
@REM set %HOME% to equivalent of $HOME
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
@REM Execute a user defined script before this one
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
:skipRcPre
@setlocal
set ERROR_CODE=0
@REM To isolate internal variables from possible post scripts, we use another setlocal
@setlocal
@REM ==== START VALIDATION ====
@REM Abort with an explicit message when JAVA_HOME is unset or does not point
@REM at a JDK (java.exe missing); errors go to stderr via >&2.
if not "%JAVA_HOME%" == "" goto OkJHome
echo.
echo Error: JAVA_HOME not found in your environment. >&2
echo Please set the JAVA_HOME variable in your environment to match the >&2
echo location of your Java installation. >&2
echo.
goto error
:OkJHome
if exist "%JAVA_HOME%\bin\java.exe" goto init
echo.
echo Error: JAVA_HOME is set to an invalid directory. >&2
echo JAVA_HOME = "%JAVA_HOME%" >&2
echo Please set the JAVA_HOME variable in your environment to match the >&2
echo location of your Java installation. >&2
echo.
goto error
@REM ==== END VALIDATION ====
:init

@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
@REM Fallback to current working directory if not found.

set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir

@REM Walk up from the current directory until a ".mvn" folder is found or the
@REM filesystem root is reached (cd .. becomes a no-op there).
set EXEC_DIR=%CD%
set WDIR=%EXEC_DIR%
:findBaseDir
IF EXIST "%WDIR%"\.mvn goto baseDirFound
cd ..
IF "%WDIR%"=="%CD%" goto baseDirNotFound
set WDIR=%CD%
goto findBaseDir

:baseDirFound
set MAVEN_PROJECTBASEDIR=%WDIR%
cd "%EXEC_DIR%"
goto endDetectBaseDir

:baseDirNotFound
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
cd "%EXEC_DIR%"

:endDetectBaseDir

IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig

@setlocal EnableExtensions EnableDelayedExpansion
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%

:endReadAdditionalConfig

SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain

set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"

@REM FIX: without 'usebackq', a double-quoted set in FOR /F is parsed as a literal
@REM string rather than as a file name, so the wrapperUrl override configured in
@REM maven-wrapper.properties was silently ignored and the default URL always won.
FOR /F "usebackq tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
)
@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
if exist %WRAPPER_JAR% (
if "%MVNW_VERBOSE%" == "true" (
echo Found %WRAPPER_JAR%
)
) else (
if not "%MVNW_REPOURL%" == "" (
SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
)
if "%MVNW_VERBOSE%" == "true" (
echo Couldn't find %WRAPPER_JAR%, downloading it ...
echo Downloading from: %DOWNLOAD_URL%
)
@REM Download via .NET WebClient; TLS 1.2 is forced because older .NET defaults
@REM cannot negotiate with repo.maven.apache.org. MVNW_USERNAME/MVNW_PASSWORD,
@REM when both set, are passed as HTTP credentials.
powershell -Command "&{"^
"$webclient = new-object System.Net.WebClient;"^
"if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
"$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
"}"^
"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^
"}"
if "%MVNW_VERBOSE%" == "true" (
echo Finished downloading %WRAPPER_JAR%
)
)
@REM End of extension
@REM Provide a "standardized" way to retrieve the CLI args that will
@REM work with both Windows and non-Windows executions.
set MAVEN_CMD_LINE_ARGS=%*
@REM Launch the wrapper main class; the wrapper jar then bootstraps Maven itself.
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
if ERRORLEVEL 1 goto error
goto end
:error
set ERROR_CODE=1
:end
@endlocal & set ERROR_CODE=%ERROR_CODE%
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
@REM check for post script, once with legacy .bat ending and once with .cmd ending
if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
:skipRcPost
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
if "%MAVEN_BATCH_PAUSE%" == "on" pause
if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
exit /B %ERROR_CODE%
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.2.5.RELEASE</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>com.example</groupId>
<artifactId>data-service</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>data-service</name>
<description>Demo project for Spring Boot</description>
<properties>
<java.version>1.8</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.72</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.spring4all</groupId>
<artifactId>swagger-spring-boot-starter</artifactId>
<version>1.9.0.RELEASE</version>
</dependency>
<dependency>
<groupId>com.github.xiaoymin</groupId>
<artifactId>swagger-bootstrap-ui</artifactId>
<version>1.9.6</version>
</dependency>
<!-- jpa操作 start-->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<!-- jpa操作 end-->
<!-- hibernate-type start -->
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-hibernate5</artifactId>
<version>2.8.4</version>
</dependency>
<dependency>
<groupId>com.vladmihalcea</groupId>
<artifactId>hibernate-types-52</artifactId>
<!--for hibernate >= 5.2-->
<version>2.10.2</version>
</dependency>
<!-- hibernate-type end -->
<!-- 数据库连接池 start-->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
<version>1.1.17</version>
</dependency>
<!-- 数据库连接池 end-->
<!-- 数据库连接 start-->
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>42.1.4</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>8.0.15</version>
</dependency>
<dependency>
<groupId>com.oracle</groupId>
<artifactId>ojdbc6</artifactId>
<version>11.2.0.3</version>
</dependency>
<!-- 数据库连接 end-->
<!-- excel start-->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>easyexcel</artifactId>
<version>3.2.1</version>
</dependency>
<!-- excel end-->
<!-- start es-->
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>7.2.0</version>
</dependency>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-high-level-client</artifactId>
<version>7.2.0</version>
<exclusions>
<exclusion>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-client</artifactId>
<version>7.2.0</version>
</dependency>
<!-- end es-->
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
<encoding>UTF-8</encoding>
</configuration>
</plugin>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
package com.hisense.dataservice;
import com.github.xiaoymin.swaggerbootstrapui.annotations.EnableSwaggerBootstrapUI;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.domain.EntityScan;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
@SpringBootApplication
@EnableTransactionManagement
@EntityScan({"com.hisense.dataservice"})
@EnableSwagger2
@EnableSwaggerBootstrapUI
public class DataServiceApplication {

    /**
     * Bootstraps the Spring context for the data-service application.
     *
     * @param args command-line arguments forwarded to Spring
     */
    public static void main(String[] args) {
        new SpringApplication(DataServiceApplication.class).run(args);
    }
}
package com.hisense.dataservice.bo;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.ArrayList;
import java.util.List;
/**
 * Column-level data-range filter: restricts which columns of one table appear
 * in the select list and builds the matching Excel header rows.
 *
 * @author : scc
 * @date : 2023/02/20
 * TODO tableName/tableId and columnId/columnName non-null design
 **/
@Data
@AllArgsConstructor
@NoArgsConstructor
public class ColumnDataRangeFilter {
    // table name
    private String tableName;
    // table id
    private Long tableId;
    // selected columns; empty/null means "all columns of this table"
    private List<DataColumn> fields;

    /**
     * Builds this table's contribution to the select list, e.g.
     * "t1.a as t1_a," or "t1.*,".
     * FIX: every fragment now ends with a trailing "," (the no-fields branch
     * previously did not), so fragments from several filters concatenate
     * safely and the caller can strip exactly one final separator. Without
     * this, a single no-fields filter crashed generateFilterSelectSql
     * (substring(0, -1)) and a trailing ".*" fragment lost columns.
     */
    private String generateSql() {
        if (this.getFields() == null || this.getFields().isEmpty()) {
            // no explicit columns -> take every column of the table
            return this.getTableName() + ".*,";
        }
        StringBuilder resultBuilder = new StringBuilder();
        for (DataColumn field : this.getFields()) {
            resultBuilder.append(this.getTableName())
                    .append(".")
                    .append(field.getColumnName())
                    .append(" as ")
                    .append(this.getTableName())
                    .append("_")
                    .append(field.getColumnName())
                    .append(",");
        }
        return resultBuilder.toString();
    }

    /**
     * Builds one Excel head entry ("tableName_columnName") per selected column,
     * matching the aliases produced by generateSql().
     */
    private List<List<String>> generateExcelHead() {
        List<List<String>> heads = new ArrayList<>();
        if (this.getFields() == null) {
            return heads;
        }
        for (DataColumn field : this.getFields()) {
            List<String> head = new ArrayList<>();
            head.add(this.getTableName() + "_" + field.getColumnName());
            heads.add(head);
        }
        return heads;
    }

    /**
     * Generates the sql select clause.
     * 例如: " select * " or " select t1.a as t1_a,t2.b as t2_b"
     * FIX: the non-empty branch now emits the leading " select " keyword; the
     * empty branch already returned " select * ", so callers cannot have been
     * prepending it themselves. NOTE(review): please confirm no caller
     * prepends "select" for the non-empty case.
     *
     * @param columnDataRangeFilters per-table column filters; may be empty
     * @return the complete select clause
     */
    public static String generateFilterSelectSql(List<ColumnDataRangeFilter> columnDataRangeFilters) {
        if (columnDataRangeFilters == null || columnDataRangeFilters.isEmpty()) {
            return " select * ";
        }
        StringBuilder result = new StringBuilder(" select ");
        for (ColumnDataRangeFilter columnDataRangeFilter : columnDataRangeFilters) {
            result.append(columnDataRangeFilter.generateSql());
        }
        // every fragment ends with ",", so drop only the final separator
        if (result.charAt(result.length() - 1) == ',') {
            result.setLength(result.length() - 1);
        }
        return result.toString();
    }

    /**
     * Collects the Excel head rows for all filters, in filter order.
     */
    public static List<List<String>> generateExcelHead(List<ColumnDataRangeFilter> columnDataRangeFilters) {
        List<List<String>> result = new ArrayList<>();
        for (ColumnDataRangeFilter columnDataRangeFilter : columnDataRangeFilters) {
            result.addAll(columnDataRangeFilter.generateExcelHead());
        }
        return result;
    }
}
\ No newline at end of file
package com.hisense.dataservice.bo;
import com.hisense.dataservice.enums.FieldTypeEnum;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Describes one column of a data table (name, id, type and description).
 * Used by the column/row range filters when generating SQL and Excel heads.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class DataColumn {
    // column name as it appears in the physical table
    private String columnName;
    // column id
    private Long id;
    // column data type
    private FieldTypeEnum fieldType;
    // column description
    private String desc;
}
\ No newline at end of file
package com.hisense.dataservice.bo;
import com.hisense.dataservice.enums.OperateEnum;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import org.springframework.util.CollectionUtils;
import java.util.List;
/**
 * One row-level filter condition: "table.column &lt;operator&gt; value(s)".
 * Renders itself either as a parameterized fragment (PreparedStatement) or
 * with the values inlined (Statement).
 *
 * @author : scc
 * @date : 2023/02/27
 **/
@Data
@AllArgsConstructor
@NoArgsConstructor
public class FilterCondition {
    // table name
    private String tableName;
    // table id
    private Long tableId;
    // column the condition applies to
    @NonNull
    private DataColumn field;
    // comparison operator
    @NonNull
    private OperateEnum operateEnumType;
    // values to compare against (BETWEEN_AND requires two)
    @NonNull
    private List<Object> filterValues;

    /**
     * Appends this condition as a parameterized SQL fragment ('?' markers).
     * Incomplete conditions (no values, missing second BETWEEN bound, missing
     * column) leave the builder untouched.
     *
     * @param resultBuilder builder to append to
     * @return the same builder, or a fresh empty one for unsupported operators
     */
    public StringBuilder generateSqlFilterConditionForPreparedStatement(StringBuilder resultBuilder) {
        // skip incomplete conditions
        if (CollectionUtils.isEmpty(filterValues) ||
                (operateEnumType == OperateEnum.BETWEEN_AND && filterValues.size() < 2) ||
                field == null || field.getColumnName() == null) {
            return resultBuilder;
        }
        resultBuilder.append(tableName)
                .append(".")
                .append(field.getColumnName())
                .append(" ");
        switch (operateEnumType) {
            case EQUAL: // exact match
                resultBuilder.append(" = ")
                        .append(" ? ");
                break;
            case LIKE: // fuzzy match
                // FIX: "like %?%" is not valid SQL and '%?%' is not a usable JDBC
                // placeholder; add the wildcards around the single bound parameter
                // on the database side so exactly one '?' is still bound.
                resultBuilder.append(" like concat('%', ?, '%') ");
                break;
            case BETWEEN_AND: // numeric or time range
                resultBuilder.append(" BETWEEN ")
                        .append(" ? ")
                        .append(" and ")
                        .append(" ? ");
                break;
            case IN:
                resultBuilder.append(" IN ")
                        .append("(")
                        .append(" ? ") // caller binds a collection string
                        .append(")");
                break;
            default:
                resultBuilder = new StringBuilder();
                break;
        }
        return resultBuilder;
    }

    /**
     * Appends this condition with the values inlined into the SQL text.
     * WARNING: values are concatenated without quoting or escaping — only use
     * with trusted input; prefer the PreparedStatement variant.
     *
     * @param resultBuilder builder to append to
     * @return the same builder, or a fresh empty one for unsupported operators
     */
    public StringBuilder generateSqlFilterConditionForStatement(StringBuilder resultBuilder) {
        // skip incomplete conditions; the field/column guard mirrors the
        // PreparedStatement variant (it was previously missing here)
        if (CollectionUtils.isEmpty(filterValues) ||
                (operateEnumType == OperateEnum.BETWEEN_AND && filterValues.size() < 2) ||
                field == null || field.getColumnName() == null) {
            return resultBuilder;
        }
        Object firstValue = filterValues.get(0);
        resultBuilder.append(tableName)
                .append(".")
                .append(field.getColumnName())
                .append(" ");
        switch (operateEnumType) {
            case EQUAL: // exact match
                resultBuilder.append(" = ")
                        .append(firstValue);
                break;
            case LIKE: // fuzzy match
                resultBuilder.append(" like ")
                        .append(" %")
                        .append(firstValue)
                        .append("% ");
                break;
            case BETWEEN_AND: // numeric or time range
                Object secondValue = filterValues.get(1);
                resultBuilder.append(" BETWEEN ")
                        .append(firstValue)
                        .append(" and ")
                        .append(secondValue);
                break;
            default:
                resultBuilder = new StringBuilder();
                break;
        }
        return resultBuilder;
    }
}
\ No newline at end of file
package com.hisense.dataservice.bo;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
/**
 * Row-level data-range filter: a group of FilterConditions combined with
 * either AND (fullMatched) or OR, rendered as one parenthesized fragment of
 * the where clause.
 *
 * @author : scc
 * @date : 2023/02/20
 **/
@Data
@AllArgsConstructor
@NoArgsConstructor
public class LineDataRangeFilter {
    // true: all conditions must match (AND); false: any one suffices (OR)
    private Boolean fullMatched = Boolean.TRUE;
    // conditions of this group
    private List<FilterCondition> filterConditions;
    private Long tableId;
    private String tableName;

    /**
     * Builds the combined where fragment (without the leading "where"),
     * e.g. " ( v1 or v2 )  and  ( v3 and v4 ) ".
     * FIX: groups and conditions that render nothing are skipped, so no
     * dangling "and"/"or" or empty "( )" pairs are emitted — the original
     * appended the separator and the closing parenthesis unconditionally
     * (its "result.length() != 0" guard was always true after " ( " was
     * appended) and could produce invalid SQL.
     *
     * @param lineDataRangeFilters filter groups; empty or null yields ""
     * @return the where fragment
     */
    public static String generateFilterWhereSql(List<LineDataRangeFilter> lineDataRangeFilters) {
        StringBuilder result = new StringBuilder();
        if (lineDataRangeFilters == null || lineDataRangeFilters.isEmpty()) {
            return result.toString();
        }
        for (LineDataRangeFilter filter : lineDataRangeFilters) {
            StringBuilder part = filter.generateFilterSql(new StringBuilder());
            if (part.length() == 0) {
                continue; // group rendered nothing: no dangling "and"
            }
            if (result.length() > 0) {
                result.append(" and ");
            }
            result.append(part);
        }
        if (result.length() > 0) {
            result.append(" ");
        }
        return result.toString();
    }

    /**
     * Appends this group's conditions, wrapped in parentheses, to result.
     * Conditions that render nothing are skipped; if none renders, result is
     * returned unchanged (no empty "( )").
     */
    private StringBuilder generateFilterSql(StringBuilder result) {
        if (filterConditions == null || filterConditions.isEmpty()) {
            return result;
        }
        // null-safe: a null fullMatched (possible via the Lombok setter) falls
        // back to the AND default
        String logicalOperator = Boolean.FALSE.equals(fullMatched) ? " or " : " and ";
        StringBuilder body = new StringBuilder();
        for (FilterCondition condition : filterConditions) {
            StringBuilder piece = condition.generateSqlFilterConditionForPreparedStatement(new StringBuilder());
            if (piece.length() == 0) {
                continue; // incomplete condition: skip, don't emit an operator
            }
            if (body.length() > 0) {
                body.append(logicalOperator);
            }
            body.append(piece);
        }
        if (body.length() > 0) {
            result.append(" ( ").append(body).append(" ) ");
        }
        return result;
    }
}
package com.hisense.dataservice.bo;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Join-relation configuration between tables: identifies the table/column
 * pair that participates in a multi-table join.
 *
 * @author : scc
 * @date : 2023/02/27
 **/
@Data
@AllArgsConstructor
@NoArgsConstructor
public class TableJoinRelationConfig {
    // name of the joined table
    private String tableName;
    // name of the join column
    private String columnName;
    // id of the joined table
    private Long tableId;
    // id of the join column
    private Long columnId;
}
package com.hisense.dataservice.controller;
import com.hisense.dataservice.library.model.page.PageLink;
import com.hisense.dataservice.library.model.page.SortOrder;
import lombok.extern.slf4j.Slf4j;
import org.springframework.util.StringUtils;
/**
 * Shared base class for REST controllers: builds PageLink objects
 * (0-based page index) from the raw paging/sorting request parameters.
 *
 * @author : scc
 * @date : 2023/03/06
 **/
@Slf4j
public abstract class BaseController {

    /**
     * Builds a PageLink with optional sorting.
     *
     * @param sortOrder "ASC" or "DESC" (case-insensitive); defaults to ASC
     * @throws IllegalArgumentException if sortOrder is neither ASC nor DESC
     */
    protected PageLink createPageLink(int pageSize, int page, String textSearch, String sortProperty, String sortOrder) throws IllegalArgumentException {
        // no sort property -> plain, unsorted page link
        if (StringUtils.isEmpty(sortProperty)) {
            return new PageLink(pageSize, page - 1, textSearch);
        }
        SortOrder.Direction direction = SortOrder.Direction.ASC;
        if (!StringUtils.isEmpty(sortOrder)) {
            try {
                direction = SortOrder.Direction.valueOf(sortOrder.toUpperCase());
            } catch (IllegalArgumentException e) {
                throw new IllegalArgumentException("Unsupported sort order '" + sortOrder + "'! Only 'ASC' or 'DESC' types are allowed.");
            }
        }
        return new PageLink(pageSize, page - 1, textSearch, new SortOrder(sortProperty, direction));
    }

    /** Builds an unsorted PageLink without a text search. */
    protected PageLink createPageLink(int pageSize, int page) {
        return new PageLink(pageSize, page - 1);
    }
}
package com.hisense.dataservice.controller;
import com.hisense.dataservice.library.model.Result;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.servlet.http.HttpServletRequest;
/**
 * Global exception handlers: translate uncaught exceptions into Result
 * envelopes with the matching HTTP status code.
 */
@ControllerAdvice
@Slf4j
public class BaseExceptionController {

    /** Fallback handler: any uncaught exception becomes a 500 result. */
    @ExceptionHandler({Exception.class})
    @ResponseBody
    public Result<String> exceptionHandler(HttpServletRequest request, Exception e) {
        log.error("发生了异常:", e);
        String msg = e.getMessage();
        // FIX: diamond operator — the raw-type "new Result(...)" defeated the
        // declared Result<String> and triggered unchecked warnings
        return new Result<>(String.valueOf(HttpStatus.INTERNAL_SERVER_ERROR.value()), msg, null);
    }

    /** Illegal arguments (e.g. bad sortOrder) become a 400 result. */
    @ExceptionHandler({IllegalArgumentException.class})
    @ResponseBody
    public Result<String> exceptionHandler(HttpServletRequest request, IllegalArgumentException e) {
        log.error("参数不合法:", e);
        String msg = e.getMessage();
        return new Result<>(String.valueOf(HttpStatus.BAD_REQUEST.value()), msg, null);
    }
}
\ No newline at end of file
package com.hisense.dataservice.controller;
import com.hisense.dataservice.service.DataApiCommonService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
/**
 * Unified entry point for the public data API service.
 *
 * @author : scc
 * @date : 2023/03/06
 **/
@Api(tags = "数据api服务-公共服务")
@RestController
@RequestMapping("/api/v1/common/dataservice")
public class DataApiCommonServiceController {

    @Autowired
    private DataApiCommonService dataApiCommonService;

    /**
     * Delegates the data query for the given environment and URL pattern to
     * DataApiCommonService and returns its result unchanged.
     */
    @ApiOperation(value = "公共服务-数据查询")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "environment", value = "环境", required = true),
            @ApiImplicitParam(name = "pattern", value = "请求路径", required = true)
    })
    @GetMapping("/{environment}/{pattern}")
    public Object dataQuery(@PathVariable String environment, @PathVariable String pattern) {
        return dataApiCommonService.queryData(environment, pattern);
    }
}
package com.hisense.dataservice.controller;
import com.hisense.dataservice.dto.DataApiModelDto;
import com.hisense.dataservice.library.model.Result;
import com.hisense.dataservice.service.DataApiCommonService;
import com.hisense.dataservice.service.DataApiDataSourceManagementService;
import com.hisense.dataservice.service.DataApiServiceManagementService;
import io.swagger.annotations.Api;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
 * Management endpoints for data-service APIs (create/update/publish).
 *
 * @author : scc
 * @date : 2023/02/24
 **/
@Api(tags = "数据api服务-api管理")
@RestController
@RequestMapping("/api/v1/dataService")
@Slf4j
public class DataServiceManagementController {

    @Autowired
    private DataApiServiceManagementService dataApiServiceManagementService;

    /**
     * Creates or updates a data API definition.
     * FIX: @RequestBody added — without it this POST endpoint bound the DTO
     * from query parameters, unlike the sibling publishDataApi endpoint,
     * which reads the JSON request body.
     */
    @PostMapping("/")
    public Result<Boolean> createOrUpdateDataApiService(@RequestBody DataApiModelDto dataApiModelDto) {
        return dataApiServiceManagementService.createOrUpdateDataApi(dataApiModelDto);
    }

    /** TODO not implemented yet — always returns null. */
    @GetMapping("/{environment}")
    public List<Object> getPublishDataApiList(@PathVariable String environment) {
        return null;
    }

    /** TODO not implemented yet — always returns null. */
    @PostMapping("/{environment}")
    public Object publishDataApi(@PathVariable String environment,
                                 @RequestBody DataApiModelDto dataApiModelDto) {
        return null;
    }

    /** TODO not implemented yet — always returns null. */
    @PatchMapping("/{environment}")
    public Object updateDataApi(@PathVariable String environment) {
        return null;
    }

    /** TODO not implemented yet — always returns null. */
    @GetMapping("/dataFileDownload/{environment}/{pattern}/{fileId}")
    public Object dataFileDownload(@PathVariable String environment, @PathVariable String pattern, @PathVariable String fileId) {
        return null;
    }
}
package com.hisense.dataservice.controller;
import com.hisense.dataservice.enums.DataSourceTypeEnum;
import com.hisense.dataservice.service.DataApiDataSourceManagementService;
import com.hisense.dataservice.library.model.page.PageData;
import com.hisense.dataservice.vo.DataFieldVo;
import com.hisense.dataservice.vo.DataSourceVo;
import com.hisense.dataservice.vo.DataTableVo;
import com.hisense.dataservice.library.model.Result;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author : scc
* @date : 2023/03/06
* 数据源管理
**/
@Api(tags = "数据api服务-数据源管理")
@RestController
@RequestMapping("/api/v1/datasource")
@Slf4j
public class DataSourceManagementController extends BaseController {
@Autowired
private DataApiDataSourceManagementService dataSourceManagementService;
@ApiOperation(value = "数据源管理-数据源列表(分页)")
@ApiImplicitParams({
@ApiImplicitParam(name = "environment", value = "环境", required = true),
@ApiImplicitParam(name = "type", value = "类型", required = true),
@ApiImplicitParam(name = "pageSize", value = "指定返回结果中每页显示的记录数量", defaultValue = "10", required = false),
@ApiImplicitParam(name = "page", value = "指定显示返回结果中的第几页", defaultValue = "1", required = false),
@ApiImplicitParam(name = "sortProperty", value = "根据哪个字段排序", required = false, allowableValues = "id,createdTime,updateTime"),
@ApiImplicitParam(name = "sortOrder", value = "排序方向", required = false, allowableValues = "ASC,DESC")
})
@GetMapping("/{environment}/pageList/{type}")
public Result<PageData<DataSourceVo>> getDataSourceList(@PathVariable String environment,
@PathVariable String type,
@RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize,
@RequestParam(value = "page", required = false, defaultValue = "1") Integer page,
@RequestParam(value = "sortProperty", required = false) String sortProperty,
@RequestParam(value = "sortOrder", required = false) String sortOrder) {
DataSourceTypeEnum dataSourceType = DataSourceTypeEnum.valueOf(type);
return dataSourceManagementService.pageQueryDataSourceListByType(environment, dataSourceType, createPageLink(pageSize, page, "", sortProperty, sortOrder));
}
@ApiOperation(value = "数据源管理-数据源列表")
@ApiImplicitParams({
@ApiImplicitParam(name = "environment", value = "环境", required = true),
@ApiImplicitParam(name = "type", value = "类型", required = true)
})
@GetMapping("/{environment}/list/{type}")
public Result<List<DataSourceVo>> getDataSourceList(@PathVariable String environment,
@PathVariable String type) {
DataSourceTypeEnum dataSourceType = DataSourceTypeEnum.valueOf(type);
return dataSourceManagementService.queryDataSourceListByType(environment, dataSourceType);
}
@ApiOperation(value = "数据源管理-数据表列表(分页)", notes = "根据数据源查询对应的数据表列表")
@ApiImplicitParams({
@ApiImplicitParam(name = "environment", value = "环境", required = true),
@ApiImplicitParam(name = "dataSourceId", value = "数据源ID", required = true),
@ApiImplicitParam(name = "pageSize", value = "指定返回结果中每页显示的记录数量", defaultValue = "10", required = false),
@ApiImplicitParam(name = "page", value = "指定显示返回结果中的第几页", defaultValue = "1", required = false),
@ApiImplicitParam(name = "sortProperty", value = "根据哪个字段排序", required = false, allowableValues = "id,createdTime,updateTime"),
@ApiImplicitParam(name = "sortOrder", value = "排序方向", required = false, allowableValues = "ASC,DESC")
})
@GetMapping("/{environment}/{dataSourceId}/pageTables")
public Result<PageData<DataTableVo>> getDataTableList(@PathVariable String environment,
@PathVariable Long dataSourceId,
@RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize,
@RequestParam(value = "page", required = false, defaultValue = "1") Integer page,
@RequestParam(value = "sortProperty", required = false) String sortProperty,
@RequestParam(value = "sortOrder", required = false) String sortOrder) {
return dataSourceManagementService.pageQueryDataTableListByDataSource(environment, dataSourceId, createPageLink(pageSize, page, "", sortProperty, sortOrder));
}
/**
 * Data-source management: full (non-paged) list of the data tables belonging
 * to one data source.
 */
@ApiOperation(value = "数据源管理-数据表列表", notes = "根据数据源查询对应的数据表列表")
@ApiImplicitParams({
@ApiImplicitParam(name = "environment", value = "环境", required = true),
@ApiImplicitParam(name = "dataSourceId", value = "数据源ID", required = true)
})
@GetMapping("/{environment}/{dataSourceId}/tables")
public Result<List<DataTableVo>> getDataTableList(@PathVariable String environment,
@PathVariable Long dataSourceId) {
return dataSourceManagementService.queryDataTableListByDataSource(environment, dataSourceId);
}
/**
 * Data-source management: paged list of the fields (columns) of one data
 * table. Sorting is optional; text search is not supported here.
 *
 * NOTE(review): allowableValues lists "updateTime" but the entity field is
 * "updatedTime" (see BaseEntity) — confirm the accepted sort property name.
 */
@ApiOperation(value = "数据源管理-数据字段列表(分页)", notes = "根据数据表查询对应的数据字段列表")
@ApiImplicitParams({
@ApiImplicitParam(name = "environment", value = "环境", required = true),
@ApiImplicitParam(name = "tableId", value = "数据表ID", required = true),
@ApiImplicitParam(name = "pageSize", value = "指定返回结果中每页显示的记录数量", defaultValue = "10",required = false),
@ApiImplicitParam(name = "page", value = "指定显示返回结果中的第几页", defaultValue = "1", required = false),
@ApiImplicitParam(name = "sortProperty", value = "根据哪个字段排序", required = false, allowableValues = "id,createdTime,updateTime"),
@ApiImplicitParam(name = "sortOrder", value = "排序方向", required = false, allowableValues = "ASC,DESC")
})
@GetMapping("/{environment}/{tableId}/pageFields")
public Result<PageData<DataFieldVo>> getDataTableColumn(@PathVariable String environment,
@PathVariable Long tableId,
@RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize,
@RequestParam(value = "page", required = false, defaultValue = "1") Integer page,
@RequestParam(value = "sortProperty", required = false) String sortProperty,
@RequestParam(value = "sortOrder", required = false) String sortOrder) {
return dataSourceManagementService.pageQueryDataFieldListByTable(environment, tableId, createPageLink(pageSize, page, "", sortProperty, sortOrder));
}
/**
 * Data-source management: full (non-paged) list of the fields (columns) of
 * one data table.
 */
@ApiOperation(value = "数据源管理-数据字段列表", notes = "根据数据表查询对应的数据字段列表")
@ApiImplicitParams({
@ApiImplicitParam(name = "environment", value = "环境", required = true),
@ApiImplicitParam(name = "tableId", value = "数据表ID", required = true)
})
@GetMapping("/{environment}/{tableId}/fields")
public Result<List<DataFieldVo>> getDataTableColumn(@PathVariable String environment,
@PathVariable Long tableId) {
return dataSourceManagementService.queryDataFieldListByTable(environment, tableId);
}
}
package com.hisense.dataservice.dto;
import com.hisense.dataservice.bo.ColumnDataRangeFilter;
import com.hisense.dataservice.bo.LineDataRangeFilter;
import com.hisense.dataservice.bo.TableJoinRelationConfig;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.List;
/**
 * DTO describing a data-API model (template) together with its row/column
 * filter configuration and publish/subscribe metadata.
 *
 * NOTE(review): @ApiModelProperty's {@code name} attribute overrides the
 * property NAME in Swagger output; a human-readable description belongs in
 * {@code value}. The Chinese labels below look like descriptions — confirm
 * they render as intended.
 *
 * @author : scc
 * @date : 2023/02/24
 **/
@Data
public class DataApiModelDto {
// Model (template) ID; absent when creating a new model
@ApiModelProperty(name = "数据模板ID", required = false)
private Long modelId;
// Display name of the data service
@ApiModelProperty(name = "数据服务名", required = true)
private String modelName;
// Backing data-source ID
@ApiModelProperty(name = "数据源ID", required = true)
private Long dataSourceId;
// Column-level data-range filters (selected columns)
@ApiModelProperty(name = "列数据范围", required = true)
private List<ColumnDataRangeFilter> columnDataRangeConfig;
// Join relations between the involved tables
@ApiModelProperty(name = "数据表关系", required = false)
private List<TableJoinRelationConfig> tableJoinRelationConfig;
// Row-level data-range filters (where conditions)
@ApiModelProperty(name = "行数据范围", required = true)
private List<LineDataRangeFilter> lineDataRangeConfig;
// External API identifier
@ApiModelProperty(name = "apiID", required = true)
private Integer apiID;
// Publishing system ID
@ApiModelProperty(name = "发布系统ID", required = false)
private Long publishSystemId;
// Subscribing system ID
@ApiModelProperty(name = "订阅系统ID", required = false)
private Integer subscribeSystemId;
// Network environment (see NetworkEnv: INNER/OUTER)
@ApiModelProperty(name = "网络环境", required = true)
private String networkEnv;
// System environment (see SystemEnv: DEV/PRD)
@ApiModelProperty(name = "系统环境", required = true)
private String systemEnv;
}
package com.hisense.dataservice.entity;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import javax.persistence.*;
import java.util.Date;
/**
 * Common JPA superclass: primary key, audit columns and a soft-delete flag
 * shared by every data-API entity.
 *
 * @author : scc
 * @date : 2023/02/20
 **/
@MappedSuperclass
@Data
public class BaseEntity {
// Primary key (database sequence generated)
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE)
protected Long id;
// Free-text remark
protected String description;
// Creation timestamp, serialized as "yyyy-MM-dd HH:mm:ss" in GMT+8
@JsonFormat(shape = JsonFormat.Shape.STRING,pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
protected Date createdTime;
// Last-modification timestamp, same JSON format as createdTime
@JsonFormat(shape = JsonFormat.Shape.STRING,pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
protected Date updatedTime;
// User who created the row
protected String creator;
// User who last modified the row
protected String modifier;
// Soft-delete flag; rows are filtered with "DeletedIsFalse" in repositories
@Column(name = "deleted", columnDefinition = "CHAR(1) default 0", nullable = false)
protected Boolean deleted = Boolean.FALSE;
}
package com.hisense.dataservice.entity;
import com.hisense.dataservice.enums.FieldTypeEnum;
import com.hisense.dataservice.vo.DataFieldVo;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import javax.persistence.*;
/**
 * Data-table field (column) configuration entity.
 *
 * @author : scc
 * @date : 2023/02/20
 **/
@Data
@AllArgsConstructor
@NoArgsConstructor
@Entity
@Table(name = "data_api_column")
public class DataApiColumn extends BaseEntity {
    // Owning data-table ID
    private Long dataTableId;
    // Field (column) name
    private String fieldName;
    // Field data type, stored as the enum name.
    // NOTE(review): "filedType" is a typo for "fieldType", but the lombok
    // accessors (getFiledType/setFiledType) are used by callers such as
    // DataFieldVo, so the name is intentionally left unchanged.
    @Enumerated(EnumType.STRING)
    @Column(name = "field_type", columnDefinition = "varchar2(32)")
    private FieldTypeEnum filedType;

    /**
     * Converts this entity into its view object.
     * NOTE(review): assumes filedType is non-null; a null value would NPE here.
     */
    public DataFieldVo toItem() {
        DataFieldVo item = new DataFieldVo();
        item.setDataTableId(getDataTableId());
        item.setFieldId(getId());
        item.setFieldName(getFieldName());
        item.setFiledType(getFiledType().name());
        item.setDesc(getDescription());
        return item;
    }
}
package com.hisense.dataservice.entity;
import com.hisense.dataservice.enums.DataSourceTypeEnum;
import com.hisense.dataservice.vo.DataSourceVo;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import javax.persistence.*;
/**
 * Data-source (database connection) configuration entity.
 *
 * @author : scc
 * @date : 2023/02/20
 **/
@Data
@AllArgsConstructor
@NoArgsConstructor
@Entity
@Table(name = "data_api_datasource")
public class DataApiDataSource extends BaseEntity {
    // Display name of the data source
    @Column(name = "source_name", columnDefinition = "varchar2(32)", nullable = false)
    private String sourceName;
    // Database dialect, stored as the enum name
    @Enumerated(EnumType.STRING)
    @Column(name = "type", columnDefinition = "varchar2(32)", nullable = false)
    private DataSourceTypeEnum type;
    // JDBC connection URL
    @Column(name = "url", columnDefinition = "varchar2(255)", nullable = false)
    private String url;
    // Connection username
    @Column(name = "username", columnDefinition = "varchar2(255)", nullable = false)
    private String username;
    // Connection password
    @Column(name = "password", columnDefinition = "varchar2(255)", nullable = false)
    private String password;
    // JDBC driver class name
    @Column(name = "driver", columnDefinition = "varchar2(255)", nullable = false)
    private String driver;

    /**
     * Converts this entity into its view object (credentials are not exposed).
     * NOTE(review): assumes type is non-null; a null value would NPE here.
     */
    public DataSourceVo toItem() {
        DataSourceVo vo = new DataSourceVo();
        vo.setId(getId());
        vo.setName(getSourceName());
        vo.setType(getType().name());
        vo.setDescription(getDescription());
        vo.setCreateTime(getCreatedTime());
        vo.setUpdateTime(getUpdatedTime());
        return vo;
    }
}
package com.hisense.dataservice.entity;
import com.hisense.dataservice.bo.TableJoinRelationConfig;
import com.hisense.dataservice.enums.NetworkEnv;
import com.hisense.dataservice.enums.SystemEnv;
import com.vladmihalcea.hibernate.type.json.JsonBlobType;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.hibernate.annotations.Type;
import org.hibernate.annotations.TypeDef;
import javax.persistence.*;
import java.util.List;
/**
 * Data-API model (template) entity: binds an external API id to a data source
 * and the table-join configuration used to build its query.
 *
 * @author : scc
 * @date : 2023/02/20
 **/
@Data
@AllArgsConstructor
@NoArgsConstructor
@Entity
@Table(name = "data_api_model")
@TypeDef(name = "json", typeClass = JsonBlobType.class)
public class DataApiModel extends BaseEntity{
// Display name
private String name;
// Backing data-source ID
@Column(nullable = false)
private Long dataSourceId;
// 数据表和对应列集合 Map<数据表ID,列ID列表> 提供订阅时使用
// @Type( type = "json" )
// private List<TableWithColumnRelationConfig> dataTableIdWithColumnIds;
// Table join-relation configuration, persisted as a JSON blob
@Type( type = "json" )
private List<TableJoinRelationConfig> tableJoinRelationConfig;
// External API identifier (unique; see DataApiModelRepository.findByApiID)
@Column(nullable = false)
private Integer apiID;
// Publishing system ID
@Column(nullable = false)
private Long publishSystemId;
// Network environment (INNER/OUTER), stored as the enum name
@Enumerated(EnumType.STRING)
@Column(name = "network_env", columnDefinition = "varchar2(10)", nullable = false)
private NetworkEnv networkEnv;
// System environment (DEV/PRD); reserved, may be null
@Enumerated(EnumType.STRING)
@Column(name = "system_env", columnDefinition = "varchar2(10)", nullable = true)
private SystemEnv systemEnv;
// Status: 1 = published, 0 = draft
private Integer status;
}
package com.hisense.dataservice.entity;
import com.hisense.dataservice.bo.ColumnDataRangeFilter;
import com.hisense.dataservice.bo.LineDataRangeFilter;
import com.hisense.dataservice.bo.TableJoinRelationConfig;
import com.hisense.dataservice.enums.DataSourceTypeEnum;
import com.hisense.dataservice.enums.ModelConfigType;
import com.vladmihalcea.hibernate.type.json.JsonBlobType;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.hibernate.annotations.Type;
import org.hibernate.annotations.TypeDef;
import org.springframework.util.StringUtils;
import javax.persistence.*;
import java.util.List;
/**
 * Row/column filter configuration for a model (PUBLISH) or a subscription
 * (SUBSCRIBE), plus the SQL assembly built from it.
 *
 * NOTE(review): table/column names are embedded directly into the generated
 * SQL; they must come from trusted configuration, never from end users.
 *
 * @author : scc
 * @date : 2023/02/20
 * 行列筛选条件配置
 **/
@Data
@AllArgsConstructor
@NoArgsConstructor
@Entity
@Table(name = "data_api_model_config")
@TypeDef(name = "json", typeClass = JsonBlobType.class)
public class DataApiModelConfig extends BaseEntity{
// // 数据模板ID
// private Long dataModelId;
// Owning id: the model id (PUBLISH) or the subscribe-config id (SUBSCRIBE)
private Long typeId;
// Config kind (publish vs subscribe), stored as the enum name
@Enumerated(EnumType.STRING)
@Column(name = "type", columnDefinition = "varchar2(10)", nullable = false)
private ModelConfigType type;
// // 订阅配置ID(数据api发布时为空)
// private Long subscribeConfigId;
// Row-level filters (where conditions), persisted as a JSON blob
@Type( type = "json" )
private List<LineDataRangeFilter> lineDataRangeConfig;
// Column-level filters (selected columns), persisted as a JSON blob
@Type( type = "json" )
private List<ColumnDataRangeFilter> columnDataRangeConfig;
// Cached SELECT fragment; not persisted, populated by generateAndSetFullSql
@Transient
private String selectSql;
// Cached FROM fragment; not persisted, populated by generateAndSetFullSql
@Transient
private String fromSql;
// Cached WHERE fragment; not persisted, populated by generateAndSetFullSql
@Transient
private String whereSql;
// Full generated SQL, persisted for the record
@Column(name = "sql", columnDefinition = "varchar2(1000)", nullable = true)
private String fullSql;
/**
 * Builds the full paginated query from the column/row filters and the join
 * relations, caches the fragments on this instance, and returns the SQL.
 * Returns null if the select, from or where fragment turns out empty
 * (caller treats that as an invalid configuration).
 */
public String generateAndSetFullSql(List<TableJoinRelationConfig> tableJoinRelationConfigs, DataSourceTypeEnum dataSourceTypeEnum){
// Filters are assumed validated before this point.
StringBuilder fullSqlBuilder = new StringBuilder();
// SELECT fragment (note: local variables shadow the transient fields on
// purpose; the fields are only set once all fragments are non-empty).
String selectSql = generateSelectSql(this.getColumnDataRangeConfig());
if (StringUtils.isEmpty(selectSql)) {
return null;
}
fullSqlBuilder.append(" select ").append(selectSql);
// FROM fragment (inner joins along the configured relations)
String fromSql = generateFromSql(tableJoinRelationConfigs);
if (StringUtils.isEmpty(fromSql)) {
return null;
}
fullSqlBuilder.append(fromSql);
// WHERE fragment
String whereSql = generateWhereSql(this.getLineDataRangeConfig());
if (StringUtils.isEmpty(whereSql)) {
return null;
}
fullSqlBuilder.append(" where ").append(whereSql);
// Dialect-specific pagination clause
fullSqlBuilder = generateLimitSql(fullSqlBuilder, dataSourceTypeEnum);
// Final SQL
String fullSql = fullSqlBuilder.toString();
// Cache the generated SQL and its fragments on this instance
this.setFullSql(fullSql);
this.setFromSql(fromSql);
this.setSelectSql(selectSql);
this.setWhereSql(whereSql);
return fullSql;
}
/**
 * Builds the row-count query (select count(1) + from + where).
 * NOTE(review): reads the transient fromSql/whereSql fields, so
 * generateAndSetFullSql must have been called on this instance first —
 * otherwise the fragments are null and the result is "select count(1) null
 * where null".
 */
public String generateDataCountSql(DataSourceTypeEnum dataSourceTypeEnum) {
// Filters are assumed validated before this point.
StringBuilder fullSqlBuilder = new StringBuilder();
// COUNT select
fullSqlBuilder.append(" select count(1) ");
// Reuse the cached FROM fragment
String fromSql = this.getFromSql();
fullSqlBuilder.append(fromSql);
// Reuse the cached WHERE fragment
String whereSql = this.getWhereSql();
fullSqlBuilder.append(" where ").append(whereSql);
// Final SQL
String fullSql = fullSqlBuilder.toString();
return fullSql;
}
/**
 * Generates the FROM clause: the first relation's table, then an INNER JOIN
 * chain where each config joins to the previous one on the configured
 * columns. Empty input yields an empty string.
 */
private String generateFromSql(List<TableJoinRelationConfig> tableJoinRelationConfigs) {
StringBuilder resultBuilder = new StringBuilder();
if (tableJoinRelationConfigs.size() > 0) {
TableJoinRelationConfig firstRelation = tableJoinRelationConfigs.get(0);
resultBuilder.append(" FROM ")
.append(firstRelation.getTableName()).append(" ");
}
for (int i = 1; i < tableJoinRelationConfigs.size(); i++) {
TableJoinRelationConfig curConfig = tableJoinRelationConfigs.get(i);
TableJoinRelationConfig preConfig = tableJoinRelationConfigs.get(i - 1);
resultBuilder.append(" INNER JOIN ")
.append(curConfig.getTableName())
.append(" ON ")
.append(curConfig.getTableName())
.append(".")
.append(curConfig.getColumnName())
.append(" = ")
.append(preConfig.getTableName())
.append(".")
.append(preConfig.getColumnName())
.append(" ");
}
return resultBuilder.toString();
}
/**
 * Generates the SELECT column list; delegates to ColumnDataRangeFilter.
 */
private String generateSelectSql(List<ColumnDataRangeFilter> columnDataRangeConfig) {
String selectSql = ColumnDataRangeFilter.generateFilterSelectSql(columnDataRangeConfig);
return selectSql;
}
/**
 * Generates the WHERE condition; delegates to LineDataRangeFilter.
 */
private String generateWhereSql(List<LineDataRangeFilter> lineDataRangeFilters) {
String whereSqlResult = LineDataRangeFilter.generateFilterWhereSql(lineDataRangeFilters);
return whereSqlResult;
}
/**
 * Appends the dialect-specific pagination clause; may return a NEW builder
 * (Oracle wraps the query rather than appending).
 *
 * @param fullBuilder the query built so far
 */
private StringBuilder generateLimitSql(StringBuilder fullBuilder, DataSourceTypeEnum dataSourceType) {
return dataSourceType.generateLimitSql(fullBuilder);
}
}
package com.hisense.dataservice.entity;
import com.hisense.dataservice.enums.NetworkEnv;
import com.hisense.dataservice.enums.SystemEnv;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import javax.persistence.*;
/**
 * Subscription record: which system subscribes to which data-API model, and
 * in which network/system environment.
 *
 * @author : scc
 * @date : 2023/02/24
 **/
@Data
@AllArgsConstructor
@NoArgsConstructor
@Entity
@Table(name = "data_api_subscribe_config")
public class DataApiSubscribeConfig extends BaseEntity {
// Subscribed data-API model ID
@Column(name = "data_api_model_id", nullable = false)
private Long dataApiModelId;
// Subscribing system ID
@Column(name = "subscribe_system_id", nullable = false)
private Integer subscribeSystemId;
// Network environment (INNER/OUTER), stored as the enum name
@Enumerated(EnumType.STRING)
@Column(name = "network_env", columnDefinition = "varchar2(10)", nullable = false)
private NetworkEnv networkEnv;
// System environment (DEV/PRD), stored as the enum name
@Enumerated(EnumType.STRING)
@Column(name = "system_env", columnDefinition = "varchar2(10)", nullable = false)
private SystemEnv systemEnv;
}
package com.hisense.dataservice.entity;
import com.hisense.dataservice.vo.DataTableVo;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import javax.persistence.Entity;
import javax.persistence.Table;
/**
 * Data-table configuration entity: a table registered under a data source.
 *
 * @author : scc
 * @date : 2023/02/20
 **/
@Data
@AllArgsConstructor
@NoArgsConstructor
@Entity
@Table(name = "data_api_table")
public class DataApiTable extends BaseEntity {
    // Physical table name
    private String tableName;
    // Owning data-source ID
    private Long dataSourceId;

    /** Converts this entity into its view object. */
    public DataTableVo toItem() {
        DataTableVo vo = new DataTableVo();
        vo.setTableId(getId());
        vo.setTableName(getTableName());
        vo.setDataSourceId(getDataSourceId());
        vo.setDescription(getDescription());
        return vo;
    }
}
package com.hisense.dataservice.entity.convert;
import com.alibaba.fastjson.JSONObject;
import com.hisense.dataservice.bo.ColumnDataRangeFilter;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import javax.persistence.AttributeConverter;
import java.util.ArrayList;
import java.util.List;
/**
 * JPA converter persisting a list of column data-range filters as a JSON
 * string, and restoring it to a typed list.
 *
 * @author : scc
 * @date : 2023/02/20
 **/
public class ColumnDataRangeFilterAttributeConvert implements AttributeConverter<List<ColumnDataRangeFilter>, String> {
    @Override
    public String convertToDatabaseColumn(List<ColumnDataRangeFilter> columnDataRangeFilters) {
        // Empty/null lists are stored as an empty string.
        return CollectionUtils.isEmpty(columnDataRangeFilters) ? "" : JSONObject.toJSONString(columnDataRangeFilters);
    }

    @Override
    public List<ColumnDataRangeFilter> convertToEntityAttribute(String dataRangeFilterJsonStr) {
        // BUG FIX: parseObject(str, List.class) produced a raw List whose
        // elements are JSONObject, not ColumnDataRangeFilter, causing a
        // ClassCastException at first use. parseArray deserializes each
        // element to the target type.
        return StringUtils.isEmpty(dataRangeFilterJsonStr)
                ? new ArrayList<>()
                : JSONObject.parseArray(dataRangeFilterJsonStr, ColumnDataRangeFilter.class);
    }
}
package com.hisense.dataservice.entity.convert;
import com.alibaba.fastjson.JSONObject;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import javax.persistence.AttributeConverter;
import java.util.ArrayList;
import java.util.List;
/**
 * JPA converter persisting a list of related data-table ids as a JSON array
 * string, and restoring it to a typed Integer list.
 *
 * @author : scc
 * @date : 2023/02/20
 **/
public class DataTableIdsAttributeConvert implements AttributeConverter<List<Integer>, String> {
    @Override
    public String convertToDatabaseColumn(List<Integer> dataTableIds) {
        // Empty/null lists are stored as an empty string.
        return CollectionUtils.isEmpty(dataTableIds) ? "" : JSONObject.toJSONString(dataTableIds);
    }

    @Override
    public List<Integer> convertToEntityAttribute(String dataTableIdJsonStr) {
        // FIX: parseObject(str, List.class) returned a raw, untyped list;
        // parseArray guarantees every element is an Integer.
        return StringUtils.isEmpty(dataTableIdJsonStr)
                ? new ArrayList<>()
                : JSONObject.parseArray(dataTableIdJsonStr, Integer.class);
    }
}
package com.hisense.dataservice.entity.convert;
import com.alibaba.fastjson.JSONObject;
import com.hisense.dataservice.bo.LineDataRangeFilter;
import org.springframework.util.StringUtils;
import javax.persistence.AttributeConverter;
/**
 * JPA converter persisting a single row data-range filter as a JSON string,
 * and restoring it (a missing/empty value yields a fresh empty filter).
 *
 * @author : scc
 * @date : 2023/02/20
 **/
public class LineDataRangeFilterAttributeConvert implements AttributeConverter<LineDataRangeFilter, String> {
    @Override
    public String convertToDatabaseColumn(LineDataRangeFilter filter) {
        if (filter == null) {
            return "";
        }
        return JSONObject.toJSONString(filter);
    }

    @Override
    public LineDataRangeFilter convertToEntityAttribute(String dataRangeFilterJsonStr) {
        if (StringUtils.isEmpty(dataRangeFilterJsonStr)) {
            return new LineDataRangeFilter();
        }
        return JSONObject.parseObject(dataRangeFilterJsonStr, LineDataRangeFilter.class);
    }
}
package com.hisense.dataservice.entity.convert;
import com.alibaba.fastjson.JSONObject;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import javax.persistence.AttributeConverter;
/**
 * JPA converter persisting a table-relations JSONObject as a JSON string,
 * and restoring it (a missing/empty value yields an empty JSONObject).
 *
 * @author : scc
 * @date : 2023/02/20
 **/
public class TableRelationsAttributeConvert implements AttributeConverter<JSONObject, String> {
    @Override
    public String convertToDatabaseColumn(JSONObject tableRelations) {
        if (CollectionUtils.isEmpty(tableRelations)) {
            return "";
        }
        return JSONObject.toJSONString(tableRelations);
    }

    @Override
    public JSONObject convertToEntityAttribute(String tableRelationJsonStr) {
        if (StringUtils.isEmpty(tableRelationJsonStr)) {
            return new JSONObject();
        }
        return JSONObject.parseObject(tableRelationJsonStr);
    }
}
package com.hisense.dataservice.enums;
/**
 * Supported data-source (database dialect) types. Each constant knows how to
 * wrap a query with its dialect's pagination clause; the two bind parameters
 * are supplied by the caller.
 *
 * @author : scc
 * @date : 2023/02/20
 **/
public enum DataSourceTypeEnum {
    ORACLE,
    MYSQL,
    POSTGRESQL;

    /**
     * Appends (or wraps with) this dialect's pagination clause.
     *
     * @param fullSqlBuilder the query built so far
     * @return the paginated query; for ORACLE this is a NEW builder that
     *         embeds the input, for the other dialects the input builder
     *         itself is returned
     */
    public StringBuilder generateLimitSql(StringBuilder fullSqlBuilder) {
        switch (this) {
            case ORACLE:
                // SELECT * FROM (SELECT ROWNUM RN, XX.* FROM (<query>) XX
                //   WHERE ROWNUM <= pageSize*pageNumber) WHERE RN > (pageNumber-1)*pageSize
                StringBuilder preSqlBuilder = new StringBuilder();
                preSqlBuilder.append("SELECT * FROM (SELECT ROWNUM RN, DBS.* FROM ( ");
                preSqlBuilder.append(fullSqlBuilder).append(" ) DBS WHERE ROWNUM <= ? ) WHERE RN > ? ");
                return preSqlBuilder;
            case MYSQL:
                // e.g. select * from information_schema.tables limit 0,4
                // BUG FIX: a leading space is required so "limit" does not
                // fuse with the preceding token when the incoming fragment
                // has no trailing space (e.g. "...where x=1limit ?, ?").
                fullSqlBuilder.append(" limit ?, ?");
                return fullSqlBuilder;
            case POSTGRESQL:
                // e.g. select * from information_schema.tables limit 4 offset 0
                fullSqlBuilder.append(" limit ? offset ?");
                return fullSqlBuilder;
        }
        return fullSqlBuilder;
    }
}
package com.hisense.dataservice.enums;
/**
 * Field (column) data types supported by the data-API layer.
 *
 * TODO: derive the available types from the concrete data-source dialect.
 *
 * @author : scc
 * @date : 2023/02/20
 **/
public enum FieldTypeEnum {
    STRING,
    INTEGER,
    LONG,
    DATE,
    TIMESTAMP,
    DOUBLE
}
package com.hisense.dataservice.enums;
/**
 * Kind of a DataApiModelConfig row: publish-side or subscribe-side filters.
 *
 * @author : scc
 * @date : 2023/02/28
 **/
public enum ModelConfigType {
    PUBLISH("发布"), SUBSCRIBE("订阅");

    // Human-readable label; previously write-only — getter added.
    private final String desc;

    ModelConfigType(String desc) {
        this.desc = desc;
    }

    /** @return the human-readable label of this config kind */
    public String getDesc() {
        return desc;
    }
}
package com.hisense.dataservice.enums;
/**
 * Network environment of a publication/subscription: intranet or internet.
 *
 * @author : scc
 * @date : 2023/02/24
 **/
public enum NetworkEnv {
    INNER("内网"), OUTER("外网");

    // Human-readable label; previously write-only — getter added.
    private final String desc;

    NetworkEnv(String desc) {
        this.desc = desc;
    }

    /** @return the human-readable label of this network environment */
    public String getDesc() {
        return desc;
    }
}
package com.hisense.dataservice.enums;
/**
 * Comparison operators available for data-range filters.
 *
 * @author : scc
 * @date : 2023/02/20
 **/
public enum OperateEnum {
    GREATER_THAN("大于"),
    LESS_THAN("小于"),
    EQUAL("等于"),
    NOT_EQUAL("不等于"),
    GT_EQ("大于等于"),
    LT_EQ("小于等于"),
    LIKE("模糊匹配"),
    IN("集合"),
    BETWEEN_AND("范围");

    // Human-readable label; previously write-only — getter added.
    private final String desc;

    OperateEnum(String desc) {
        this.desc = desc;
    }

    /** @return the human-readable label of this operator */
    public String getDesc() {
        return desc;
    }
}
\ No newline at end of file
package com.hisense.dataservice.enums;
/**
 * Deployment environment: test (DEV) or production (PRD).
 *
 * @author : scc
 * @date : 2023/02/24
 **/
public enum SystemEnv {
    DEV("测试"), PRD("生产");

    // Human-readable label; previously write-only — getter added.
    private final String desc;

    SystemEnv(String desc) {
        this.desc = desc;
    }

    /** @return the human-readable label of this environment */
    public String getDesc() {
        return desc;
    }
}
package com.hisense.dataservice.library.model;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/**
 * Generic API result wrapper: a string status code, an optional message, an
 * optional alert flag and an optional payload.
 *
 * <p>Code {@link #OK} ("0") means success, {@link #FAIL} ("1") failure,
 * {@link #OTHER} ("2") anything else.
 *
 * @param <T> payload type
 */
public class Result<T> implements Serializable {
    public static final String OK = "0";
    public static final String FAIL = "1";
    public static final String OTHER = "2";
    private String code;
    private String msg;
    private Integer alert;
    private T data;

    /** Successful result with no payload. */
    public Result() {
        this.code = OK;
    }

    /** Successful result carrying a payload. */
    public Result(T data) {
        this.code = OK;
        this.setData(data);
    }

    public Result(String code, String msg) {
        this.code = code;
        this.msg = msg;
    }

    public Result(String code, T data) {
        this.code = code;
        this.data = data;
    }

    public Result(String code, String msg, T data) {
        this.code = code;
        this.msg = msg;
        this.data = data;
    }

    public Result(String code, String msg, T data, Integer alert) {
        this.code = code;
        this.msg = msg;
        this.data = data;
        this.alert = alert;
    }

    /** Marks this result failed (code {@link #FAIL}) with the given message. */
    public Result<T> setError(String msg) {
        this.setCode(FAIL);
        this.setMsg(msg);
        return this;
    }

    /** Marks this result failed with an explicit code and message. */
    public Result<T> setError(String code, String msg) {
        this.setCode(code);
        this.setMsg(msg);
        return this;
    }

    // Static factories. FIX: these previously declared raw "Result" return
    // types, forcing unchecked assignments at every call site; they are now
    // fully generic. Raw-typed callers still compile unchanged.
    public static <T> Result<T> instance() {
        return new Result<>();
    }

    public static <T> Result<T> instance(T data) {
        return new Result<>(data);
    }

    public static <T> Result<T> instance(String code, String msg) {
        return new Result<>(code, msg);
    }

    public static <T> Result<T> instance(String code, String msg, T data) {
        return new Result<>(code, msg, data);
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public String getMsg() {
        return msg;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    public Integer getAlert() {
        return alert;
    }

    public void setAlert(Integer alert) {
        this.alert = alert;
    }

    public T getData() {
        return data;
    }

    public void setData(T data) {
        this.data = data;
    }

    /** @return a map view with the "data", "msg" and "code" entries (no alert). */
    public Map<String, Object> toJsonMap() {
        Map<String, Object> map = new HashMap<>();
        map.put("data", this.data);
        map.put("msg", this.msg);
        map.put("code", this.code);
        return map;
    }

    /** @return true when the code equals {@link #OK}. */
    public boolean isSuccess() {
        return OK.equals(this.code);
    }

    public boolean isFailure() {
        return !isSuccess();
    }

    @Override
    public String toString() {
        return "Result{" +
                "code='" + code + '\'' +
                ", msg='" + msg + '\'' +
                ", alert=" + alert +
                ", data=" + data +
                '}';
    }
}
\ No newline at end of file
/**
* Copyright © 2016-2022 The Kubenote Authors
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hisense.dataservice.library.model.page;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.Collections;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * Generic page wrapper returned by paged queries.
 *
 * @param <T> element type
 */
@ApiModel
@Data
public class PageData<T> {
    private List<T> data;
    private int totalPages;
    private long totalElements;
    private boolean hasNext;
    private long currentPage;

    /** Empty first page. */
    public PageData() {
        this(Collections.emptyList(), 0, 0, false);
    }

    /**
     * Canonical data-carrying constructor; the single Jackson creator used
     * for deserialization.
     */
    @JsonCreator
    public PageData(@JsonProperty("data") List<T> data,
                    @JsonProperty("totalPages") int totalPages,
                    @JsonProperty("totalElements") long totalElements,
                    @JsonProperty("hasNext") boolean hasNext) {
        this.data = data;
        this.totalPages = totalPages;
        this.totalElements = totalElements;
        this.hasNext = hasNext;
    }

    /**
     * Metadata-only page (no content); converts the zero-based page index to
     * the one-based currentPage exposed to clients.
     *
     * BUG FIX: this constructor was also annotated @JsonCreator; Jackson
     * rejects a type with two property-based creators, so the annotation is
     * kept only on the data-carrying constructor above.
     * NOTE(review): this constructor leaves {@code data} null, so
     * {@link #mapData(Function)} would NPE until data is set — confirm
     * callers always populate it.
     */
    public PageData(@JsonProperty("totalPages") int totalPages,
                    @JsonProperty("totalElements") long totalElements,
                    @JsonProperty("currentPage") long currentPage,
                    @JsonProperty("hasNext") boolean hasNext) {
        this.totalPages = totalPages;
        this.totalElements = totalElements;
        this.currentPage = currentPage + 1;
        this.hasNext = hasNext;
    }

    @ApiModelProperty(position = 1, value = "Array of the entities", accessMode = ApiModelProperty.AccessMode.READ_ONLY)
    public List<T> getData() {
        return data;
    }

    @ApiModelProperty(position = 2, value = "Total number of available pages. Calculated based on the 'pageSize' request parameter and total number of entities that match search criteria", accessMode = ApiModelProperty.AccessMode.READ_ONLY)
    public int getTotalPages() {
        return totalPages;
    }

    @ApiModelProperty(position = 3, value = "Total number of elements in all available pages", accessMode = ApiModelProperty.AccessMode.READ_ONLY)
    public long getTotalElements() {
        return totalElements;
    }

    @ApiModelProperty(position = 4, value = "'false' value indicates the end of the result set", accessMode = ApiModelProperty.AccessMode.READ_ONLY)
    @JsonProperty("hasNext")
    public boolean hasNext() {
        return hasNext;
    }

    /** Returns a new page whose elements are mapped through {@code mapper}. */
    public <D> PageData<D> mapData(Function<T, D> mapper) {
        return new PageData<>(getData().stream().map(mapper).collect(Collectors.toList()), getTotalPages(), getTotalElements(), hasNext());
    }
}
package com.hisense.dataservice.library.model.page;
import com.fasterxml.jackson.annotation.JsonIgnore;
import lombok.Data;
import org.springframework.data.domain.Sort;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * Immutable description of one page request: page index/size, optional text
 * search and optional sort order.
 */
@Data
public class PageLink {
    protected static final String DEFAULT_SORT_PROPERTY = "id";
    private static final Sort DEFAULT_SORT = Sort.by(Sort.Direction.ASC, DEFAULT_SORT_PROPERTY);
    private final String textSearch;
    private final int pageSize;
    private final int page;
    private final SortOrder sortOrder;

    /** Copy constructor. */
    public PageLink(PageLink pageLink) {
        this(pageLink.getPageSize(), pageLink.getPage(), pageLink.getTextSearch(), pageLink.getSortOrder());
    }

    public PageLink(int pageSize) {
        this(pageSize, 0);
    }

    public PageLink(int pageSize, int page) {
        this(pageSize, page, null, null);
    }

    public PageLink(int pageSize, int page, String textSearch) {
        this(pageSize, page, textSearch, null);
    }

    public PageLink(int pageSize, int page, String textSearch, SortOrder sortOrder) {
        this.pageSize = pageSize;
        this.page = page;
        this.textSearch = textSearch;
        this.sortOrder = sortOrder;
    }

    /** @return a link for the following page, everything else unchanged */
    @JsonIgnore
    public PageLink nextPageLink() {
        return new PageLink(pageSize, page + 1, textSearch, sortOrder);
    }

    /**
     * Maps a single sort order to a Spring Sort, translating the property
     * through {@code columnMap}; null yields the default ascending-by-id sort.
     */
    public Sort toSort(SortOrder sortOrder, Map<String, String> columnMap) {
        if (sortOrder == null) {
            return DEFAULT_SORT;
        }
        String property = columnMap.getOrDefault(sortOrder.getProperty(), sortOrder.getProperty());
        return Sort.by(Sort.Direction.fromString(sortOrder.getDirection().name()), property);
    }

    /** Maps several sort orders to one Spring Sort (nulls last). */
    public Sort toSort(List<SortOrder> sortOrders, Map<String, String> columnMap) {
        List<Sort.Order> orders = sortOrders.stream()
                .map(order -> toSortOrder(order, columnMap))
                .collect(Collectors.toList());
        return Sort.by(orders);
    }

    // Translates one SortOrder, mapping the property name and forcing
    // NULLS_LAST handling.
    private Sort.Order toSortOrder(SortOrder sortOrder, Map<String, String> columnMap) {
        String property = columnMap.getOrDefault(sortOrder.getProperty(), sortOrder.getProperty());
        return new Sort.Order(Sort.Direction.fromString(sortOrder.getDirection().name()), property, Sort.NullHandling.NULLS_LAST);
    }
}
package com.hisense.dataservice.library.model.page;
import lombok.Data;
/**
 * One sort criterion: a property name plus an ascending/descending direction
 * (ascending by default).
 */
@Data
public class SortOrder {
    private final String property;
    private final Direction direction;

    public SortOrder(String property) {
        this(property, Direction.ASC);
    }

    public SortOrder(String property, Direction direction) {
        this.property = property;
        this.direction = direction;
    }

    /** Sort direction. */
    public enum Direction {
        ASC, DESC
    }
}
package com.hisense.dataservice.repository;
import com.hisense.dataservice.entity.DataApiColumn;
import com.hisense.dataservice.entity.DataApiTable;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
 * Repository for {@link DataApiColumn} (data-table field configuration).
 *
 * @author : scc
 * @date : 2023/02/23
 **/
@Repository
public interface DataApiColumnRepository extends JpaRepository<DataApiColumn, Long> {
    /** All live (not soft-deleted) columns of the given data table. */
    List<DataApiColumn> findAllByDataTableIdAndDeletedIsFalse(Long dataTableId);

    /** Paged lookup by dynamic criteria. FIX: raw Specification parameterized. */
    Page<DataApiColumn> findAll(Specification<DataApiColumn> specification, Pageable pageable);

    /**
     * Whether the given table already has a column with this field name.
     * FIX: parameter renamed from the typo "fileName" to "fieldName"
     * (binding in derived queries is positional, so behavior is unchanged).
     */
    boolean existsByDataTableIdAndFieldName(Long dataTableId, String fieldName);
}
package com.hisense.dataservice.repository;
import com.hisense.dataservice.entity.DataApiDataSource;
import com.hisense.dataservice.enums.DataSourceTypeEnum;
import com.hisense.dataservice.vo.DataSourceVo;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
 * Repository for {@link DataApiDataSource} (data-source configuration).
 *
 * @author : scc
 * @date : 2023/02/23
 **/
@Repository
public interface DataApiDataSourceRepository extends JpaRepository<DataApiDataSource, Long> {
    /** All live (not soft-deleted) data sources, projected to view objects. */
    List<DataSourceVo> findAllByDeletedIsFalse();

    /** Live data sources of one dialect. */
    List<DataApiDataSource> findByTypeAndDeletedIsFalse(DataSourceTypeEnum sourceType);

    /** Paged lookup by dynamic criteria. FIX: raw Specification parameterized. */
    Page<DataApiDataSource> findAll(Specification<DataApiDataSource> specification, Pageable pageable);

    /**
     * Distinct dialect names among live data sources.
     * BUG FIX: the concatenated native query previously read
     * "...ds.typeFROM data_api_datasource..." — a trailing space was missing
     * after the select list, producing invalid SQL.
     */
    @Query(value = "SELECT distinct ds.type " +
            "FROM data_api_datasource ds " +
            "WHERE ds.deleted = false", nativeQuery = true)
    List<String> findAllSourceType();
}
package com.hisense.dataservice.repository;
import com.hisense.dataservice.entity.DataApiModelConfig;
import com.hisense.dataservice.enums.ModelConfigType;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import java.util.Optional;
/**
 * Repository for {@link DataApiModelConfig} (row/column filter configuration).
 *
 * @author : scc
 * @date : 2023/02/23
 **/
@Repository
public interface DataApiModelConfigRepository extends JpaRepository<DataApiModelConfig,Long> {
// Looks up the single config belonging to a model (PUBLISH) or to a
// subscribe config (SUBSCRIBE); typeId is interpreted according to type.
Optional<DataApiModelConfig> findByTypeIdAndType(Long typeId, ModelConfigType type);
}
package com.hisense.dataservice.repository;
import com.hisense.dataservice.entity.DataApiModel;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import java.util.Optional;
/**
 * Repository for {@link DataApiModel} (data-API model/template).
 *
 * @author : scc
 * @date : 2023/02/23
 **/
@Repository
public interface DataApiModelRepository extends JpaRepository<DataApiModel,Long> {
// Looks up a model by its external API identifier (assumed unique).
Optional<DataApiModel> findByApiID(Integer apiId);
}
package com.hisense.dataservice.repository;
import com.hisense.dataservice.entity.DataApiSubscribeConfig;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import java.util.Optional;
/**
 * Repository for {@link DataApiSubscribeConfig} (subscription records).
 *
 * @author : scc
 * @date : 2023/02/23
 **/
@Repository
public interface DataApiSubscribeRepository extends JpaRepository<DataApiSubscribeConfig, Long> {
// Live (not soft-deleted) subscription of one system to one model.
Optional<DataApiSubscribeConfig> findBySubscribeSystemIdAndDataApiModelIdAndDeletedIsFalse(Integer subscribeSystemId, Long dataApiModelId);
}
package com.hisense.dataservice.repository;
import com.alibaba.fastjson.JSONObject;
import com.hisense.dataservice.entity.DataApiDataSource;
import com.hisense.dataservice.entity.DataApiTable;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
* @author : scc
* @date : 2023/02/23
**/
@Repository
public interface DataApiTableRepository extends JpaRepository<DataApiTable, Long> {

    /** All non-deleted tables belonging to the given data source. */
    List<DataApiTable> findAllByDataSourceIdAndDeletedIsFalse(Long dataSourceId);

    /** Pages through tables matching the given specification (was a raw Specification). */
    Page<DataApiTable> findAll(Specification<DataApiTable> specification, Pageable pageable);

    /** Tables of the given data source with the given table id (at most one row expected). */
    List<DataApiTable> findAllByDataSourceIdAndId(Long dataSourceId, Long id);

    /**
     * Resolves the table name and column name for a (table, column) pair.
     * Fix: the original string concatenation produced "columnNameFROM data_api_table"
     * (missing trailing space on the first fragment), which is invalid SQL.
     */
    @Query(value = "SELECT t.id as id, t.name as tableName, c.name as columnName " +
            "FROM data_api_table t " +
            "INNER JOIN data_api_column c on t.id = c.data_table_id " +
            "WHERE t.id= :tableId and c.id = :columnId", nativeQuery = true)
    JSONObject queryTableAndColumnName(@Param("tableId") Long tableId, @Param("columnId") Long columnId);
}
package com.hisense.dataservice.service;
import com.hisense.dataservice.library.model.page.PageData;
import com.hisense.dataservice.library.model.page.PageLink;
import com.hisense.dataservice.library.model.page.SortOrder;
import com.hisense.dataservice.library.model.Result;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.http.HttpStatus;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
* @author : scc
* @date : 2023/03/06
**/
/**
 * Mixin of paging/result helpers shared by the service layer.
 */
public interface BaseService {

    /**
     * Copies paging metadata (total pages/elements, current page number, hasNext)
     * from a Spring {@link Page} into the project's {@link PageData} envelope.
     * Fix: generified — the original used raw {@code Page}/{@code PageData}. The
     * element type is inferred from the assignment target ({@code Page<?>} keeps
     * every existing call site compiling).
     */
    default <T> PageData<T> pageToPageData(Page<?> page) {
        return new PageData<>(page.getTotalPages(), page.getTotalElements(), page.getNumber(), page.hasNext());
    }

    /** Pageable with no sort-column mapping. */
    default Pageable toPageable(PageLink pageLink) {
        return toPageable(pageLink, Collections.emptyMap());
    }

    /** Plain page request (page index + size), ignoring any sort order on the link. */
    default PageRequest toPageRequest(PageLink pageLink) {
        return PageRequest.of(pageLink.getPage(), pageLink.getPageSize());
    }

    /** Pageable using the link's own sort order, translated through {@code columnMap}. */
    default Pageable toPageable(PageLink pageLink, Map<String, String> columnMap) {
        return PageRequest.of(pageLink.getPage(), pageLink.getPageSize(), pageLink.toSort(pageLink.getSortOrder(), columnMap));
    }

    /** Pageable with an explicit sort-order list and no column mapping. */
    default Pageable toPageable(PageLink pageLink, List<SortOrder> sortOrders) {
        return toPageable(pageLink, Collections.emptyMap(), sortOrders);
    }

    /** Pageable with an explicit sort-order list, translated through {@code columnMap}. */
    default Pageable toPageable(PageLink pageLink, Map<String, String> columnMap, List<SortOrder> sortOrders) {
        return PageRequest.of(pageLink.getPage(), pageLink.getPageSize(), pageLink.toSort(sortOrders, columnMap));
    }

    /** HTTP-200 result with message and payload. */
    default <T> Result<T> success(String msg, T data) {
        return new Result<T>(String.valueOf(HttpStatus.OK.value()), msg, data);
    }

    /** HTTP-200 result with message only. */
    default <T> Result<T> success(String msg) {
        return new Result<T>(String.valueOf(HttpStatus.OK.value()), msg);
    }

    /** HTTP-200 result with payload only. */
    default <T> Result<T> success(T data) {
        return new Result<T>(String.valueOf(HttpStatus.OK.value()), data);
    }

    /** HTTP-500 result with message and payload. */
    default <T> Result<T> failure(String msg, T data) {
        return new Result<>(String.valueOf(HttpStatus.INTERNAL_SERVER_ERROR.value()), msg, data);
    }

    /** HTTP-500 result with message only. */
    default <T> Result<T> failure(String message) {
        return new Result<>(String.valueOf(HttpStatus.INTERNAL_SERVER_ERROR.value()), message);
    }
}
package com.hisense.dataservice.service;
import org.springframework.web.bind.annotation.PathVariable;
/**
* @author : scc
* @date : 2023/02/24
**/
public interface DataApiCommonService {
    /**
     * Executes a data query for the given environment/pattern pair.
     * Fix: removed the {@code @PathVariable} annotations — that is a Spring MVC
     * controller-binding annotation with no effect on a service-layer interface;
     * path binding belongs on the controller method.
     */
    Object queryData(String environment, String pattern);
}
package com.hisense.dataservice.service;
import com.hisense.dataservice.enums.DataSourceTypeEnum;
import com.hisense.dataservice.library.model.page.PageData;
import com.hisense.dataservice.library.model.page.PageLink;
import com.hisense.dataservice.vo.DataFieldVo;
import com.hisense.dataservice.vo.DataSourceVo;
import com.hisense.dataservice.vo.DataTableVo;
import com.hisense.dataservice.library.model.Result;
import java.util.List;
/**
* @author : scc
* @date : 2023/03/06
**/
/**
 * Read-side API over registered data sources, their tables, and their columns.
 * NOTE(review): the {@code environment} parameter is accepted on every method but
 * ignored by the current implementation — confirm whether it is meant to route queries.
 */
public interface DataApiDataSourceManagementService extends BaseService {
    /** Distinct type names of all registered (non-deleted) data sources. */
    Result<List<String>> queryDataSourceTypeList(String environment);
    /** All non-deleted data sources of the given type. */
    Result<List<DataSourceVo>> queryDataSourceListByType(String environment, DataSourceTypeEnum dataSourceType);
    /** Paged variant of {@link #queryDataSourceListByType}. */
    Result<PageData<DataSourceVo>> pageQueryDataSourceListByType(String environment, DataSourceTypeEnum dataSourceType, PageLink pageLink);
    /** Tables belonging to one data source. */
    Result<List<DataTableVo>> queryDataTableListByDataSource(String environment, Long dataSourceId);
    /** Paged variant of {@link #queryDataTableListByDataSource}. */
    Result<PageData<DataTableVo>> pageQueryDataTableListByDataSource(String environment, Long dataSourceId, PageLink pageLink);
    /** Columns/fields belonging to one data table. */
    Result<List<DataFieldVo>> queryDataFieldListByTable(String environment, Long tableId);
    /** Paged variant of {@link #queryDataFieldListByTable}. */
    Result<PageData<DataFieldVo>> pageQueryDataFieldListByTable(String environment, Long tableId, PageLink pageLink);
}
package com.hisense.dataservice.service;
import com.hisense.dataservice.dto.DataApiModelDto;
import com.hisense.dataservice.library.model.Result;
/**
* @author : scc
* @date : 2023/03/06
**/
public interface DataApiServiceManagementService extends BaseService {
    /**
     * Creates a new data api model (when the dto carries no model id) or updates an
     * existing one, including its row/column filter config and generated SQL.
     *
     * @return success with {@code true}, or a failure result describing what was missing
     */
    Result<Boolean> createOrUpdateDataApi(DataApiModelDto dataApiModelDto);
}
package com.hisense.dataservice.service.impl;
import com.hisense.dataservice.entity.DataApiColumn;
import com.hisense.dataservice.entity.DataApiDataSource;
import com.hisense.dataservice.entity.DataApiTable;
import com.hisense.dataservice.enums.DataSourceTypeEnum;
import com.hisense.dataservice.repository.DataApiColumnRepository;
import com.hisense.dataservice.repository.DataApiDataSourceRepository;
import com.hisense.dataservice.repository.DataApiTableRepository;
import com.hisense.dataservice.service.DataApiDataSourceManagementService;
import com.hisense.dataservice.library.model.page.PageData;
import com.hisense.dataservice.library.model.page.PageLink;
import com.hisense.dataservice.vo.DataFieldVo;
import com.hisense.dataservice.vo.DataSourceVo;
import com.hisense.dataservice.vo.DataTableVo;
import com.hisense.dataservice.library.model.Result;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.stereotype.Service;
import javax.persistence.criteria.Predicate;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
/**
* @author : scc
* @date : 2023/03/06
**/
/**
 * Read-side implementation over data sources, tables, and columns.
 * NOTE(review): the {@code environment} parameter is ignored throughout — confirm intent.
 */
@Service
@Slf4j
public class DataApiDataSourceManagementServiceImpl implements DataApiDataSourceManagementService {

    @Autowired
    private DataApiDataSourceRepository dataApiDataSourceRepository;
    @Autowired
    private DataApiTableRepository dataApiTableRepository;
    @Autowired
    private DataApiColumnRepository dataApiColumnRepository;

    /** Distinct type names of all non-deleted data sources. */
    @Override
    public Result<List<String>> queryDataSourceTypeList(String environment) {
        List<String> sourceTypeList = dataApiDataSourceRepository.findAllSourceType();
        return success(sourceTypeList);
    }

    // TODO cache this lookup
    /** All non-deleted data sources of the given type, mapped to view objects. */
    @Override
    public Result<List<DataSourceVo>> queryDataSourceListByType(String environment, DataSourceTypeEnum dataSourceType) {
        List<DataSourceVo> results = new ArrayList<>();
        List<DataApiDataSource> dataApiDataSources = dataApiDataSourceRepository.findByTypeAndDeletedIsFalse(dataSourceType);
        for (DataApiDataSource dataApiDataSource : dataApiDataSources) {
            results.add(dataApiDataSource.toItem());
        }
        return success(results);
    }

    /** Paged data sources of one type (type = given, deleted = false). */
    @Override
    public Result<PageData<DataSourceVo>> pageQueryDataSourceListByType(String environment, DataSourceTypeEnum dataSourceType, PageLink pageLink) {
        Specification<DataApiDataSource> spec = (root, query, builder) -> {
            List<Predicate> andList = new LinkedList<>();
            andList.add(builder.equal(root.get("type").as(DataSourceTypeEnum.class), dataSourceType));
            andList.add(builder.equal(root.get("deleted").as(Boolean.class), Boolean.FALSE));
            return builder.and(andList.toArray(new Predicate[0]));
        };
        Page<DataApiDataSource> dataApiDataSourcePage = dataApiDataSourceRepository.findAll(spec, toPageable(pageLink));
        PageData<DataSourceVo> result = pageToPageData(dataApiDataSourcePage);
        List<DataSourceVo> dataSourceVos = new ArrayList<>();
        for (DataApiDataSource dataApiDataSource : dataApiDataSourcePage.getContent()) {
            dataSourceVos.add(dataApiDataSource.toItem());
        }
        result.setData(dataSourceVos);
        return success(result);
    }

    /** All non-deleted tables of one data source, mapped to view objects. */
    @Override
    public Result<List<DataTableVo>> queryDataTableListByDataSource(String environment, Long dataSourceId) {
        List<DataTableVo> results = new ArrayList<>();
        List<DataApiTable> dataApiTables = dataApiTableRepository.findAllByDataSourceIdAndDeletedIsFalse(dataSourceId);
        for (DataApiTable dataApiTable : dataApiTables) {
            results.add(dataApiTable.toItem());
        }
        return success(results);
    }

    /** Paged non-deleted tables of one data source. */
    @Override
    public Result<PageData<DataTableVo>> pageQueryDataTableListByDataSource(String environment, Long dataSourceId, PageLink pageLink) {
        Specification<DataApiTable> spec = (root, query, builder) -> {
            List<Predicate> andList = new LinkedList<>();
            andList.add(builder.equal(root.get("dataSourceId").as(Long.class), dataSourceId));
            andList.add(builder.equal(root.get("deleted").as(Boolean.class), Boolean.FALSE));
            return builder.and(andList.toArray(new Predicate[0]));
        };
        Page<DataApiTable> dataApiTablePage = dataApiTableRepository.findAll(spec, toPageable(pageLink));
        PageData<DataTableVo> result = pageToPageData(dataApiTablePage);
        List<DataTableVo> data = new ArrayList<>();
        for (DataApiTable dataApiTable : dataApiTablePage.getContent()) {
            data.add(dataApiTable.toItem());
        }
        result.setData(data);
        return success(result);
    }

    /** All non-deleted columns of one table, mapped to view objects. */
    @Override
    public Result<List<DataFieldVo>> queryDataFieldListByTable(String environment, Long dataTableId) {
        List<DataFieldVo> results = new ArrayList<>();
        List<DataApiColumn> dataApiColumns = dataApiColumnRepository.findAllByDataTableIdAndDeletedIsFalse(dataTableId);
        for (DataApiColumn dataApiColumn : dataApiColumns) {
            results.add(dataApiColumn.toItem());
        }
        return success(results);
    }

    /**
     * Paged non-deleted columns of one table.
     * Fix: the page envelope was a raw {@code PageData}; it is now
     * {@code PageData<DataFieldVo>}, consistent with the other paged methods.
     */
    @Override
    public Result<PageData<DataFieldVo>> pageQueryDataFieldListByTable(String environment, Long tableId, PageLink pageLink) {
        Specification<DataApiColumn> spec = (root, query, builder) -> {
            List<Predicate> andList = new LinkedList<>();
            andList.add(builder.equal(root.get("dataTableId").as(Long.class), tableId));
            andList.add(builder.equal(root.get("deleted").as(Boolean.class), Boolean.FALSE));
            return builder.and(andList.toArray(new Predicate[0]));
        };
        Page<DataApiColumn> dataApiColumnPage = dataApiColumnRepository.findAll(spec, toPageable(pageLink));
        PageData<DataFieldVo> result = pageToPageData(dataApiColumnPage);
        List<DataFieldVo> data = new ArrayList<>();
        for (DataApiColumn dataApiColumn : dataApiColumnPage.getContent()) {
            data.add(dataApiColumn.toItem());
        }
        result.setData(data);
        return success(result);
    }
}
package com.hisense.dataservice.service.impl;
import com.hisense.dataservice.bo.ColumnDataRangeFilter;
import com.hisense.dataservice.bo.LineDataRangeFilter;
import com.hisense.dataservice.bo.TableJoinRelationConfig;
import com.hisense.dataservice.dto.DataApiModelDto;
import com.hisense.dataservice.entity.DataApiDataSource;
import com.hisense.dataservice.entity.DataApiModel;
import com.hisense.dataservice.entity.DataApiModelConfig;
import com.hisense.dataservice.enums.ModelConfigType;
import com.hisense.dataservice.enums.NetworkEnv;
import com.hisense.dataservice.enums.SystemEnv;
import com.hisense.dataservice.library.model.Result;
import com.hisense.dataservice.repository.DataApiDataSourceRepository;
import com.hisense.dataservice.repository.DataApiModelConfigRepository;
import com.hisense.dataservice.repository.DataApiModelRepository;
import com.hisense.dataservice.service.DataApiServiceManagementService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.interceptor.TransactionAspectSupport;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import javax.transaction.Transactional;
import java.util.Date;
import java.util.List;
import java.util.Optional;
/**
* @author : scc
* @date : 2023/03/06
**/
/**
 * Transactional write-side for data api models: persists the model itself, its
 * PUBLISH-type filter config, and the SQL generated from the join configuration.
 */
@Service
@Slf4j
public class DataApiServiceManagementServiceImpl implements DataApiServiceManagementService {
    @Autowired
    private DataApiDataSourceRepository dataApiDataSourceRepository;
    @Autowired
    private DataApiModelRepository dataApiModelRepository;
    @Autowired
    private DataApiModelConfigRepository dataApiModelConfigRepository;

    /**
     * Creates (no modelId in dto) or updates (modelId present) a data api model plus
     * its filter config; generates the full query SQL and rolls the whole transaction
     * back if generation yields an empty string.
     */
    @Transactional
    @Override
    public Result<Boolean> createOrUpdateDataApi(DataApiModelDto dataApiModelDto) {
        // Resolve the backing datasource; fail fast if the id is missing or unknown.
        Long dataSourceId = dataApiModelDto.getDataSourceId();
        if (dataSourceId == null) {
            return failure("数据源ID为空", false);
        }
        Optional<DataApiDataSource> dataApiDataSourceOptional = dataApiDataSourceRepository.findById(dataSourceId);
        if (!dataApiDataSourceOptional.isPresent()) {
            return failure("数据源不存在", false);
        }
        DataApiDataSource dataApiDataSource = dataApiDataSourceOptional.get();
        // Create a fresh model, or load the existing one to update.
        Long modelId = dataApiModelDto.getModelId();
        DataApiModel dataApiModel;
        if (null == modelId) {
            // create path
            dataApiModel = new DataApiModel();
        } else {
            // update path — the referenced model must already exist
            Optional<DataApiModel> dataApiModelOptional = dataApiModelRepository.findById(modelId);
            if (!dataApiModelOptional.isPresent()) {
                return failure("需要修改数据api服务不存在", false);
            }
            dataApiModel = dataApiModelOptional.get();
        }
        List<TableJoinRelationConfig> tableJoinRelationConfig = dataApiModelDto.getTableJoinRelationConfig();
        if (!CollectionUtils.isEmpty(tableJoinRelationConfig)) {
            dataApiModel.setTableJoinRelationConfig(tableJoinRelationConfig);
        }
        dataApiModel.setDataSourceId(dataSourceId);
        dataApiModel.setNetworkEnv(NetworkEnv.valueOf(dataApiModelDto.getNetworkEnv()));
        dataApiModel.setSystemEnv(SystemEnv.valueOf(dataApiModelDto.getSystemEnv()));
        // NOTE(review): createdTime is overwritten on updates as well — confirm that is intended.
        dataApiModel.setCreatedTime(new Date());
        dataApiModel.setName(dataApiModelDto.getModelName());
        dataApiModel.setPublishSystemId(dataApiModelDto.getPublishSystemId());
        dataApiModel.setApiID(dataApiModelDto.getApiID());
        // saveAndFlush so the generated id is available for the config row below.
        dataApiModel = dataApiModelRepository.saveAndFlush(dataApiModel);
        modelId = dataApiModel.getId();
        // Create or update the PUBLISH filter config attached to this model.
        DataApiModelConfig dataApiModelConfig;
        Optional<DataApiModelConfig> dataApiModelConfigOptional = dataApiModelConfigRepository.findByTypeIdAndType(modelId, ModelConfigType.PUBLISH);
        if (!dataApiModelConfigOptional.isPresent()) {
            dataApiModelConfig = new DataApiModelConfig();
        } else {
            dataApiModelConfig = dataApiModelConfigOptional.get();
        }
        dataApiModelConfig.setTypeId(dataApiModel.getId());
        dataApiModelConfig.setType(ModelConfigType.PUBLISH);
        List<ColumnDataRangeFilter> columnDataRangeConfig = dataApiModelDto.getColumnDataRangeConfig();
        if (!CollectionUtils.isEmpty(columnDataRangeConfig)) {
            // TODO validate that the referenced tables and columns exist
            dataApiModelConfig.setColumnDataRangeConfig(columnDataRangeConfig);
        }
        List<LineDataRangeFilter> lineDataRangeConfig = dataApiModelDto.getLineDataRangeConfig();
        if (!CollectionUtils.isEmpty(lineDataRangeConfig)) {
            // TODO validate that the referenced tables and columns exist
            dataApiModelConfig.setLineDataRangeConfig(lineDataRangeConfig);
        }
        // Generate the full query SQL from the join config; an empty result means failure.
        String fullSql = dataApiModelConfig.generateAndSetFullSql(tableJoinRelationConfig, dataApiDataSource.getType());
        if (StringUtils.isEmpty(fullSql)) {
            // Roll back everything persisted above in this transaction.
            TransactionAspectSupport.currentTransactionStatus().setRollbackOnly();
            return failure("生成数据查询sql失败", false);
        }
        dataApiModelConfigRepository.saveAndFlush(dataApiModelConfig);
        return success("创建数据api服务成功", true);
    }
}
package com.hisense.dataservice.util;
import java.io.File;
import java.io.InputStream;
/**
 * Helpers for resolving files on the classpath and in the user's home directory.
 */
public class FileUtil {

    // Utility class — not meant to be instantiated (fix: added private ctor).
    private FileUtil() {
    }

    /**
     * Opens a classpath resource as a stream.
     * Fix: dropped the pointless {@code "" +} concatenation on the resource name.
     *
     * @return the resource stream, or {@code null} if the resource does not exist
     */
    public static InputStream getResourcesFileInputStream(String fileName) {
        return Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName);
    }

    /**
     * Root path of the classpath this class was loaded from.
     * NOTE(review): getResource("/") can return null inside a jar and the path is not
     * URL-decoded — confirm this is only used in exploded deployments.
     */
    public static String getPath() {
        return FileUtil.class.getResource("/").getPath();
    }

    /**
     * Resolves a classpath-relative file, removing any stale copy and creating
     * parent directories so the caller can write to it.
     */
    public static File createNewFile(String pathName) {
        File file = new File(getPath() + pathName);
        if (file.exists()) {
            // Best effort: if deletion fails, the caller's subsequent write will surface it.
            file.delete();
        } else if (!file.getParentFile().exists()) {
            file.getParentFile().mkdirs();
        }
        return file;
    }

    /** Resolves a classpath-relative file without touching the filesystem. */
    public static File readFile(String pathName) {
        return new File(getPath() + pathName);
    }

    /** Resolves a file relative to the current user's home directory. */
    public static File readUserHomeFile(String pathName) {
        return new File(System.getProperty("user.home") + File.separator + pathName);
    }
}
\ No newline at end of file
package com.hisense.dataservice.util;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.util.Enumeration;
public class IPUtil {

    /**
     * Returns the first IPv4 address of an up, non-loopback, non-virtual network
     * interface, or "" if none could be determined.
     */
    public static String getIpAddress() {
        try {
            Enumeration<NetworkInterface> allNetInterfaces = NetworkInterface.getNetworkInterfaces();
            while (allNetInterfaces.hasMoreElements()) {
                NetworkInterface netInterface = allNetInterfaces.nextElement();
                // Loopback/virtual/down interfaces never carry the host's outward-facing IP.
                if (netInterface.isLoopback() || netInterface.isVirtual() || !netInterface.isUp()) {
                    continue;
                }
                Enumeration<InetAddress> addresses = netInterface.getInetAddresses();
                while (addresses.hasMoreElements()) {
                    InetAddress ip = addresses.nextElement();
                    if (ip instanceof Inet4Address) {
                        return ip.getHostAddress();
                    }
                }
            }
        } catch (Exception e) {
            System.err.println("IP地址获取失败" + e.toString());
        }
        return "";
    }

    /**
     * Resolves the originating client IP behind proxies/firewalls, checking the common
     * forwarding headers before falling back to the socket address.
     * Fixes: removed the redundant re-check of x-forwarded-for inside its own guard
     * (the enclosing if had just tested the same condition), the useless String cast
     * on the split result, and the redundant {@code final} on a static method.
     */
    public static String getOriginIp(HttpServletRequest request) throws IOException {
        String ip = request.getHeader("x-forwarded-for");
        if (isEmptyOrUnknown(ip)) {
            ip = resolveFromFallbackHeaders(request);
        } else if (ip.length() > 15) {
            // Multi-hop X-Forwarded-For: take the first hop that is not "unknown".
            String[] ips = ip.split(",");
            for (String candidate : ips) {
                if (!"unknown".equalsIgnoreCase(candidate)) {
                    ip = candidate;
                    break;
                }
            }
        }
        return ip;
    }

    // True when a header value is absent, empty, or the literal "unknown".
    private static boolean isEmptyOrUnknown(String ip) {
        return ip == null || ip.length() == 0 || "unknown".equalsIgnoreCase(ip);
    }

    // Walks the remaining proxy headers in priority order, ending at the socket address.
    private static String resolveFromFallbackHeaders(HttpServletRequest request) {
        String ip = request.getHeader("Proxy-Client-IP");
        if (isEmptyOrUnknown(ip)) {
            ip = request.getHeader("WL-Proxy-Client-IP");
        }
        if (isEmptyOrUnknown(ip)) {
            ip = request.getHeader("HTTP_CLIENT_IP");
        }
        if (isEmptyOrUnknown(ip)) {
            ip = request.getHeader("HTTP_X_FORWARDED_FOR");
        }
        if (isEmptyOrUnknown(ip)) {
            ip = request.getRemoteAddr();
        }
        return ip;
    }
}
package com.hisense.dataservice.util;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.hisense.dataservice.entity.DataApiDataSource;
import com.hisense.dataservice.library.model.Result;
import lombok.extern.slf4j.Slf4j;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
@Slf4j
public class JdbcUtil {
// for test
public static Result<Long> testCountSql() {
Integer result = 1100;
String threadName = Thread.currentThread().getName();
log.info("thread: {}, start...", threadName);
try {
Thread.sleep(1000 * new Random().nextInt(5));
} catch (InterruptedException e) {
e.printStackTrace();
}
log.info("thread: {}, end...", threadName);
return new Result<>(result.longValue());
}
// for test
public static Result<List<List<Object>>> testExecuteSql(Long pageNumber) {
List<List<Object>> jsonObjects = new ArrayList<>();
String threadName = Thread.currentThread().getName();
log.info("thread: {}, start...", threadName);
try {
Thread.sleep(1000 * new Random().nextInt(5));
} catch (InterruptedException e) {
e.printStackTrace();
}
for (int i = 0; i < 10; i++) {
List<Object> jsonObject = new ArrayList<>();
jsonObject.add("number:" + pageNumber + "scc_" + i);
jsonObject.add(i + 10);
jsonObject.add(i + 11);
jsonObject.add(i + 12);
jsonObject.add(i + 13);
jsonObject.add(i + 14);
jsonObjects.add(jsonObject);
}
log.info("thread: {}, end...", threadName);
return new Result<>(jsonObjects);
}
public static Result<List<JSONObject>> executeSql(DataApiDataSource datasource, String sql, List<Object> jdbcParamValues, Integer batchSize) {
String threadName = Thread.currentThread().getName();
log.info("thread: {}, sql:{}", threadName, sql);
log.info("thread: {}, jdbcParamValues 字符串:{}", threadName, JSON.toJSONString(jdbcParamValues));
List<JSONObject> result = new ArrayList<>();
DruidPooledConnection connection = null;
try {
connection = PoolManager.getPooledConnection(datasource);
log.info("thread: {}, 获取连接成功执行下一步", threadName);
PreparedStatement statement = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
//参数注入
for (int i = 1; i <= jdbcParamValues.size(); i++) {
statement.setObject(i, jdbcParamValues.get(i - 1));
}
statement.setFetchSize(batchSize);
statement.setFetchDirection(ResultSet.FETCH_REVERSE);
ResultSet rs = statement.executeQuery();
int columnCount = rs.getMetaData().getColumnCount();
List<String> columns = new ArrayList<>();
for (int i = 1; i <= columnCount; i++) {
String columnName = rs.getMetaData().getColumnLabel(i);
columns.add(columnName);
}
while (rs.next()) {
JSONObject jo = new JSONObject();
columns.stream().forEach(t -> {
try {
Object value = rs.getObject(t);
jo.put(t, value);
} catch (SQLException e) {
log.error("thread: {}, 列赋值异常信息:{}", threadName, e.getMessage());
}
});
result.add(jo);
}
log.info("thread: {}, 返回的数据:{}", threadName, result);
return new Result("200", "查询成功", result);
} catch (SQLException sqlException) {
log.error("[{}] 数据库连接失败:", datasource.getSourceName(), sqlException);
return new Result("600", sqlException.getMessage(), result);
} catch (Exception e) {
log.error("thread: {}, sql: {}, params: {}, 分页查询数据异常:", threadName, sql, jdbcParamValues, e);
return new Result("500", e.getMessage(), result);
} finally {
try {
if (connection != null)
connection.close();
} catch (SQLException e) {
log.error("关闭数据库连接异常信息:{}", e.getMessage());
}
}
}
public static Result<Long> executeCountSql(DataApiDataSource datasource, String sql, List<Object> jdbcParamValues) {
log.info("sql:{}", sql);
DruidPooledConnection connection = null;
try {
connection = PoolManager.getPooledConnection(datasource);
log.info("获取连接成功执行下一步");
PreparedStatement statement = connection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
//参数注入
for (int i = 1; i <= jdbcParamValues.size(); i++) {
statement.setObject(i, jdbcParamValues.get(i - 1));
}
statement.setFetchSize(1);
statement.setFetchDirection(ResultSet.FETCH_REVERSE);
ResultSet rs = statement.executeQuery();
Long count = 0L;
while (rs.next()) {
count = rs.getLong(1);
}
log.info("返回的数据:{}", count);
return new Result("200", "查询成功", count);
} catch (SQLException e) {
log.error("异常信息:{}", e.getMessage());
return new Result().setError("500", e.getMessage());
} finally {
try {
if (connection != null)
connection.close();
} catch (SQLException e) {
log.error("错误信息:{}", e.getMessage());
}
}
}
}
package com.hisense.dataservice.util;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicLong;
/**
 * ThreadFactory that names threads "prefix-N" (N counts up from 1) and can mark
 * them as daemons. Fix: added the missing {@code @Override} on {@link #newThread}.
 */
public class NamedThreadFactoryImpl implements ThreadFactory {
    private final AtomicLong threadIndex;
    private final String threadNamePrefix;
    private final boolean daemon;

    /** Non-daemon factory with the given name prefix. */
    public NamedThreadFactoryImpl(String threadNamePrefix) {
        this(threadNamePrefix, false);
    }

    /**
     * @param threadNamePrefix prefix for thread names; "-N" is appended per thread
     * @param daemon           whether created threads are daemon threads
     */
    public NamedThreadFactoryImpl(String threadNamePrefix, boolean daemon) {
        this.threadIndex = new AtomicLong(0L);
        this.threadNamePrefix = threadNamePrefix + "-";
        this.daemon = daemon;
    }

    @Override
    public Thread newThread(Runnable r) {
        Thread thread = new Thread(r, this.threadNamePrefix + this.threadIndex.incrementAndGet());
        thread.setDaemon(this.daemon);
        return thread;
    }
}
\ No newline at end of file
package com.hisense.dataservice.util;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.hisense.dataservice.entity.DataApiDataSource;
import lombok.extern.slf4j.Slf4j;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
@Slf4j
public class PoolManager {
private static Lock lock = new ReentrantLock();
private static Lock deleteLock = new ReentrantLock();
//所有数据源的连接池存在map里
static Map<String, DruidDataSource> map = new HashMap<>();
public static DruidDataSource getJdbcConnectionPool(DataApiDataSource ds) {
if (map.containsKey(ds.getId())) {
return map.get(ds.getId());
} else {
lock.lock();
try {
log.info(Thread.currentThread().getName() + "获取锁");
if (!map.containsKey(ds.getId())) {
DruidDataSource druidDataSource = new DruidDataSource();
druidDataSource.setName(ds.getSourceName());
druidDataSource.setUrl(ds.getUrl());
druidDataSource.setUsername(ds.getUsername());
druidDataSource.setPassword(ds.getPassword());
druidDataSource.setDriverClassName(ds.getDriver());
druidDataSource.setConnectionErrorRetryAttempts(3); //失败后重连次数
druidDataSource.setBreakAfterAcquireFailure(true);
map.put(ds.getId().toString(), druidDataSource);
log.info("创建Druid连接池成功:{}", ds.getSourceName());
}
return map.get(ds.getId().toString());
} catch (Exception e) {
log.error("sourceName: [{}], url: [{}],创建数据连接源失败: ",ds.getSourceName(), ds.getUrl(), e);
return null;
} finally {
lock.unlock();
}
}
}
//删除数据库连接池
public static void removeJdbcConnectionPool(String id) {
deleteLock.lock();
try {
DruidDataSource druidDataSource = map.get(id);
if (druidDataSource != null) {
druidDataSource.close();
map.remove(id);
}
} catch (Exception e) {
log.error(e.toString());
} finally {
deleteLock.unlock();
}
}
public static DruidPooledConnection getPooledConnection(DataApiDataSource ds) throws SQLException {
DruidDataSource pool = PoolManager.getJdbcConnectionPool(ds);
DruidPooledConnection connection = null;
if(pool != null){
connection = pool.getConnection();
log.info("获取连接成功");
return connection;
}else{
throw new SQLException("获取数据库连接源失败");
}
}
}
package com.hisense.dataservice.util;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import javax.annotation.PreDestroy;
import java.net.ConnectException;
import java.net.SocketTimeoutException;
import java.util.concurrent.*;
/**
* @author : scc
* @date : 2023/02/15
* 线程池管理
**/
@Slf4j
@Component
@Scope("prototype")
/**
 * Prototype-scoped wrapper around a ThreadPoolExecutor with named threads and
 * post-execution exception logging.
 */
public class ThreadPoolManagerUtil {
    /** Task queue backing the pool (unbounded). */
    private LinkedTransferQueue<Runnable> taskQueue = new LinkedTransferQueue<>();
    @Getter
    /** Core thread count: 2x available processors. */
    private int corePoolSize = Runtime.getRuntime().availableProcessors()*2;
    @Getter
    /** Max thread count: 5x available processors.
     * NOTE(review): with an unbounded queue the executor never grows past
     * corePoolSize, so this limit is effectively unused — confirm intent. */
    private int maximumPoolSize = Runtime.getRuntime().availableProcessors()*5;
    /** Idle keep-alive for non-core threads, in seconds. */
    private int keepAliveTime = 1;
    @Getter
    private ThreadPoolExecutor threadPool;

    /** Default pool named "corePool". */
    public ThreadPoolManagerUtil(){
        this("corePool");
    }

    /**
     * Builds the executor; afterExecute is hooked so task exceptions
     * (including those swallowed by Future) get logged.
     */
    public ThreadPoolManagerUtil(String poolName){
        threadPool = new ThreadPoolExecutor(corePoolSize, maximumPoolSize,
                keepAliveTime, TimeUnit.SECONDS, taskQueue,
                new NamedThreadFactoryImpl(poolName)){
            public void afterExecute(Runnable r, Throwable t) {
                super.afterExecute(r, t);
                printException(r, t);
            }
        };
    }

    /**
     * Logs any failure of a finished task. When the runnable is a Future, the
     * exception is extracted by calling get() on the completed future.
     */
    private void printException(Runnable r, Throwable t) {
        if (t == null && r instanceof Future<?>) {
            try {
                Future<?> future = (Future<?>) r;
                if (future.isDone())
                    future.get();
            } catch (CancellationException ce) {
                t = ce;
            } catch (ExecutionException ee) {
                t = ee.getCause();
            } catch (InterruptedException ie) {
                // Preserve the interrupt status for the worker thread.
                Thread.currentThread().interrupt();
            }
        }
        if (t != null){
            Throwable cause = t.getCause();
            if(isTimeoutThrowable(cause)){
                log.error("系统自有线程池任务调用超时异常,error_msg=="+cause.getMessage());
            }else{
                log.error("系统自有线程池任务异常,error_msg=="+t.getMessage(), t);
            }
        }
    }

    /**
     * Submits a task to the pool.
     *
     * @param paramRunnable task to run; null is ignored
     * @return the task's future, or null when the task was null
     */
    public Future<?> addExecuteTask(Runnable paramRunnable){
        if(paramRunnable == null)
            return null;
        return this.threadPool.submit(paramRunnable);
    }

    /** Callable variant of {@link #addExecuteTask(Runnable)}. */
    public Future<?> addExecuteTask(Callable<?> paramRunnable){
        if(paramRunnable == null)
            return null;
        return this.threadPool.submit(paramRunnable);
    }

    /** True when the throwable indicates a timeout or network-level failure. */
    public boolean isTimeoutThrowable(Throwable cause){
        if(cause != null && (cause instanceof TimeoutException
                || cause instanceof SocketTimeoutException
                || cause instanceof ConnectException)){
            return true;
        }else{
            return false;
        }
    }

    /** Graceful shutdown on bean destruction: running tasks finish, no new ones accepted. */
    @PreDestroy
    public void stop(){
        if(threadPool != null){
            threadPool.shutdown();
        }
    }
}
package com.hisense.dataservice.vo;
import io.swagger.annotations.ApiModelProperty;
/**
* @author : scc
* @date : 2023/02/24
**/
/**
 * Lightweight view of a data api model's datasource binding.
 * NOTE(review): unlike the sibling VOs this class has no lombok {@code @Data}
 * (no accessors are generated) — presumably an omission; left unchanged here to
 * avoid introducing a new import.
 */
public class DataApiModelVo {
    @ApiModelProperty(name = "数据源ID")
    private Long dataSourceId;
    // Fix: the "data source name" field was declared as Long; a name is a String
    // (consistent with DataSourceVo.name). The field had no accessors, so no
    // caller could depend on the old type.
    @ApiModelProperty(name = "数据源名称")
    private String dataSourceName;
}
package com.hisense.dataservice.vo;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
* @author : scc
* @date : 2023/03/06
**/
@Data
@ApiModel("数据字段")
public class DataFieldVo {
@ApiModelProperty(name = "数据字段ID")
private Long fieldId;
@ApiModelProperty(name = "数据表ID")
private Long dataTableId;
@ApiModelProperty(name = "字段名")
private String fieldName;
@ApiModelProperty(name = "字段类型")
private String filedType;
@ApiModelProperty(name = "字段描述")
private String desc;
}
package com.hisense.dataservice.vo;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.Date;
/**
* @author : scc
* @date : 2023/02/24
* 数据源VO
**/
@Data
@ApiModel("数据源")
public class DataSourceVo {
@ApiModelProperty(name = "数据源ID")
private Long id;
@ApiModelProperty(name = "数据源名称")
private String name;
@ApiModelProperty(name = "数据源类型")
private String type;
@ApiModelProperty(name = "备注")
protected String description;
@ApiModelProperty(name = "创建时间")
@JsonFormat(shape = JsonFormat.Shape.STRING,pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
protected Date createTime;
@ApiModelProperty(name = "修改时间")
@JsonFormat(shape = JsonFormat.Shape.STRING,pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
protected Date updateTime;
}
package com.hisense.dataservice.vo;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
* @author : scc
* @date : 2023/03/06
**/
@Data
@ApiModel("数据表")
public class DataTableVo {
@ApiModelProperty(name = "数据表Id")
private Long tableId;
@ApiModelProperty(name = "数据表名")
private String tableName;
@ApiModelProperty(name = "数据源ID")
private Long dataSourceId;
@ApiModelProperty(name = "数据表描述")
protected String description;
}
spring:
application:
name: data-service # 应用名称
jpa:
generate-ddl: true
database: oracle
hibernate:
ddl-auto: update
show-sql: true
datasource:
url: "jdbc:oracle:thin:@192.168.78.132:1521:helowin"
password: "system"
username: "system"
driver-class-name: oracle.jdbc.driver.OracleDriver
server:
  port: 8080 # 应用服务 WEB 访问端口
package com.hisense.dataservice;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class DataServiceApplicationTests {
    // Smoke test: passes if the Spring application context starts cleanly.
    @Test
    void contextLoads() {
    }
}
package com.hisense.dataservice.repository;
import com.hisense.dataservice.DataServiceApplication;
import com.hisense.dataservice.entity.DataApiColumn;
import com.hisense.dataservice.enums.FieldTypeEnum;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
/**
* @author : scc
* @date : 2023/02/23
**/
@SpringBootTest(classes = DataServiceApplication.class)
@RunWith(SpringRunner.class)
public class DataApiColumnRepositoryTest {
    @Autowired
    private DataApiColumnRepository dataApiColumnRepository;
    // Persists one column row against the configured database.
    // NOTE(review): this writes real data and asserts nothing — a manual smoke
    // test rather than a repeatable unit test.
    @Test
    public void testSave(){
        DataApiColumn dataApiColumn = new DataApiColumn();
        dataApiColumn.setDataTableId(1L);
        dataApiColumn.setFieldName("id");
        dataApiColumn.setFiledType(FieldTypeEnum.LONG);
        dataApiColumnRepository.save(dataApiColumn);
    }
}
package com.hisense.dataservice.repository;
import com.hisense.dataservice.DataServiceApplication;
import com.hisense.dataservice.entity.DataApiDataSource;
import com.hisense.dataservice.enums.DataSourceTypeEnum;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.List;
/**
 * Integration test for {@code DataApiDataSourceRepository}.
 *
 * @author : scc
 * @date : 2023/02/23
 **/
@SpringBootTest(classes = DataServiceApplication.class)
@RunWith(SpringRunner.class)
public class DataSourceRepositoryTest {
    @Autowired
    private DataApiDataSourceRepository dataSourceRepository;

    /**
     * Saves one data-source row, then reads the table back and verifies the
     * row is visible. The original test only printed the results, so it could
     * never fail on bad data — assertions added.
     */
    @Test
    public void testSaveAndSearch() {
        DataApiDataSource dataApiDataSource = new DataApiDataSource();
        dataApiDataSource.setSourceName("test");
        dataApiDataSource.setUsername("fff");
        dataApiDataSource.setUrl("url");
        dataApiDataSource.setPassword("pass");
        dataApiDataSource.setType(DataSourceTypeEnum.MYSQL);
        // FIX: assert the save produced a managed entity instead of ignoring it.
        DataApiDataSource saved = dataSourceRepository.save(dataApiDataSource);
        org.junit.Assert.assertNotNull("save should return the persisted entity", saved);
        // 查询 — the table must contain at least the row saved above.
        List<DataApiDataSource> all = dataSourceRepository.findAll();
        org.junit.Assert.assertFalse("findAll should see the saved row", all.isEmpty());
        for (DataApiDataSource apiDataSource : all) {
            System.out.println(apiDataSource);
            System.out.println("deleted: " + apiDataSource.getDeleted());
        }
        // 删除
        //dataSourceRepository.deleteInBatch(all);
    }
}
package com.hisense.dataservice.repository;
import com.hisense.dataservice.DataServiceApplication;
import com.hisense.dataservice.entity.DataApiTable;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
/**
 * Integration test for {@code DataApiTableRepository}.
 *
 * @author : scc
 * @date : 2023/02/23
 **/
@SpringBootTest(classes = DataServiceApplication.class)
@RunWith(SpringRunner.class)
public class DataTableRepositoryTest {
    @Autowired
    private DataApiTableRepository dataApiTableRepository;

    /**
     * Persists one table row and verifies the save call returns a managed
     * entity. The original test had no assertions at all.
     */
    @Test
    public void testSave() {
        DataApiTable dataApiTable = new DataApiTable();
        dataApiTable.setTableName("student");
        dataApiTable.setDataSourceId(11L);
        // FIX: capture and assert the result instead of discarding it.
        DataApiTable saved = dataApiTableRepository.save(dataApiTable);
        org.junit.Assert.assertNotNull("save should return the persisted entity", saved);
        // dataApiTableRepository.deleteAll();
    }
}
package com.hisense.dataservice.utils;
import com.alibaba.excel.EasyExcel;
import com.alibaba.excel.annotation.ExcelIgnore;
import com.alibaba.excel.annotation.ExcelProperty;
import com.alibaba.excel.annotation.write.style.ColumnWidth;
import com.alibaba.excel.util.ListUtils;
import lombok.Data;
import org.junit.Test;
import org.springframework.boot.test.context.SpringBootTest;
import java.util.*;
/**
 * Exploratory test of EasyExcel's write API: writes a small workbook with a
 * dynamic (runtime-built) header and generated rows.
 */
@SpringBootTest
public class ExcelTest {
    // FIX: was the hard-coded Windows path "D:\\test\\", which fails on any
    // other OS — write into the platform temp directory instead.
    private final String path = System.getProperty("java.io.tmpdir") + java.io.File.separator;

    /**
     * 最简单的写 — simplest possible write.
     * <p>
     * 1. 创建excel对应的实体对象 参照{@link DemoData}
     * <p>
     * 2. 直接写即可
     * <p>
     * (Alternative write styles — lambda data supplier, fixed-class sheet,
     * multi-sheet via ExcelWriter — were removed as dead commented-out code;
     * see the EasyExcel write documentation for those variants.)
     */
    @Test
    public void simpleWrite() {
        // 注意 simpleWrite在数据量不大的情况下可以使用(5000以内,具体也要看实际情况),数据量大参照 重复多次写入
        String fileName = path + "simpleWrite" + System.currentTimeMillis() + ".xlsx";
        // 这里 需要指定写用哪个class去写,然后写到第一个sheet,名字为模板 然后文件流会自动关闭
        // 如果这里想使用03 则 传入excelType参数即可
        EasyExcel.write(fileName)
                // 这里放入动态头
                .head(head()).sheet("模板")
                // 当然这里数据也可以用 List<List<String>> 去传入
                .doWrite(data());
    }

    /**
     * Builds a three-column header with English column titles.
     * Currently unused — kept as an alternative to {@link #head()}.
     */
    private List<List<String>> variableTitleHead() {
        List<List<String>> list = ListUtils.newArrayList();
        List<String> head0 = ListUtils.newArrayList();
        head0.add("string" + System.currentTimeMillis());
        List<String> head1 = ListUtils.newArrayList();
        head1.add("number" + System.currentTimeMillis());
        List<String> head2 = ListUtils.newArrayList();
        head2.add("date" + System.currentTimeMillis());
        list.add(head0);
        list.add(head1);
        list.add(head2);
        return list;
    }

    /** Builds the three-column dynamic header (字符串 / 数字 / 日期) used by {@link #simpleWrite()}. */
    private List<List<String>> head() {
        List<List<String>> list = ListUtils.newArrayList();
        List<String> head0 = ListUtils.newArrayList();
        head0.add("字符串" + System.currentTimeMillis());
        List<String> head1 = ListUtils.newArrayList();
        head1.add("数字" + System.currentTimeMillis());
        List<String> head2 = ListUtils.newArrayList();
        head2.add("日期" + System.currentTimeMillis());
        list.add(head0);
        list.add(head1);
        list.add(head2);
        return list;
    }

    /**
     * Generates ten rows of (string, double, date) sample data.
     * Currently unused — kept as an alternative to {@link #data()}.
     */
    private List<List<Object>> dataList() {
        List<List<Object>> list = ListUtils.newArrayList();
        for (int i = 0; i < 10; i++) {
            List<Object> data = ListUtils.newArrayList();
            data.add("字符串" + i);
            data.add(0.56);
            data.add(new Date());
            list.add(data);
        }
        return list;
    }

    /** Generates ten rows of (name, number, number) sample data for {@link #simpleWrite()}. */
    private List<List<Object>> data() {
        List<List<Object>> jsonObjects = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            List<Object> list = new ArrayList<>();
            list.add("scc_" + i);
            list.add(i + 10);
            list.add(i + 12);
            jsonObjects.add(list);
        }
        return jsonObjects;
    }

    /**
     * Sample row type for class-based EasyExcel writes.
     * FIX: made static — a non-static inner class carries a hidden reference
     * to the enclosing test instance, which EasyExcel does not need.
     */
    @Data
    static class DemoData {
        /** @ColumnWidth 字段宽度,@ExcelProperty表头名 @ExcelIgnore不导出的字段 */
        @ColumnWidth(15)
        @ExcelProperty("字符串")
        private String string;
        @ColumnWidth(20)
        @ExcelProperty("日期")
        private Date date;
        @ColumnWidth(15)
        @ExcelProperty("数据")
        private Double doubleData;
        @ExcelIgnore
        private String name;
    }
}
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论