Commit c7e6d1d8 by inrgihc

Fix pagination query errors

parent d68dc957

ProductContext.java
@@ -28,4 +28,5 @@ public class ProductContext {
   private boolean hasCatalogAndSchema;
   private Function<String, Pair<String, String>> adapter;
   private String pageSql;
+  private ThreeConsumer<Integer, Integer, List<Object>> pageConsumer;
 }

ThreeConsumer.java (new file)
package com.gitee.sqlrest.common.dto;

@FunctionalInterface
public interface ThreeConsumer<X, Y, Z> {

  void accept(X arg1, Y arg2, Z arg3);
}
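
The pageConsumer hook lets each database dialect decide both the values and the order of the two pagination bind values that fill the "?" placeholders of its pageSql template. A minimal standalone sketch of the idea, using only JDK types rather than the project's classes (the real call sites are SqlJdbcUtils and DbVarModule further down):

import java.util.ArrayList;
import java.util.List;

// Standalone sketch: a dialect-specific "page consumer" appends the two
// pagination values in whatever order its page SQL template expects them.
public class PageConsumerSketch {

  @FunctionalInterface
  interface ThreeConsumer<X, Y, Z> {
    void accept(X a, Y b, Z c);
  }

  public static void main(String[] args) {
    // MySQL-style template: "select * from (%s) alias limit ? OFFSET ?"
    ThreeConsumer<Integer, Integer, List<Object>> mysqlPageConsumer = (page, size, params) -> {
      params.add(size);               // LIMIT  -> rows per page
      params.add((page - 1) * size);  // OFFSET -> rows to skip
    };

    List<Object> parameters = new ArrayList<>(); // would already hold the query's own bind values
    mysqlPageConsumer.accept(3, 20, parameters);
    System.out.println(parameters);  // [20, 40] -> LIMIT 20 OFFSET 40 returns page 3
  }
}
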

ProductTypeEnum.java
 package com.gitee.sqlrest.common.enums;

 import com.gitee.sqlrest.common.dto.ProductContext;
+import com.gitee.sqlrest.common.dto.ThreeConsumer;
 import java.util.Arrays;
 import java.util.Collections;
 import lombok.Getter;
@@ -28,7 +29,12 @@ public enum ProductTypeEnum {
         .sqlSchemaList("SELECT `SCHEMA_NAME` FROM `information_schema`.`SCHEMATA`")
         .adapter(database -> Pair.of(database, null))
         .pageSql("select * from (%s) alias limit ? OFFSET ? ")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add(size);
+              parameters.add((page - 1) * size);
+            }
+        ).build()),
   /**
    * MariaDB database type
    */
@@ -47,7 +53,12 @@ public enum ProductTypeEnum {
         .sqlSchemaList("SELECT `SCHEMA_NAME` FROM `information_schema`.`SCHEMATA`")
         .adapter(database -> Pair.of(database, null))
         .pageSql("select * from (%s) alias limit ? OFFSET ? ")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add(size);
+              parameters.add((page - 1) * size);
+            }
+        ).build()),
   /**
    * Oracle database type
    */
@@ -65,10 +76,16 @@ public enum ProductTypeEnum {
         .urlSample("jdbc:oracle:thin:@172.17.2.10:1521:ORCL")
         .sqlSchemaList("SELECT USERNAME FROM SYS.ALL_USERS")
         .adapter(database -> Pair.of(null, database))
-        .pageSql("SELECT * FROM ( SELECT ALIAS.*, ROWNUM ROW_ID FROM ( %s ) ALIAS WHERE ROWNUM <= ? ) WHERE ROW_ID > ?")
-        .build()),
+        .pageSql(
+            "SELECT * FROM ( SELECT ALIAS.*, ROWNUM ROW_ID FROM ( %s ) ALIAS WHERE ROWNUM <= ? ) WHERE ROW_ID > ?")
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add(page * size);
+              parameters.add((page - 1) * size);
+            }
+        ).build()),
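
For the Oracle ROWNUM wrapper, the two placeholders are an inclusive upper bound and an exclusive lower bound rather than a limit and an offset, so the consumer binds page * size first and (page - 1) * size second. A quick standalone check of the arithmetic (the example values are mine, not from the commit):

// Sketch: Oracle-style ROWNUM bounds for page 3 with 20 rows per page.
public class OracleBoundsSketch {
  public static void main(String[] args) {
    int page = 3, size = 20;
    int upper = page * size;        // ROWNUM <= 60 (inclusive upper bound)
    int lower = (page - 1) * size;  // ROW_ID > 40  (exclusive lower bound)
    System.out.println("rows " + (lower + 1) + ".." + upper);  // rows 41..60
  }
}
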
   /**
-   * Microsoft SQL Server database type (>= 2005)
+   * Microsoft SQL Server database type (>= 2012)
    */
   SQLSERVER(
       ProductContext.builder()
@@ -85,7 +102,12 @@ public enum ProductTypeEnum {
         .hasCatalogAndSchema(true)
         .adapter(database -> Pair.of(null, database))
         .pageSql("SELECT * FROM (%s) ALIAS OFFSET ? ROWS FETCH NEXT ? ROWS ONLY")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add((page - 1) * size);
+              parameters.add(size);
+            }
+        ).build()),
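
Unlike the MySQL-family templates, the OFFSET … FETCH NEXT template takes the offset before the page size, which is exactly the kind of per-dialect difference the new pageConsumer hook exists to capture. A standalone sanity check with example values of my own:

// Sketch: SQL Server-style OFFSET/FETCH binding for page 3 with 20 rows per page.
public class SqlServerBindingSketch {
  public static void main(String[] args) {
    int page = 3, size = 20;
    int offset = (page - 1) * size;  // OFFSET 40 ROWS
    int fetch = size;                // FETCH NEXT 20 ROWS ONLY
    System.out.println("OFFSET " + offset + " ROWS FETCH NEXT " + fetch + " ROWS ONLY");
  }
}
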
   /**
    * PostgreSQL database type
@@ -104,7 +126,12 @@ public enum ProductTypeEnum {
         .sqlSchemaList("SELECT schema_name FROM information_schema.schemata ")
         .adapter(database -> Pair.of(null, database))
         .pageSql("select * from (%s) alias limit ? offset ? ")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add(size);
+              parameters.add((page - 1) * size);
+            }
+        ).build()),
   /**
    * DB2 database type
@@ -125,7 +152,12 @@ public enum ProductTypeEnum {
         .adapter(database -> Pair.of(null, database))
         .pageSql(
             "SELECT * FROM (SELECT TMP_PAGE.*,ROWNUMBER() OVER() AS ROW_ID FROM ( %s ) AS TMP_PAGE) ALIAS WHERE ROW_ID BETWEEN ? AND ?")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add((page - 1) * size + 1);
+              parameters.add(page * size);
+            }
+        ).build()),
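
BETWEEN is inclusive on both ends and ROWNUMBER() numbers rows from 1, so the DB2 consumer must bind the first and last row numbers of the requested page (hence the + 1 on the lower bound) rather than a limit and an offset. A standalone check with example values of my own:

// Sketch: DB2-style inclusive row-number bounds for page 3 with 20 rows per page.
public class Db2BoundsSketch {
  public static void main(String[] args) {
    int page = 3, size = 20;
    int first = (page - 1) * size + 1;  // ROW_ID BETWEEN 41 ...
    int last = page * size;             // ... AND 60
    System.out.println("ROW_ID BETWEEN " + first + " AND " + last);
  }
}
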
   /**
    * [Domestic] DM (Dameng) database type
@@ -144,7 +176,12 @@ public enum ProductTypeEnum {
         .sqlSchemaList("SELECT DISTINCT object_name FROM ALL_OBJECTS WHERE OBJECT_TYPE = 'SCH'")
         .adapter(database -> Pair.of(null, database))
         .pageSql("select * from (%s) alias limit ? offset ? ")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add(size);
+              parameters.add((page - 1) * size);
+            }
+        ).build()),
   /**
    * [Domestic] Kingbase database type
@@ -163,7 +200,12 @@ public enum ProductTypeEnum {
         .sqlSchemaList("SELECT schema_name FROM information_schema.schemata ")
         .adapter(database -> Pair.of(null, database))
         .pageSql("select * from (%s) alias limit ? offset ? ")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add(size);
+              parameters.add((page - 1) * size);
+            }
+        ).build()),
   /**
    * [Domestic] Oscar (Shentong) database type
@@ -182,7 +224,12 @@ public enum ProductTypeEnum {
         .sqlSchemaList("SELECT schema_name FROM information_schema.schemata ")
         .adapter(database -> Pair.of(null, database))
         .pageSql("select * from (%s) alias limit ? offset ? ")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add(size);
+              parameters.add((page - 1) * size);
+            }
+        ).build()),
   /**
    * [Domestic] GBase8A database type
@@ -201,7 +248,12 @@ public enum ProductTypeEnum {
         .sqlSchemaList("SELECT schema_name FROM information_schema.schemata ")
         .adapter(database -> Pair.of(null, database))
         .pageSql("select * from (%s) alias limit ? offset ? ")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add(size);
+              parameters.add((page - 1) * size);
+            }
+        ).build()),
   /**
    * Sybase database type
@@ -220,7 +272,12 @@ public enum ProductTypeEnum {
         .sqlSchemaList("SELECT schema_name FROM information_schema.schemata ")
         .adapter(database -> Pair.of(null, database))
         .pageSql("SELECT * FROM (%s) ALIAS OFFSET ? ROWS FETCH NEXT ? ROWS ONLY")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add((page - 1) * size);
+              parameters.add(size);
+            }
+        ).build()),
   /**
    * Hive database type
@@ -239,7 +296,12 @@ public enum ProductTypeEnum {
         .sqlSchemaList("SHOW DATABASES")
         .adapter(database -> Pair.of(null, database))
         .pageSql("select * from (%s) alias limit ? OFFSET ? ")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add(size);
+              parameters.add((page - 1) * size);
+            }
+        ).build()),
   /**
    * SQLite v3 database type
@@ -260,7 +322,12 @@ public enum ProductTypeEnum {
         .retSchemaList(Collections.singletonList("sqlite_master"))
         .adapter(database -> Pair.of(null, database))
         .pageSql("select * from (%s) alias limit ? offset ? ")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add(size);
+              parameters.add((page - 1) * size);
+            }
+        ).build()),
   /**
    * OpenGauss database type
@@ -279,7 +346,12 @@ public enum ProductTypeEnum {
         .sqlSchemaList("SELECT schema_name FROM information_schema.schemata ")
         .adapter(database -> Pair.of(null, database))
         .pageSql("select * from (%s) alias limit ? offset ? ")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add(size);
+              parameters.add((page - 1) * size);
+            }
+        ).build()),
   /**
    * ClickHouse database type
@@ -298,7 +370,12 @@ public enum ProductTypeEnum {
         .sqlSchemaList("SELECT schema_name FROM information_schema.schemata ")
         .adapter(database -> Pair.of(null, database))
         .pageSql("select * from (%s) alias limit ? OFFSET ? ")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add(size);
+              parameters.add((page - 1) * size);
+            }
+        ).build()),
   /**
    * Doris database type
@@ -318,7 +395,12 @@ public enum ProductTypeEnum {
         .sqlSchemaList("SELECT `SCHEMA_NAME` FROM `information_schema`.`SCHEMATA`")
         .adapter(database -> Pair.of(database, null))
         .pageSql("select * from (%s) alias limit ? OFFSET ? ")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add(size);
+              parameters.add((page - 1) * size);
+            }
+        ).build()),
   /**
    * StarRocks database type
@@ -338,10 +420,15 @@ public enum ProductTypeEnum {
         .sqlSchemaList("SELECT `SCHEMA_NAME` FROM `information_schema`.`SCHEMATA`")
         .adapter(database -> Pair.of(database, null))
         .pageSql("select * from (%s) alias limit ? OFFSET ? ")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add(size);
+              parameters.add((page - 1) * size);
+            }
+        ).build()),
   /**
-   * OceanBase database type
+   * OceanBase database type (MySQL dialect)
    */
   OCEANBASE(
       ProductContext.builder()
@@ -358,7 +445,12 @@ public enum ProductTypeEnum {
         .sqlSchemaList("SELECT `SCHEMA_NAME` FROM `information_schema`.`SCHEMATA`")
         .adapter(database -> Pair.of(database, null))
         .pageSql("select * from (%s) alias limit ? OFFSET ? ")
-        .build()),
+        .pageConsumer(
+            (page, size, parameters) -> {
+              parameters.add(size);
+              parameters.add((page - 1) * size);
+            }
+        ).build()),
   ;

   private ProductContext context;
@@ -399,6 +491,10 @@ public enum ProductTypeEnum {
     return this.context.getTestSql();
   }

+  public ThreeConsumer getPageConsumer() {
+    return this.context.getPageConsumer();
+  }
+
   public boolean hasDatabaseName() {
     return !Arrays.asList(DM, SQLITE3, MYSQL, MARIADB, GBASE8A).contains(this);
   }

SqlExecutorService.java
 package com.gitee.sqlrest.core.exec.engine.impl;

-import cn.hutool.core.util.NumberUtil;
-import com.gitee.sqlrest.common.consts.Constants;
 import com.gitee.sqlrest.common.enums.NamingStrategyEnum;
 import com.gitee.sqlrest.common.enums.ProductTypeEnum;
 import com.gitee.sqlrest.core.exec.engine.AbstractExecutorEngine;
+import com.gitee.sqlrest.core.util.PageSizeUtils;
 import com.gitee.sqlrest.core.util.SqlJdbcUtils;
 import com.gitee.sqlrest.persistence.entity.ApiContextEntity;
 import com.gitee.sqlrest.template.Configuration;
@@ -35,12 +34,8 @@ public class SqlExecutorService extends AbstractExecutorEngine {
     for (ApiContextEntity sql : scripts) {
       SqlTemplate template = cfg.getTemplate(sql.getSqlText());
       SqlMeta sqlMeta = template.process(params);
-      int page = (null == params.get(Constants.PARAM_PAGE_NUMBER))
-          ? 1
-          : NumberUtil.parseInt(params.get(Constants.PARAM_PAGE_NUMBER).toString());
-      int size = (null == params.get(Constants.PARAM_PAGE_SIZE))
-          ? 10
-          : NumberUtil.parseInt(params.get(Constants.PARAM_PAGE_SIZE).toString());
+      int page = PageSizeUtils.getPageFromParams(params);
+      int size = PageSizeUtils.getSizeFromParams(params);
       dataList.add(SqlJdbcUtils.execute(productType, connection, sqlMeta, strategy, page, size));
     }
     connection.commit();

DbVarModule.java
@@ -7,6 +7,7 @@ import com.gitee.sqlrest.common.enums.ProductTypeEnum;
 import com.gitee.sqlrest.core.exec.annotation.Comment;
 import com.gitee.sqlrest.core.exec.annotation.Module;
 import com.gitee.sqlrest.core.util.ConvertUtils;
+import com.gitee.sqlrest.core.util.PageSizeUtils;
 import com.gitee.sqlrest.template.Configuration;
 import com.gitee.sqlrest.template.SqlMeta;
 import com.gitee.sqlrest.template.SqlTemplate;
@@ -109,14 +110,9 @@ public class DbVarModule {
     SqlMeta sqlMeta = template.process(params);
     String pageSql = productType.getPageSql(sqlMeta.getSql());
     List<Object> parameters = sqlMeta.getParameter();
-    int page = (null == params.get(Constants.PARAM_PAGE_NUMBER))
-        ? 1
-        : NumberUtil.parseInt(params.get(Constants.PARAM_PAGE_NUMBER).toString());
-    int size = (null == params.get(Constants.PARAM_PAGE_SIZE))
-        ? 10
-        : NumberUtil.parseInt(params.get(Constants.PARAM_PAGE_SIZE).toString());
-    parameters.add(((page - 1) * size) < 0 ? 0 : (page - 1) * size);
-    parameters.add(size);
+    int page = PageSizeUtils.getPageFromParams(params);
+    int size = PageSizeUtils.getSizeFromParams(params);
+    this.productType.getPageConsumer().accept(page, size, parameters);
     return build(jdbcTemplate.queryForList(pageSql, parameters.toArray()));
   }

FirewallFilterService.java
 package com.gitee.sqlrest.core.gateway;

-import static com.gitee.sqlrest.common.consts.Constants.GATEWAY_APPLICATION_NAME;
-import com.gitee.sqlrest.common.dto.ResultEntity;
 import com.gitee.sqlrest.common.enums.OnOffEnum;
 import com.gitee.sqlrest.common.enums.WhiteBlackEnum;
 import com.gitee.sqlrest.core.dto.UpdateFirewallRulesRequest;
 import com.gitee.sqlrest.persistence.dao.FirewallRulesDao;
 import com.gitee.sqlrest.persistence.entity.FirewallRulesEntity;
 import java.util.Arrays;
-import java.util.List;
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
@@ -18,10 +14,7 @@ import javax.annotation.Resource;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.logging.log4j.util.Strings;
-import org.springframework.cloud.client.ServiceInstance;
-import org.springframework.cloud.client.discovery.DiscoveryClient;
 import org.springframework.stereotype.Service;
-import org.springframework.web.client.RestTemplate;

 @Slf4j
 @Service
@@ -31,8 +24,6 @@ public class FirewallFilterService {

   @Resource
   private FirewallRulesDao firewallRulesDao;
-  @Resource
-  private DiscoveryClient discoveryClient;

   @PostConstruct
   public void refresh() {
@@ -68,18 +59,7 @@ public class FirewallFilterService {

   public void updateFirewallRules(UpdateFirewallRulesRequest request) {
     firewallRulesDao.update(request.getStatus(), request.getMode(), request.getAddresses());
-    refresh();
-    // Sync to every gateway node over its HTTP API (how do we guarantee strong consistency?)
-    RestTemplate restTemplate = new RestTemplate();
-    List<ServiceInstance> instances = discoveryClient.getInstances(GATEWAY_APPLICATION_NAME);
-    for (ServiceInstance instance : instances) {
-      String url = String.format("%s://%s:%s/cache/refresh",
-          instance.isSecure() ? "https" : "http",
-          instance.getHost(), instance.getPort());
-      restTemplate.getForEntity(url, ResultEntity.class);
-      log.info("sync ip rule cache to gateway node: {}", instance.getHost());
-    }
+    this.refresh();
   }
 }

PageSizeUtils.java (new file)
package com.gitee.sqlrest.core.util;

import cn.hutool.core.util.NumberUtil;
import com.gitee.sqlrest.common.consts.Constants;
import java.util.Map;
import lombok.experimental.UtilityClass;

@UtilityClass
public class PageSizeUtils {

  public static int getPageFromParams(Map<String, Object> params) {
    int page = (null == params.get(Constants.PARAM_PAGE_NUMBER))
        ? 1
        : NumberUtil.parseInt(params.get(Constants.PARAM_PAGE_NUMBER).toString());
    if (page <= 0) {
      page = 1;
    }
    return page;
  }

  public static int getSizeFromParams(Map<String, Object> params) {
    int size = (null == params.get(Constants.PARAM_PAGE_SIZE))
        ? 10
        : NumberUtil.parseInt(params.get(Constants.PARAM_PAGE_SIZE).toString());
    if (size <= 0) {
      size = 10;
    }
    return size;
  }
}
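
PageSizeUtils centralizes the defaulting rules that the call sites in this commit used to duplicate: a missing page falls back to 1, a missing size to 10, and non-positive values are clamped to those defaults. A standalone sketch of that behavior ("_page" and "_size" are hypothetical stand-ins for the real Constants.PARAM_PAGE_NUMBER / PARAM_PAGE_SIZE keys):

import java.util.HashMap;
import java.util.Map;

// Standalone sketch of the defaulting/clamping rules; "_page" and "_size" are
// hypothetical stand-ins for Constants.PARAM_PAGE_NUMBER / PARAM_PAGE_SIZE.
public class PageSizeDefaultsSketch {

  static int pageOf(Map<String, Object> params) {
    Object value = params.get("_page");
    int page = (value == null) ? 1 : Integer.parseInt(value.toString());
    return page <= 0 ? 1 : page;
  }

  static int sizeOf(Map<String, Object> params) {
    Object value = params.get("_size");
    int size = (value == null) ? 10 : Integer.parseInt(value.toString());
    return size <= 0 ? 10 : size;
  }

  public static void main(String[] args) {
    Map<String, Object> params = new HashMap<>();
    params.put("_page", "0");                                   // non-positive -> clamped to 1
    System.out.println(pageOf(params) + " " + sizeOf(params));  // prints "1 10" (size missing -> default)
  }
}
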

SqlJdbcUtils.java
@@ -19,6 +19,8 @@ import lombok.extern.slf4j.Slf4j;
 @UtilityClass
 public class SqlJdbcUtils {

+  private static final int QUERY_TIMEOUT = 300;
+
   public static boolean isQuerySQL(String sql) {
     String upperSql = sql.toUpperCase().trim();
     return upperSql.startsWith("SELECT") || upperSql.startsWith("WITH");
@@ -29,23 +31,15 @@ public class SqlJdbcUtils {
   }

   public static Object execute(ProductTypeEnum productType, Connection connection, SqlMeta sqlMeta,
-      NamingStrategyEnum strategy, int page, int size)
-      throws SQLException {
+      NamingStrategyEnum strategy, int page, int size) throws SQLException {
     List<Object> paramValues = sqlMeta.getParameter();
     boolean isQuerySql = isQuerySQL(sqlMeta.getSql());
     String sql = isQuerySql ? productType.getPageSql(sqlMeta.getSql()) : sqlMeta.getSql();
     PreparedStatement statement = connection.prepareStatement(sql);
-    statement.setQueryTimeout(300);
-    statement.setFetchSize(isMySqlConnection(connection) ? Integer.MIN_VALUE : 100);
+    statement.setQueryTimeout(QUERY_TIMEOUT);
+    statement.setFetchSize(isMySqlConnection(connection) ? Integer.MIN_VALUE : size);
     if (isQuerySql) {
-      if (page <= 0) {
-        page = 1;
-      }
-      if (size <= 0) {
-        size = 10;
-      }
-      paramValues.add(size);
-      paramValues.add((page - 1) * size);
+      productType.getPageConsumer().accept(page, size, paramValues);
     }
     for (int i = 1; i <= paramValues.size(); i++) {
       statement.setObject(i, paramValues.get(i - 1));
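
Two smaller behavioral points in this hunk: for non-MySQL connections the JDBC fetch size now follows the requested page size instead of a fixed 100, while the Integer.MIN_VALUE special case keeps MySQL's Connector/J driver in row-streaming mode; and the pagination values are appended by the dialect's pageConsumer only for query SQL, so non-query statements keep their original parameter list. A standalone sketch of how the final statement text and positional bind values line up (SQL text and values are made-up examples, not from the project):

import java.util.ArrayList;
import java.util.List;

// Sketch of the post-commit flow in SqlJdbcUtils.execute(): wrap a query in the
// dialect's page template and append the pagination values after the query's own
// bind values, then bind everything positionally.
public class PagedStatementSketch {
  public static void main(String[] args) {
    String baseSql = "SELECT id FROM demo_user WHERE status = ? ORDER BY id";
    String pageTemplate = "select * from (%s) alias limit ? OFFSET ? ";
    String pageSql = String.format(pageTemplate, baseSql);

    List<Object> paramValues = new ArrayList<>();
    paramValues.add("ACTIVE");  // existing bind value of the query itself
    paramValues.add(20);        // appended by the MySQL-style pageConsumer: LIMIT
    paramValues.add(40);        // appended by the MySQL-style pageConsumer: OFFSET (page 3, size 20)

    System.out.println(pageSql);
    System.out.println(paramValues);  // bound in order: statement.setObject(i, paramValues.get(i - 1))
  }
}
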

RefreshCacheController.java (new file)
package com.gitee.sqlrest.gateway.controller;

import com.gitee.sqlrest.common.dto.ResultEntity;
import com.gitee.sqlrest.core.gateway.FirewallFilterService;
import javax.annotation.Resource;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping("/cache")
public class RefreshCacheController {

  @Resource
  private FirewallFilterService firewallFilterService;

  @PostMapping(value = "/refresh", produces = MediaType.APPLICATION_JSON_VALUE)
  public ResultEntity refresh() {
    firewallFilterService.refresh();
    return ResultEntity.success();
  }
}
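
This endpoint gives each gateway node a local, on-demand way to rebuild its firewall-rule cache now that the push loop in FirewallFilterService is gone; the scheduled refresh in FirewallRefreshService (next file) covers the periodic case. A client-side sketch for triggering it manually (host and port are placeholders; note the mapping is a POST):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Sketch: trigger a cache refresh on one gateway node.
// localhost:8080 is a placeholder, not a value from the project.
public class RefreshCacheClientSketch {
  public static void main(String[] args) throws Exception {
    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create("http://localhost:8080/cache/refresh"))
        .POST(HttpRequest.BodyPublishers.noBody())
        .build();
    HttpResponse<String> response = HttpClient.newHttpClient()
        .send(request, HttpResponse.BodyHandlers.ofString());
    System.out.println(response.statusCode() + " " + response.body());
  }
}
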

FirewallRefreshService.java (new file)
package com.gitee.sqlrest.gateway.filter;

import com.gitee.sqlrest.core.gateway.FirewallFilterService;
import javax.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.event.EventListener;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

@Slf4j
@Component
public class FirewallRefreshService {

  @Resource
  private FirewallFilterService firewallFilterService;

  /* Runs every 30 seconds */
  @EventListener(ApplicationReadyEvent.class)
  @Scheduled(cron = "${cron.firewall.expression:0/30 * * * * ?}")
  public void loadFlowRules() {
    try {
      firewallFilterService.refresh();
    } catch (Exception e) {
      log.error("load firewall rules failed:{}", e.getMessage(), e);
    }
  }
}