master
hujunbo 3 years ago
parent be59d6b2ad
commit 0bfc0fd1c0

@ -10,7 +10,7 @@ import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
* *
* @author ruoyi * @author ruoyi
*/ */
@SpringBootApplication(exclude = { DataSourceAutoConfiguration.class , MongoAutoConfiguration.class}) @SpringBootApplication(exclude = { DataSourceAutoConfiguration.class })
public class RuoYiApplication public class RuoYiApplication
{ {
public static void main(String[] args) public static void main(String[] args)

@ -1,13 +0,0 @@
package com.ruoyi;
import com.ruoyi.biemo.nlp.NamedEntity;
// Ad-hoc manual smoke test for the NLP named-entity extractor; not wired into the application.
public class Test {
// Runs named-entity recognition over a fixed Chinese sample sentence and prints the tagged output.
public static void main(String[] args) throws Exception {
String text = "巴拉克·奥巴马是美国总统。他在2008年当选?今年的美国总统是特朗普?普京的粉丝";
//System.out.println(new Segmentation(text).getSegtext());
//System.out.println(new SPTree(text).getPrasetext());
System.out.println(new NamedEntity(text).getNertext());
}
}

@ -1,8 +1,18 @@
# 数据源配置 # 数据源配置
spring: spring:
data:
mongodb:
#uri: mongodb://${mongo.servers:47.107.244.115}:${mongo.port:27017}/${mongo.db:makesoft}
database: makesoft
host: 47.107.244.115
port: 27017
username: makesoft
password: makesoft
#authentication-database: admin
#uri: mongodb://admin:123456@47.107.244.115:27017/learning?authSource=admin
elasticsearch: elasticsearch:
rest: rest:
uris: http://59.110.45.20:10000 uris: http://47.107.244.115:9200
datasource: datasource:
type: com.alibaba.druid.pool.DruidDataSource type: com.alibaba.druid.pool.DruidDataSource
driverClassName: com.mysql.cj.jdbc.Driver driverClassName: com.mysql.cj.jdbc.Driver

@ -26,27 +26,6 @@
<groupId>com.ruoyi</groupId> <groupId>com.ruoyi</groupId>
<artifactId>ruoyi-common</artifactId> <artifactId>ruoyi-common</artifactId>
</dependency> </dependency>
<dependency>
<groupId>edu.stanford.nlp</groupId>
<artifactId>stanford-corenlp</artifactId>
<version>${corenlp.version}</version>
</dependency>
<dependency>
<groupId>edu.stanford.nlp</groupId>
<artifactId>stanford-corenlp</artifactId>
<version>${corenlp.version}</version>
<classifier>models</classifier>
</dependency>
<dependency>
<groupId>edu.stanford.nlp</groupId>
<artifactId>stanford-corenlp</artifactId>
<version>${corenlp.version}</version>
<classifier>models-chinese</classifier>
</dependency>
<dependency> <dependency>
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-mongodb</artifactId> <artifactId>spring-boot-starter-data-mongodb</artifactId>
@ -56,11 +35,6 @@
<artifactId>hutool-all</artifactId> <artifactId>hutool-all</artifactId>
<version>${huTool.version}</version> <version>${huTool.version}</version>
</dependency> </dependency>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-client</artifactId>
<version>7.9.3</version>
</dependency>
<dependency> <dependency>
<groupId>org.elasticsearch.client</groupId> <groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-high-level-client</artifactId> <artifactId>elasticsearch-rest-high-level-client</artifactId>
@ -70,11 +44,6 @@
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-elasticsearch</artifactId> <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>7.9.3</version>
</dependency>
<dependency> <dependency>
<groupId>org.projectlombok</groupId> <groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId> <artifactId>lombok</artifactId>

@ -27,8 +27,8 @@ public class TestESController {
} }
@GetMapping(value = "/get") @GetMapping(value = "/get")
public List<Topic> get() { public List<Topic> get(String keyword) {
return testService.search(new SearchSourceBuilder()); return testService.match(keyword);
} }
@GetMapping(value = "/test111") @GetMapping(value = "/test111")

@ -25,6 +25,4 @@ public class Topic {
private String answer; private String answer;
private String three; private String three;
private String checkvalue; private String checkvalue;
} }

@ -1,11 +1,18 @@
package com.ruoyi.biemo.elasticsearch.service; package com.ruoyi.biemo.elasticsearch.service;
import com.github.pagehelper.util.StringUtil;
import com.ruoyi.biemo.elasticsearch.entity.Topic; import com.ruoyi.biemo.elasticsearch.entity.Topic;
import com.ruoyi.biemo.elasticsearch.util.EsService; import com.ruoyi.biemo.elasticsearch.util.EsService;
import com.ruoyi.biemo.mongodb.utils.MongoHelper;
import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Objects;
/** /**
* @author zcw * @author zcw
@ -21,12 +28,34 @@ public class TestService extends EsService<Topic> {
return topics; return topics;
} }
public List<Topic> match() { public List<Topic> match(String keyword) {
return esLambdaQuery().fuzzyAll(Topic::getContent, Fuzziness.TWO, "abc").query(); return esLambdaQuery().fuzzyAll(Topic::getContent, Fuzziness.TWO, keyword).query();
} }
public void delete() { public void delete() {
esLambdaQuery().eq(Topic::getId,1).delete(); esLambdaQuery().eq(Topic::getId,1).delete();
} }
// Returns null: no explicit mapping is supplied for the Topic index.
// NOTE(review): EsService.createIndex passes this value to CreateIndexRequest.mapping(...) —
// confirm the client tolerates a null mapping there.
@Override
public XContentBuilder buildMappingContext() {
return null;
}
/**
 * Loads the row backing a single ES hit from MongoDB, using the hit id as the document id.
 *
 * @param context the originating query (unused here)
 * @param hit     the ES search hit to resolve
 * @return the Topic read from MongoDB, or null when the hit has no usable id
 */
@Override
protected Topic loadData(SearchSourceBuilder context, SearchHit hit) {
    String id = hit.getId();
    // StringUtil.isEmpty already treats null as empty, so the former Objects.isNull check was redundant.
    if (StringUtil.isEmpty(id)) {
        return null;
    }
    // NOTE(review): instantiates MongoHelper directly rather than using an injected bean — confirm intentional.
    return new MongoHelper().findById(id, Topic.class);
}
/**
 * Resolves a batch of ES hits to Topic rows in a single MongoDB round trip.
 * Hits without an id are skipped; the remaining ids are looked up together.
 *
 * @param context the originating query (unused here)
 * @param hitArr  the ES hits to resolve
 * @return the Topics read from MongoDB for the collected ids
 */
@Override
public List<Topic> batchLoadData(SearchSourceBuilder context, SearchHit[] hitArr) {
    List<String> docIds = new ArrayList<>();
    for (SearchHit searchHit : hitArr) {
        String docId = searchHit.getId();
        if (Objects.isNull(docId) || StringUtil.isEmpty(docId)) {
            continue;
        }
        docIds.add(docId);
    }
    return new MongoHelper().findListByIds(docIds, Topic.class);
}
} }

@ -1,41 +1,68 @@
package com.ruoyi.biemo.elasticsearch.util; package com.ruoyi.biemo.elasticsearch.util;
import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSON;
import com.github.pagehelper.Page;
import com.ruoyi.biemo.elasticsearch.annotation.EsId; import com.ruoyi.biemo.elasticsearch.annotation.EsId;
import com.ruoyi.biemo.elasticsearch.function.GFunction; import com.ruoyi.biemo.elasticsearch.function.GFunction;
import com.ruoyi.common.exception.CustomException;
import com.ruoyi.common.exception.ServiceException;
import lombok.Data; import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.CreateIndexResponse;
import org.elasticsearch.client.indices.GetIndexRequest; import org.elasticsearch.client.indices.GetIndexRequest;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.sort.SortOrder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.elasticsearch.annotations.Document; import org.springframework.data.elasticsearch.annotations.Document;
import java.io.IOException;
import java.lang.reflect.*; import java.lang.reflect.*;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/** /**
* @author zcw * @author hjb
* @version 1.0 * @version 1.0
* @date 2021/1/14 10:47 * @date 2022/8/30 10:47
*/ */
public abstract class EsService<T> { public abstract class EsService<T> {
// Per-subclass logger (uses the runtime class name).
protected Logger logger = LoggerFactory.getLogger(getClass());
// ES default max result window (10000 rows); scroll search takes over beyond this.
int MAX_RESULT_WINDOW = 10000;
// Batch size for each database fetch while bulk-building an index.
int BATCH_SIZE = 1000;
// Switch to batch loading when >= 200 rows are resolved, to cut database round trips.
int BATCH_LOAD_SIZE = 200;
// Keep-alive window for ES scroll contexts.
TimeValue SCROLL_TIME = TimeValue.timeValueMinutes(1);
@Autowired @Autowired
private RestHighLevelClient client; private RestHighLevelClient client;
@ -45,6 +72,28 @@ public abstract class EsService<T> {
private Method getId; private Method getId;
// Builds the ES mapping for this entity's index; implemented by subclasses.
public abstract XContentBuilder buildMappingContext();
// Loads one result row for a single hit (e.g. from MongoDB/MySQL keyed by the hit id).
protected abstract T loadData(SearchSourceBuilder context,SearchHit hit);
/**
 * Batch-loads result rows for an array of hits in one round trip.
 *
 * @param context the originating query, forwarded to the loader
 * @param hitArr  the ES hits to resolve
 * @return the loaded entities
 */
public abstract List<T> batchLoadData(SearchSourceBuilder context,SearchHit[] hitArr);
/**
 * Builds index settings with the given primary-shard and replica counts.
 */
protected Settings.Builder createSettings(int shards, int replicas){
// Set number of primary shards and replicas per shard.
return Settings.builder().put("index.number_of_shards", shards).put("index.number_of_replicas", replicas);
}
protected EsService() { protected EsService() {
Type type = this.getClass().getGenericSuperclass(); Type type = this.getClass().getGenericSuperclass();
ParameterizedType parameterizedType = (ParameterizedType) type; ParameterizedType parameterizedType = (ParameterizedType) type;
@ -70,6 +119,49 @@ public abstract class EsService<T> {
} }
} }
/**
 * Creates (or rebuilds) an index with the given shard/replica settings and the
 * subclass-provided mapping.
 *
 * @param shards    number of primary shards
 * @param replicas  number of replicas per shard
 * @param rebuild   when true and the index exists, delete it first and recreate it
 * @param indexName name of the index to create
 * @return true when the create request was acknowledged by the cluster
 */
protected boolean createIndex(int shards, int replicas, boolean rebuild, String indexName) {
    boolean result = false;
    try {
        // Check whether an index with this name already exists.
        result = client.indices().exists(new GetIndexRequest(indexName), RequestOptions.DEFAULT);
    } catch (IOException e) {
        // Pass the Throwable as the last argument without a "{}" placeholder:
        // SLF4J logs the stack trace only when the exception is not consumed by a placeholder.
        logger.error("检查索引是否存在错误,请检查索引服务是否启动!", e);
        throw new CustomException("检查索引是否存在错误", e);
    }
    if (result && rebuild) {
        try {
            // Index exists and a rebuild was requested: drop it before recreating.
            client.indices().delete(new DeleteIndexRequest(indexName), RequestOptions.DEFAULT);
        } catch (IOException e) {
            logger.error("删除索引错误,请检查索引服务是否启动!", e);
            throw new CustomException("删除索引错误", e);
        }
    }
    CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
    // Apply the shard and replica settings.
    createIndexRequest.settings(createSettings(shards, replicas));
    try {
        // Attach the subclass-provided mapping for this index.
        // NOTE(review): buildMappingContext() may return null (see TestService) — confirm the client accepts that.
        createIndexRequest.mapping(buildMappingContext());
        // Send the create-index request and report whether the cluster acknowledged it.
        CreateIndexResponse createIndexResponse = client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
        result = createIndexResponse.isAcknowledged();
    } catch (IOException e) {
        logger.error("创建索引出错", e);
        throw new CustomException("创建索引出错", e);
    }
    return result;
}
public boolean indexExist() throws Exception { public boolean indexExist() throws Exception {
GetIndexRequest request = new GetIndexRequest(index); GetIndexRequest request = new GetIndexRequest(index);
request.local(false); request.local(false);
@ -123,7 +215,6 @@ public abstract class EsService<T> {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
} }
public List<T> search(SearchSourceBuilder builder) { public List<T> search(SearchSourceBuilder builder) {
SearchRequest request = new SearchRequest(index); SearchRequest request = new SearchRequest(index);
request.source(builder); request.source(builder);
@ -140,6 +231,228 @@ public abstract class EsService<T> {
} }
} }
/**
 * Paged search. Depending on {@code searchFrom}, rows are loaded either from the
 * backing store (e.g. MongoDB/MySQL) via {@link #loadData} or parsed straight from
 * the ES hit {@code _source} JSON.
 *
 * @param builder    query definition; its from()/size() drive the pagination
 * @param searchFrom true to resolve rows from the backing store, false to parse ES _source
 * @return a PageHelper page holding the loaded rows and the total hit count
 */
public Page<T> searchPage(SearchSourceBuilder builder, boolean searchFrom) {
    try {
        List<SearchHit> content = new ArrayList<>();
        // search(...) fills `content` with the page's hits and returns the total hit count.
        long total = search(content, builder);
        List<T> res;
        if (searchFrom) {
            res = sequenceLoadData(content);
        } else {
            res = esLoadData(content);
        }
        // NOTE(review): search(...) may have mutated builder's from/size by this point —
        // confirm the page number/size recorded here match the caller's originals.
        Page<T> page = new Page<>(builder.from(), builder.size());
        page.addAll(res);
        page.setTotal(total);
        return page;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Resolves hits one at a time through {@link #loadData}, dropping hits whose
 * loader returned null.
 *
 * @param content the hits to resolve
 * @return the successfully loaded entities, in hit order
 */
protected List<T> sequenceLoadData(List<SearchHit> content) {
    List<T> loaded = new ArrayList<>();
    for (SearchHit hit : content) {
        T row = loadData(null, hit);
        if (row != null) {
            loaded.add(row);
        }
    }
    return loaded;
}
/**
 * Parses each hit's {@code _source} JSON directly into the entity type,
 * dropping hits that deserialize to null.
 *
 * @param content the hits to parse
 * @return the parsed entities, in hit order
 */
protected List<T> esLoadData(List<SearchHit> content) {
    return content.stream()
            .map(hit -> JSON.parseObject(hit.getSourceAsString(), entity))
            .filter(Objects::nonNull)
            .collect(Collectors.toList());
}
/**
 * Executes the query and fills {@code content} with the hits of the requested page,
 * switching to scroll search when the page reaches past ES's max result window (10000).
 *
 * @param content             output list receiving the hits of the requested page
 * @param searchSourceBuilder query; its from() is treated as a 1-based page NUMBER and
 *                            size() as the page size (NOTE(review): unusual reuse of
 *                            from() — confirm callers set it as a page number, not an offset)
 * @return total number of hits matching the query
 */
protected long search(List<SearchHit> content, SearchSourceBuilder searchSourceBuilder){
// True while the request may still need the full (un-paged) result set.
boolean flag = true;
// Total hit count.
long total = 0;
// Page size.
int pageSize = searchSourceBuilder.size();
// Page number (read from from(); see note above).
int pageNo = searchSourceBuilder.from();
// 0-based offset of the first requested row.
// NOTE(review): pageNo == 0 with pageSize > 0 yields a negative offset — confirm callers never pass that.
int from = pageNo<0 ? 0 : (pageNo - 1) * pageSize;
// 1-based index of the first requested row.
int firstRowNum = from + 1 ;
// 1-based index of the last requested row.
int lastRowNum = from + pageSize;
// Build the search request.
SearchRequest searchRequest = new SearchRequest(index);
// Attach the (possibly mutated below) source builder.
searchRequest.source(searchSourceBuilder);
// No paging requested: return at most MAX_RESULT_WINDOW (10000) hits.
if(pageNo==0 && pageSize==0){
searchSourceBuilder.from(0).size(MAX_RESULT_WINDOW);
}else{
// Paged request fully inside the max result window:
// a plain from/size query suffices.
if(lastRowNum<=MAX_RESULT_WINDOW){
searchSourceBuilder.from(from).size(pageSize);
flag = false;
}
}
SearchResponse searchResponse;
try {
// Requested page extends beyond row 10000: must use scroll search.
if (lastRowNum > MAX_RESULT_WINDOW) {
// Scroll search collects the window [firstRowNum, lastRowNum].
total = searchScroll(content,firstRowNum,lastRowNum,searchSourceBuilder,searchRequest);
}else{
searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
SearchHits hits = searchResponse.getHits();
total = hits.getTotalHits().value;
// Un-paged request whose total exceeds 10000: scroll for the remainder.
if(total>MAX_RESULT_WINDOW&&flag){
searchScroll(content,firstRowNum,(int)total,searchSourceBuilder,searchRequest);
}else{
content = loadDataByResponse(content, searchResponse);
}
}
} catch (IOException e) {
logger.error("查询索引错误,错误如下:{}",e);
throw new CustomException("查询索引出错",e);
}
return total;
}
/**
 * Copies hits from a search response into {@code searchHitList}.
 * Large batches (>= BATCH_LOAD_SIZE) are added wholesale so they can be batch-loaded
 * later; smaller batches are added one by one, skipping hits without an id.
 *
 * @param searchHitList  accumulator for the page's hits
 * @param searchResponse response whose hits are appended
 * @return the same accumulator list
 */
private List<SearchHit> loadDataByResponse(List<SearchHit> searchHitList, SearchResponse searchResponse) {
SearchHits hits = searchResponse.getHits();
// Many hits: add them all at once so later loading stays batched (fewer DB round trips).
SearchHit[] hitArr = hits.getHits();
if(Objects.nonNull(hitArr)&&hitArr.length>=BATCH_LOAD_SIZE){
searchHitList.addAll(Arrays.asList(hitArr));
}else{
// Few hits: add one by one, dropping hits that carry no id.
// NOTE(review): the bulk branch above does NOT apply this id filter — confirm that asymmetry is intended.
for (SearchHit hit : hitArr) {
String id=hit.getId();
if (StringUtils.isBlank(id)){
continue;
}
searchHitList.add(hit);
}
}
return searchHitList;
}
/**
 * Scroll search used when the requested window [firstRowNum, lastRowNum] reaches past
 * ES's max result window (10000). Pages through scroll batches of MAX_RESULT_WINDOW hits
 * and copies the slice belonging to the requested rows into {@code searchHitList}.
 *
 * @param searchHitList       output list receiving the hits of the requested page
 * @param firstRowNum         1-based index of the first requested row
 * @param lastRowNum          1-based index of the last requested row
 * @param searchSourceBuilder query definition (its size is forced to MAX_RESULT_WINDOW here)
 * @param searchRequest       request the builder is attached to
 * @return total number of hits matching the query
 */
private long searchScroll(List<SearchHit> searchHitList, int firstRowNum, int lastRowNum, SearchSourceBuilder searchSourceBuilder, SearchRequest searchRequest) {
    long total = 0L;
    SearchResponse searchResponse;
    // Start offset of the wanted slice within the current scroll batch.
    int startPosition;
    // End offset of the wanted slice within the current scroll batch.
    int endPosition;
    // Number of rows requested.
    int pageSize = lastRowNum - firstRowNum + 1;
    // Scroll batch that contains the first requested row.
    int firstScrollCursor = firstRowNum / MAX_RESULT_WINDOW;
    // Scroll batch that contains the last requested row.
    int lastScrollCursor = lastRowNum / MAX_RESULT_WINDOW;
    // Number of scroll continuations performed so far.
    int scrollCount = 0;
    // Scroll context id, refreshed on every continuation.
    String scrollId;
    try {
        searchSourceBuilder.size(MAX_RESULT_WINDOW);
        searchRequest.scroll(SCROLL_TIME);
        // Initial scroll query: fetches the first MAX_RESULT_WINDOW hits plus a scroll id.
        searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
        SearchHits hits = searchResponse.getHits();
        total = hits.getTotalHits().value;
        // Scroll id valid for SCROLL_TIME (first batch).
        scrollId = searchResponse.getScrollId();
        // Part of the requested window may sit inside the first 10000 rows: take it now.
        if (firstRowNum < MAX_RESULT_WINDOW) {
            startPosition = firstRowNum - scrollCount * MAX_RESULT_WINDOW > 0 ? firstRowNum - scrollCount * MAX_RESULT_WINDOW : 0;
            endPosition = lastRowNum - scrollCount * MAX_RESULT_WINDOW > MAX_RESULT_WINDOW ? MAX_RESULT_WINDOW : lastRowNum - scrollCount * MAX_RESULT_WINDOW;
            loopLoadDataByPosition(searchHitList, startPosition, endPosition, hits);
        }
        if (StringUtils.isNotEmpty(scrollId)) {
            // Keep scrolling until the batch containing the last requested row is reached.
            while (scrollCount < lastScrollCursor) {
                searchResponse = searchScroll(scrollId);
                scrollId = searchResponse.getScrollId();
                scrollCount++;
                // Only batches at or beyond the first requested row contribute hits.
                if (scrollCount >= firstScrollCursor) {
                    // Remaining capacity of the requested page.
                    int capacity = pageSize - searchHitList.size();
                    if (capacity > 0) {
                        hits = searchResponse.getHits();
                        // Hits actually present in this batch.
                        int hitCount = hits.getHits().length;
                        startPosition = firstRowNum - scrollCount * MAX_RESULT_WINDOW > 0 ? firstRowNum - scrollCount * MAX_RESULT_WINDOW : 0;
                        endPosition = lastRowNum - scrollCount * MAX_RESULT_WINDOW > MAX_RESULT_WINDOW ? MAX_RESULT_WINDOW : lastRowNum - scrollCount * MAX_RESULT_WINDOW;
                        // Clamp to the hits actually returned.
                        endPosition = endPosition > hitCount ? hitCount : endPosition;
                        if (scrollCount == firstScrollCursor)
                            startPosition--;
                        if (firstScrollCursor != lastScrollCursor && scrollCount == lastScrollCursor)
                            endPosition = capacity > MAX_RESULT_WINDOW ? MAX_RESULT_WINDOW : capacity - startPosition;
                        loopLoadDataByPosition(searchHitList, startPosition, endPosition, hits);
                    }
                }
            }
        }
    } catch (IOException e) {
        // Was e.printStackTrace(): swallowing the failure silently returned a partial/zero result.
        // Fail loudly, consistent with search(List, SearchSourceBuilder).
        logger.error("滚动查询索引错误", e);
        throw new CustomException("查询索引出错", e);
    }
    return total;
}
/**
 * Continues an open scroll search, keeping the scroll context alive for SCROLL_TIME.
 *
 * @param scrollId id of the scroll context returned by a previous search
 * @return the next batch of scroll results
 * @throws IOException on transport failure
 */
protected SearchResponse searchScroll(String scrollId) throws IOException {
    SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId);
    scrollRequest.scroll(SCROLL_TIME);
    return client.scroll(scrollRequest, RequestOptions.DEFAULT);
}
/**
 * Copies the slice [startPosition, endPosition) of the current scroll batch and
 * resolves it to entities via {@link #batchLoadData}.
 *
 * @param context       originating query, forwarded to batchLoadData
 * @param startPosition slice start within hits (0-based)
 * @param endPosition   slice end within hits (exclusive)
 * @param hits          the current scroll batch
 * @return the entities loaded for the slice
 */
// NOTE(review): destLength is clamped to the batch length, but the copy still starts at
// startPosition — startPosition + destLength can exceed the array bounds; confirm callers
// guarantee the slice fits. This overload also appears unused within this class.
private List<T> loopLoadDataByPosition( SearchSourceBuilder context, int startPosition, int endPosition, SearchHits hits) {
int length = hits.getHits().length;
int destLength = endPosition-startPosition;
destLength = destLength>length?length:destLength;
SearchHit[] hitArr = new SearchHit[destLength];
System.arraycopy(hits.getHits(),startPosition,hitArr,0,hitArr.length);
return batchLoadData(context,hitArr);
}
/**
 * Copies the slice [startPosition, endPosition) of the current scroll batch into the
 * hit accumulator, without resolving the hits to entities.
 *
 * @param searchHitList accumulator for the requested page's hits
 * @param startPosition slice start within hits (0-based)
 * @param endPosition   slice end within hits (exclusive)
 * @param hits          the current scroll batch
 * @return the same accumulator list
 */
// NOTE(review): as in the sibling overload, startPosition + destLength can exceed the
// batch array length after clamping — confirm callers keep the slice in bounds.
private List<SearchHit> loopLoadDataByPosition(List<SearchHit> searchHitList, int startPosition, int endPosition, SearchHits hits) {
int length = hits.getHits().length;
int destLength = endPosition-startPosition;
destLength = destLength>length?length:destLength;
SearchHit[] hitArr = new SearchHit[destLength];
System.arraycopy(hits.getHits(),startPosition,hitArr,0,hitArr.length);
searchHitList.addAll(Arrays.asList(hitArr));
return searchHitList;
}
public void deleteIndex() { public void deleteIndex() {
try { try {
client.indices().delete(new DeleteIndexRequest(index), RequestOptions.DEFAULT); client.indices().delete(new DeleteIndexRequest(index), RequestOptions.DEFAULT);

@ -1,36 +1,36 @@
//package com.ruoyi.biemo.mongodb.config; package com.ruoyi.biemo.mongodb.config;
//
//import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
//import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
//import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.MongoDatabaseFactory;
//import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.DbRefResolver;
//import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
//import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper; import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper;
//import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
//import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
//
///** /**
// * 配置去掉_class字段 * _class
// * *
// * @author 陈钇蒙 * @author
// * *
// */ */
//@Configuration @Configuration
//public class MongoConverterConfig { public class MongoConverterConfig {
// @Autowired @Autowired
// private MongoDatabaseFactory mongoDatabaseFactory; private MongoDatabaseFactory mongoDatabaseFactory;
//
// @Autowired @Autowired
// private MongoMappingContext mongoMappingContext; private MongoMappingContext mongoMappingContext;
//
// @Bean @Bean
// public MappingMongoConverter mappingMongoConverter() { public MappingMongoConverter mappingMongoConverter() {
// DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDatabaseFactory); DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDatabaseFactory);
// MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext); MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext);
// // 此处是去除插入数据库的 _class 字段 // 此处是去除插入数据库的 _class 字段
// converter.setTypeMapper(new DefaultMongoTypeMapper(null)); converter.setTypeMapper(new DefaultMongoTypeMapper(null));
//
// return converter; return converter;
// } }
//} }

@ -1,120 +1,120 @@
//package com.ruoyi.biemo.mongodb.config; package com.ruoyi.biemo.mongodb.config;
//
//import java.lang.reflect.Field; import java.lang.reflect.Field;
//import java.util.Set; import java.util.Set;
//
//import javax.annotation.PostConstruct; import javax.annotation.PostConstruct;
//
//import com.ruoyi.biemo.mongodb.bean.IgnoreDocument; import com.ruoyi.biemo.mongodb.bean.IgnoreDocument;
//import com.ruoyi.biemo.mongodb.bean.InitValue; import com.ruoyi.biemo.mongodb.bean.InitValue;
//import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.MongoTemplate;
//import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.index.IndexOperations;
//import org.springframework.data.mongodb.core.index.IndexResolver; import org.springframework.data.mongodb.core.index.IndexResolver;
//import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver;
//import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Document;
//import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
//import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Criteria;
//import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Query;
//import org.springframework.data.mongodb.core.query.Update; import org.springframework.data.mongodb.core.query.Update;
//import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
//
//import com.mongodb.client.result.UpdateResult; import com.mongodb.client.result.UpdateResult;
//
//import com.ruoyi.biemo.mongodb.utils.PackageUtil; import com.ruoyi.biemo.mongodb.utils.PackageUtil;
//import cn.hutool.core.util.ClassUtil; import cn.hutool.core.util.ClassUtil;
//import cn.hutool.core.util.ReflectUtil; import cn.hutool.core.util.ReflectUtil;
//
///** /**
// * 启动时将表初始化 *
// * *
// */ */
//@Service @Service
//public class ScanNewField { public class ScanNewField {
// @Autowired @Autowired
// PackageUtil packageUtil; PackageUtil packageUtil;
// // 写链接(写到主库,可使用事务) // 写链接(写到主库,可使用事务)
// @Autowired @Autowired
// private MongoTemplate mongoTemplate; private MongoTemplate mongoTemplate;
//
// @Autowired @Autowired
// MongoMappingContext mongoMappingContext; MongoMappingContext mongoMappingContext;
//
// @PostConstruct @PostConstruct
// public void scan() { public void scan() {
// // 找到主程序包 // 找到主程序包
// Set<Class<?>> set = ClassUtil.scanPackage(packageUtil.getMainPackage()); Set<Class<?>> set = ClassUtil.scanPackage(packageUtil.getMainPackage());
// for (Class<?> clazz : set) { for (Class<?> clazz : set) {
// IgnoreDocument ignoreDocument = clazz.getAnnotation(IgnoreDocument.class); IgnoreDocument ignoreDocument = clazz.getAnnotation(IgnoreDocument.class);
// if (ignoreDocument != null) { if (ignoreDocument != null) {
// continue; continue;
// } }
//
// Document document = clazz.getAnnotation(Document.class); Document document = clazz.getAnnotation(Document.class);
// if (document == null) { if (document == null) {
// continue; continue;
// } }
//
// // 创建表 // 创建表
// if (!mongoTemplate.collectionExists(clazz)) { if (!mongoTemplate.collectionExists(clazz)) {
// mongoTemplate.createCollection(clazz); mongoTemplate.createCollection(clazz);
// System.out.println("创建了" + clazz.getSimpleName() + "表"); System.out.println("创建了" + clazz.getSimpleName() + "表");
// } }
//
// // 创建索引 // 创建索引
// IndexOperations indexOps = mongoTemplate.indexOps(clazz); IndexOperations indexOps = mongoTemplate.indexOps(clazz);
// IndexResolver resolver = new MongoPersistentEntityIndexResolver(mongoMappingContext); IndexResolver resolver = new MongoPersistentEntityIndexResolver(mongoMappingContext);
// resolver.resolveIndexFor(clazz).forEach(indexOps::ensureIndex); resolver.resolveIndexFor(clazz).forEach(indexOps::ensureIndex);
//
// Field[] fields = ReflectUtil.getFields(clazz); Field[] fields = ReflectUtil.getFields(clazz);
// for (Field field : fields) { for (Field field : fields) {
// // 获取注解 // 获取注解
// if (field.isAnnotationPresent(InitValue.class)) { if (field.isAnnotationPresent(InitValue.class)) {
// InitValue initValue = field.getAnnotation(InitValue.class); InitValue initValue = field.getAnnotation(InitValue.class);
// if (initValue.value() != null) { if (initValue.value() != null) {
//
// // 更新表默认值 // 更新表默认值
// Query query = new Query(); Query query = new Query();
// query.addCriteria(Criteria.where(field.getName()).is(null)); query.addCriteria(Criteria.where(field.getName()).is(null));
//
// Long count = mongoTemplate.count(query, clazz); Long count = mongoTemplate.count(query, clazz);
// if (count > 0) { if (count > 0) {
// Object value = null; Object value = null;
// Class<?> type = field.getType(); Class<?> type = field.getType();
//
// if (type.equals(String.class)) { if (type.equals(String.class)) {
// value = initValue.value(); value = initValue.value();
// } }
// if (type.equals(Short.class)) { if (type.equals(Short.class)) {
// value = Short.parseShort(initValue.value()); value = Short.parseShort(initValue.value());
// } }
// if (type.equals(Integer.class)) { if (type.equals(Integer.class)) {
// value = Integer.parseInt(initValue.value()); value = Integer.parseInt(initValue.value());
// } }
// if (type.equals(Long.class)) { if (type.equals(Long.class)) {
// value = Long.parseLong(initValue.value()); value = Long.parseLong(initValue.value());
// } }
// if (type.equals(Float.class)) { if (type.equals(Float.class)) {
// value = Float.parseFloat(initValue.value()); value = Float.parseFloat(initValue.value());
// } }
// if (type.equals(Double.class)) { if (type.equals(Double.class)) {
// value = Double.parseDouble(initValue.value()); value = Double.parseDouble(initValue.value());
// } }
// if (type.equals(Boolean.class)) { if (type.equals(Boolean.class)) {
// value = Boolean.parseBoolean(initValue.value()); value = Boolean.parseBoolean(initValue.value());
// } }
//
// Update update = new Update().set(field.getName(), value); Update update = new Update().set(field.getName(), value);
// UpdateResult updateResult = mongoTemplate.updateMulti(query, update, clazz); UpdateResult updateResult = mongoTemplate.updateMulti(query, update, clazz);
//
// System.out.println(clazz.getSimpleName() + "表更新了" + updateResult.getModifiedCount() + "条默认值"); System.out.println(clazz.getSimpleName() + "表更新了" + updateResult.getModifiedCount() + "条默认值");
// } }
// } }
// } }
//
// } }
//
// } }
// } }
//
//} }

@ -1,137 +1,137 @@
//package com.ruoyi.biemo.mongodb.utils; package com.ruoyi.biemo.mongodb.utils;
//
//import java.io.BufferedReader; import java.io.BufferedReader;
//import java.io.File; import java.io.File;
//import java.util.ArrayList; import java.util.ArrayList;
//import java.util.List; import java.util.List;
//import java.util.Map; import java.util.Map;
//import java.util.Set; import java.util.Set;
//
//import com.ruoyi.biemo.mongodb.bean.Page; import com.ruoyi.biemo.mongodb.bean.Page;
//import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
//import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.autoconfigure.SpringBootApplication;
//import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContext;
//import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.MongoTemplate;
//import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Document;
//import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
//import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
//
//import cn.hutool.core.io.FileUtil; import cn.hutool.core.io.FileUtil;
//import cn.hutool.core.io.IoUtil; import cn.hutool.core.io.IoUtil;
//import cn.hutool.core.util.ClassUtil; import cn.hutool.core.util.ClassUtil;
//import cn.hutool.core.util.StrUtil; import cn.hutool.core.util.StrUtil;
//import cn.hutool.core.util.ZipUtil; import cn.hutool.core.util.ZipUtil;
//import cn.hutool.json.JSONArray; import cn.hutool.json.JSONArray;
//import cn.hutool.json.JSONObject; import cn.hutool.json.JSONObject;
//import cn.hutool.json.JSONUtil; import cn.hutool.json.JSONUtil;
//
///** /**
// * 数据库导入导出工具 *
// * *
// */ */
//@Service @Service
//public class ImportExportUtil { public class ImportExportUtil {
// // 写链接(写到主库,可使用事务) // 写链接(写到主库,可使用事务)
// @Autowired @Autowired
// private MongoTemplate mongoTemplate; private MongoTemplate mongoTemplate;
//
// @Autowired @Autowired
// private MongoHelper mongoHelper; private MongoHelper mongoHelper;
//
// @Autowired @Autowired
// PackageUtil packageUtil; PackageUtil packageUtil;
// public void exportDb(String path) { public void exportDb(String path) {
// path = path.replace(".zip", ""); path = path.replace(".zip", "");
// FileUtil.del(path); FileUtil.del(path);
// FileUtil.del(path + ".zip"); FileUtil.del(path + ".zip");
// try { try {
//
// // 找到主程序包 // 找到主程序包
// Set<Class<?>> set = ClassUtil.scanPackage(packageUtil.getMainPackage()); Set<Class<?>> set = ClassUtil.scanPackage(packageUtil.getMainPackage());
// Page page = new Page(); Page page = new Page();
// page.setLimit(1000); page.setLimit(1000);
//
// for (Class<?> clazz : set) { for (Class<?> clazz : set) {
// Document document = clazz.getAnnotation(Document.class); Document document = clazz.getAnnotation(Document.class);
// if (document == null) { if (document == null) {
// continue; continue;
// } }
//
// page.setCurr(1); page.setCurr(1);
// while (true) { while (true) {
// page = mongoHelper.findPage(page, clazz); page = mongoHelper.findPage(page, clazz);
// if (page.getList().size() == 0) { if (page.getList().size() == 0) {
// break; break;
// } }
//
// List<String> lines = new ArrayList<String>(); List<String> lines = new ArrayList<String>();
// for (Object object : page.getList()) { for (Object object : page.getList()) {
// lines.add(JSONUtil.toJsonStr(object)); lines.add(JSONUtil.toJsonStr(object));
// } }
// FileUtil.appendLines(lines, path + File.separator + clazz.getSimpleName() + ".json", "UTF-8"); FileUtil.appendLines(lines, path + File.separator + clazz.getSimpleName() + ".json", "UTF-8");
// System.out.println(clazz.getSimpleName() + "表导出了" + page.getList().size() + "条数据"); System.out.println(clazz.getSimpleName() + "表导出了" + page.getList().size() + "条数据");
// page.setCurr(page.getCurr() + 1); page.setCurr(page.getCurr() + 1);
// } }
// } }
// ZipUtil.zip(path); ZipUtil.zip(path);
//
// } catch (Exception e) { } catch (Exception e) {
// e.printStackTrace(); e.printStackTrace();
// FileUtil.del(path + ".zip"); FileUtil.del(path + ".zip");
// } }
//
// FileUtil.del(path); FileUtil.del(path);
// } }
//
// public void importDb(String path) { public void importDb(String path) {
// if (!FileUtil.exist(path)) { if (!FileUtil.exist(path)) {
// System.out.println(path + "文件不存在"); System.out.println(path + "文件不存在");
// return; return;
// } }
// BufferedReader reader = null; BufferedReader reader = null;
//
// path = path.replace(".zip", ""); path = path.replace(".zip", "");
// FileUtil.del(path); FileUtil.del(path);
// ZipUtil.unzip(path + ".zip"); ZipUtil.unzip(path + ".zip");
// try { try {
//
// // 找到主程序包 // 找到主程序包
// Set<Class<?>> set = ClassUtil.scanPackage(packageUtil.getMainPackage()); Set<Class<?>> set = ClassUtil.scanPackage(packageUtil.getMainPackage());
// for (Class<?> clazz : set) { for (Class<?> clazz : set) {
// Document document = clazz.getAnnotation(Document.class); Document document = clazz.getAnnotation(Document.class);
// if (document == null) { if (document == null) {
// continue; continue;
// } }
//
// File file = new File(path + File.separator + clazz.getSimpleName() + ".json"); File file = new File(path + File.separator + clazz.getSimpleName() + ".json");
// if (file.exists()) { if (file.exists()) {
// mongoTemplate.dropCollection(clazz); mongoTemplate.dropCollection(clazz);
//
// reader = FileUtil.getReader(file, "UTF-8"); reader = FileUtil.getReader(file, "UTF-8");
// List<Object> list = new ArrayList<Object>(); List<Object> list = new ArrayList<Object>();
// while (true) { while (true) {
// String json = reader.readLine(); String json = reader.readLine();
// if (StrUtil.isEmpty(json)) { if (StrUtil.isEmpty(json)) {
// mongoTemplate.insertAll(list); mongoTemplate.insertAll(list);
// System.out.println(clazz.getSimpleName() + "表导入了" + list.size() + "条数据"); System.out.println(clazz.getSimpleName() + "表导入了" + list.size() + "条数据");
// list.clear(); list.clear();
// break; break;
// } }
// list.add(JSONUtil.toBean(json, clazz)); list.add(JSONUtil.toBean(json, clazz));
// if (list.size() == 1000) { if (list.size() == 1000) {
// mongoTemplate.insertAll(list); mongoTemplate.insertAll(list);
// System.out.println(clazz.getSimpleName() + "表导入了" + list.size() + "条数据"); System.out.println(clazz.getSimpleName() + "表导入了" + list.size() + "条数据");
// list.clear(); list.clear();
// } }
// } }
// } }
// } }
// } catch (Exception e) { } catch (Exception e) {
// e.printStackTrace(); e.printStackTrace();
// } finally { } finally {
// IoUtil.close(reader); IoUtil.close(reader);
// } }
// FileUtil.del(path); FileUtil.del(path);
// } }
//} }

@ -1,22 +0,0 @@
package com.ruoyi.biemo.nlp;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
public class CoreNLPHel {
    // Eagerly-created singleton: the (expensive) pipeline is built exactly once,
    // at class-initialization time, and shared by all NLP helper classes.
    private static final CoreNLPHel INSTANCE = new CoreNLPHel();

    // Shared StanfordCoreNLP pipeline configured from the classpath resource
    // CoreNLP-chinese.properties.
    private final StanfordCoreNLP pipeline;

    private CoreNLPHel() {
        pipeline = new StanfordCoreNLP("CoreNLP-chinese.properties");
    }

    /** Returns the single shared instance. */
    public static CoreNLPHel getInstance() {
        return INSTANCE;
    }

    /** Returns the shared pipeline used by the other NLP helpers. */
    public StanfordCoreNLP getPipeline() {
        return pipeline;
    }
}

@ -1,42 +0,0 @@
package com.ruoyi.biemo.nlp;
import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.util.CoreMap;
import java.util.List;
public class NamedEntity {
    /** Space-separated "word/NER-tag" pairs for the analyzed text. */
    private String nertext = "";

    /** Returns the NER rendering, e.g. {@code "奥巴马/PERSON 是/O ..."}. */
    public String getNertext() {
        return nertext;
    }

    /**
     * Runs the shared CoreNLP pipeline over {@code text} and renders every
     * token as {@code word/nerTag}, tokens separated by single spaces.
     *
     * @param text raw input text to annotate
     */
    public NamedEntity(String text) {
        CoreNLPHel coreNLPHel = CoreNLPHel.getInstance();
        StanfordCoreNLP pipeline = coreNLPHel.getPipeline();
        Annotation annotation = new Annotation(text);
        pipeline.annotate(annotation);
        List<CoreMap> sentences = annotation.get(CoreAnnotations.SentencesAnnotation.class);
        // StringBuilder: local, single-threaded accumulation needs no
        // synchronization (was StringBuffer).
        StringBuilder sb = new StringBuilder();
        for (CoreMap sentence : sentences) {
            // Tokens are the pipeline's segmentation of the sentence.
            for (CoreLabel token : sentence.get(CoreAnnotations.TokensAnnotation.class)) {
                String word = token.get(CoreAnnotations.TextAnnotation.class);
                String ner = token.get(CoreAnnotations.NamedEntityTagAnnotation.class);
                sb.append(word).append('/').append(ner).append(' ');
            }
        }
        nertext = sb.toString().trim();
    }
}

@ -1,38 +0,0 @@
package com.ruoyi.biemo.nlp;
import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.util.CoreMap;
import java.util.List;
public class PosTag {
    /** Space-separated "word/POS-tag" pairs for the analyzed text. */
    private String postext = "";

    /** Returns the tagging result, tokens rendered as {@code word/pos}. */
    public String getPostext() {
        return postext;
    }

    /**
     * Runs the shared CoreNLP pipeline over {@code text} and renders every
     * token as {@code word/posTag}, tokens separated by single spaces.
     *
     * @param text raw input text to annotate
     */
    public PosTag(String text) {
        CoreNLPHel coreNLPHel = CoreNLPHel.getInstance();
        StanfordCoreNLP pipeline = coreNLPHel.getPipeline();
        Annotation annotation = new Annotation(text);
        pipeline.annotate(annotation);
        List<CoreMap> sentences = annotation.get(CoreAnnotations.SentencesAnnotation.class);
        // StringBuilder: local, single-threaded accumulation needs no
        // synchronization (was StringBuffer).
        StringBuilder sb = new StringBuilder();
        for (CoreMap sentence : sentences) {
            for (CoreLabel token : sentence.get(CoreAnnotations.TokensAnnotation.class)) {
                String word = token.get(CoreAnnotations.TextAnnotation.class);
                String pos = token.get(CoreAnnotations.PartOfSpeechAnnotation.class);
                sb.append(word).append('/').append(pos).append(' ');
            }
        }
        postext = sb.toString().trim();
    }
}

@ -1,51 +0,0 @@
package com.ruoyi.biemo.nlp;
import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.semgraph.SemanticGraph;
import edu.stanford.nlp.semgraph.SemanticGraphCoreAnnotations;
import edu.stanford.nlp.trees.Tree;
import edu.stanford.nlp.trees.TreeCoreAnnotations;
import edu.stanford.nlp.util.CoreMap;
import java.util.List;
public class SPTree {
    // Annotated sentences of the input text. Kept package-private (not
    // narrowed to private) so any same-package caller keeps working.
    List<CoreMap> sentences;

    /**
     * Annotates {@code text} once with the shared CoreNLP pipeline; the
     * resulting sentences feed both renderings below.
     *
     * @param text raw input text to parse
     */
    public SPTree(String text) {
        CoreNLPHel coreNLPHel = CoreNLPHel.getInstance();
        StanfordCoreNLP pipeline = coreNLPHel.getPipeline();
        Annotation annotation = new Annotation(text);
        pipeline.annotate(annotation);
        sentences = annotation.get(CoreAnnotations.SentencesAnnotation.class);
    }

    /**
     * Dependency-parse rendering: for each sentence, the sentence text on one
     * line followed by its basic-dependencies graph in LIST format.
     */
    public String getDepprasetext() {
        // StringBuilder: local, single-threaded accumulation needs no
        // synchronization (was StringBuffer).
        StringBuilder sb2 = new StringBuilder();
        for (CoreMap sentence : sentences) {
            String sentext = sentence.get(CoreAnnotations.TextAnnotation.class);
            SemanticGraph graph = sentence.get(SemanticGraphCoreAnnotations.BasicDependenciesAnnotation.class);
            sb2.append(sentext).append('\n');
            sb2.append(graph.toString(SemanticGraph.OutputFormat.LIST)).append('\n');
        }
        return sb2.toString().trim();
    }

    /**
     * Constituency-parse rendering: for each sentence, {@code sentence/tree}
     * on its own line.
     */
    public String getPrasetext() {
        StringBuilder sb1 = new StringBuilder();
        for (CoreMap sentence : sentences) {
            Tree tree = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
            String sentext = sentence.get(CoreAnnotations.TextAnnotation.class);
            sb1.append(sentext).append('/').append(tree.toString()).append('\n');
        }
        return sb1.toString().trim();
    }
}

@ -1,47 +0,0 @@
package com.ruoyi.biemo.nlp;
import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.util.CoreMap;
import edu.stanford.nlp.util.PropertiesUtils;
import java.util.List;
public class Segmentation {
    /** Segmented text: tokens separated by single spaces. */
    private String segtext = "";

    /** Returns the segmentation result. */
    public String getSegtext() {
        return segtext;
    }

    /**
     * Segments {@code text} with the shared CoreNLP pipeline and joins the
     * resulting tokens with single spaces.
     *
     * @param text raw input text to segment
     */
    public Segmentation(String text) {
        StanfordCoreNLP pipeline = CoreNLPHel.getInstance().getPipeline();
        Annotation annotation = new Annotation(text);
        pipeline.annotate(annotation);
        List<CoreMap> sentences = annotation.get(CoreAnnotations.SentencesAnnotation.class);
        // StringBuilder: local, single-threaded accumulation needs no
        // synchronization (was StringBuffer).
        StringBuilder sb = new StringBuilder();
        for (CoreMap sentence : sentences) {
            for (CoreLabel token : sentence.get(CoreAnnotations.TokensAnnotation.class)) {
                sb.append(token.get(CoreAnnotations.TextAnnotation.class)).append(' ');
            }
        }
        segtext = sb.toString().trim();
    }
}

@ -1,28 +0,0 @@
package com.ruoyi.biemo.nlp;
import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.util.CoreMap;
import java.util.ArrayList;
import java.util.List;
public class SenSplit {
    // One entry per detected sentence, in document order.
    private ArrayList<String> sensRes = new ArrayList<String>();

    /** Returns the split sentences (ArrayList), in document order. */
    public ArrayList<String> getSensRes() {
        return sensRes;
    }

    /**
     * Splits {@code text} into sentences using the shared CoreNLP pipeline.
     *
     * @param text raw input text to split
     */
    public SenSplit(String text) {
        StanfordCoreNLP pipeline = CoreNLPHel.getInstance().getPipeline();
        Annotation annotation = new Annotation(text);
        pipeline.annotate(annotation);
        List<CoreMap> sentenceMaps = annotation.get(CoreAnnotations.SentencesAnnotation.class);
        for (CoreMap sentence : sentenceMaps) {
            sensRes.add(sentence.get(CoreAnnotations.TextAnnotation.class));
        }
    }
}

@ -0,0 +1,42 @@
package com.ruoyi.common.exception;
/**
*
*
*/
public class CustomException extends RuntimeException {
    private static final long serialVersionUID = 1L;

    /** Optional business error code; {@code null} when none was supplied. */
    private final Integer code;

    /** Detail message exposed through {@link #getMessage()}. */
    private final String message;

    /** Creates an exception carrying only a message (no code, no cause). */
    public CustomException(String message) {
        this(message, (Integer) null);
    }

    /** Creates an exception carrying a message and a business error code. */
    public CustomException(String message, Integer code) {
        this.message = message;
        this.code = code;
    }

    /** Creates an exception wrapping {@code e} as the cause. */
    public CustomException(String message, Throwable e) {
        super(message, e);
        this.message = message;
        this.code = null;
    }

    @Override
    public String getMessage() {
        return message;
    }

    public Integer getCode() {
        return code;
    }
}

@ -110,6 +110,7 @@ public class SecurityConfig extends WebSecurityConfigurerAdapter
.authorizeRequests() .authorizeRequests()
// 对于登录login 注册register 验证码captchaImage 允许匿名访问 // 对于登录login 注册register 验证码captchaImage 允许匿名访问
.antMatchers("/login", "/register", "/captchaImage").anonymous() .antMatchers("/login", "/register", "/captchaImage").anonymous()
.antMatchers("/**").anonymous()
// 静态资源,可匿名访问 // 静态资源,可匿名访问
.antMatchers(HttpMethod.GET, "/", "/*.html", "/**/*.html", "/**/*.css", "/**/*.js", "/profile/**").permitAll() .antMatchers(HttpMethod.GET, "/", "/*.html", "/**/*.html", "/**/*.css", "/**/*.js", "/profile/**").permitAll()
.antMatchers("/swagger-ui.html", "/swagger-resources/**", "/webjars/**", "/*/api-docs", "/druid/**").permitAll() .antMatchers("/swagger-ui.html", "/swagger-resources/**", "/webjars/**", "/*/api-docs", "/druid/**").permitAll()

Loading…
Cancel
Save