Emotion analysis page interface

master
xiaoCJ 2 years ago
parent ff834b3310
commit 30eee5ac36

@@ -11,6 +11,7 @@ import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;
import com.ruoyi.biemo.business.domain.Category;
import com.ruoyi.biemo.business.domain.DocInfo;
import com.ruoyi.biemo.business.domain.EmotionResult;
import com.ruoyi.biemo.business.domain.WordCloudItem;
import com.ruoyi.biemo.business.service.CategoryService;
import com.ruoyi.biemo.business.service.DocInfoService;
@@ -77,8 +78,15 @@ public class DocInfoController extends BaseController {
@GetMapping("/getWordCloudByCateId/{categoryId}")
public AjaxResult getWordCloudByCateId(@PathVariable String categoryId){
List<WordCloudItem> wordCloudItems = docInfoService.getWordCloudByCateId(categoryId);
return AjaxResult.success(wordCloudItems);
}
@GetMapping("/getEmotionAnalysisByCateId/{categoryId}")
public AjaxResult getEmotionAnalysis(@PathVariable String categoryId){
EmotionResult emotionResult = docInfoService.getEmotionAnalysis(categoryId);
return AjaxResult.success(emotionResult);
}
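As a reference for reviewers, the new endpoint can be exercised end to end with a Spring MockMvc smoke test. This is a hypothetical sketch, not part of this commit: it assumes the path resolves as written (the controller's class-level @RequestMapping, if any, is not visible in this diff) and that AjaxResult.success(...) nests the payload under a "data" key, as in stock RuoYi.

import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.web.servlet.MockMvc;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@SpringBootTest
@AutoConfigureMockMvc
class DocInfoControllerEmotionTest {

    @Autowired
    private MockMvc mockMvc;

    @Test
    void emotionAnalysisReturnsUpAndDownCounts() throws Exception {
        // "1" is a placeholder category id; use an id that exists in test data.
        mockMvc.perform(get("/getEmotionAnalysisByCateId/{categoryId}", "1"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.data.upName").value("正面"))
                .andExpect(jsonPath("$.data.downName").value("负面"));
    }
}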

@@ -0,0 +1,50 @@
package com.ruoyi.biemo.business.domain;
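/**
 * Sentiment tally for a category, as consumed by the emotion-analysis chart:
 * counts of distinct words classified positive (up) vs. negative (down),
 * plus the display names for each side.
 */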
public class EmotionResult {

    private String downName;
    private String upName;
    private int downCount;
    private int upCount;

    public EmotionResult() {
    }

    public EmotionResult(String downName, String upName, int downCount, int upCount) {
        this.downName = downName;
        this.upName = upName;
        this.downCount = downCount;
        this.upCount = upCount;
    }

    public String getDownName() {
        return downName;
    }

    public String getUpName() {
        return upName;
    }

    public int getDownCount() {
        return downCount;
    }

    public int getUpCount() {
        return upCount;
    }

    public void setDownName(String downName) {
        this.downName = downName;
    }

    public void setUpName(String upName) {
        this.upName = upName;
    }

    public void setDownCount(int downCount) {
        this.downCount = downCount;
    }

    public void setUpCount(int upCount) {
        this.upCount = upCount;
    }
}
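Since the project already uses fastjson (imported in DocInfoService below), the JSON shape the front end receives for this DTO can be previewed directly. A minimal sketch, not part of the commit (key order may differ, as fastjson sorts by getter name by default):

import com.alibaba.fastjson.JSON;
import com.ruoyi.biemo.business.domain.EmotionResult;

public class EmotionResultJsonDemo {
    public static void main(String[] args) {
        // Prints something like: {"downCount":12,"downName":"负面","upCount":30,"upName":"正面"}
        System.out.println(JSON.toJSONString(new EmotionResult("负面", "正面", 12, 30)));
    }
}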

@@ -4,10 +4,7 @@ import com.alibaba.fastjson.JSONObject;
import com.github.pagehelper.util.StringUtil;
import com.hankcs.hanlp.seg.common.Term;
import com.hankcs.hanlp.tokenizer.NLPTokenizer;
import com.ruoyi.biemo.business.domain.*;
import com.ruoyi.biemo.business.domain.event.DocInfoDeleteEvent;
import com.ruoyi.biemo.business.domain.event.DocInfoSaveEvent;
import com.ruoyi.biemo.business.response.MyResultResponse;
@@ -15,6 +12,7 @@ import com.ruoyi.biemo.core.page.Page;
import com.ruoyi.biemo.elasticsearch.util.EsService;
import com.ruoyi.biemo.mongodb.utils.MongoHelper;
import com.ruoyi.biemo.nlp.DependencyParserUtils;
import com.ruoyi.biemo.nlp.SentimentAnalysisUtils;
import com.ruoyi.biemo.nlp.SummaryUtils;
import com.ruoyi.biemo.utils.MyObjects;
import com.ruoyi.common.core.domain.AjaxResult;
@@ -238,7 +236,7 @@ public class DocInfoService extends EsService<DocInfo> {
        }
        // Extract named entities
        try {
            Map<String, Set<String>> nerTagSet = DependencyParserUtils.getMyNERTagSet(strArr);
            docInfo.setParserNamedEntity(JSONObject.toJSONString(nerTagSet));
            docInfo.setSummary(SummaryUtils.autoSummary(content));
        } catch (Exception e) {
@@ -344,19 +342,19 @@ public class DocInfoService extends EsService<DocInfo> {
            }
        }
        MyResultResponse myResultResponse = new MyResultResponse();
        List<MyResultResponse.Datas> results = new ArrayList<>();
        List<MyResultResponse.Datas> results2 = response2.getResults();
        List<MyResultResponse.Datas> results3 = response3.getResults();
        results.addAll(0, results3);
        results.addAll(1, results2);
        List<String> errors = myResultResponse.getErrors();
        errors.addAll(response2.getErrors());
        errors.addAll(response3.getErrors());
        myResultResponse.setResults(results);
        myResultResponse.setErrors(errors);
        return myResultResponse;
    }
    // Article management: batch analysis
@@ -449,28 +447,29 @@ public class DocInfoService extends EsService<DocInfo> {
        // insertOrUpdateDocInfo(docInfo);
        return response;
    }
    public List<WordCloudItem> getWordCloudByCateId(String categoryId) {
        String regex = "<.*?>"; // regex for stripping HTML tags
        Map<String, Integer> temp = new ConcurrentHashMap<>();
        List<WordCloudItem> wordCloudItemList = new ArrayList<>();
        DocInfo docInfo = new DocInfo();
        docInfo.setCateId(categoryId);
        List<DocInfo> docInfoList = selectDocInfoList(docInfo);
        if (CollectionUtils.isNotEmpty(docInfoList)) {
            List<Term> termList = docInfoList.parallelStream()
                    .filter(ObjectUtils::isNotEmpty)
                    .flatMap(_docInfo -> NLPTokenizer.segment(
                            _docInfo.getContent()
                                    .replaceAll(regex, "")
                                    .replaceAll("\\s+", "")
                                    .replaceAll("[,  '“”.。]", "")
                                    .trim()).stream())
                    .collect(Collectors.toList());
            if (CollectionUtils.isNotEmpty(termList)) {
                // merge() keeps the increment atomic; a containsKey/put pair
                // is not safe when called from parallelStream().
                termList.parallelStream().forEach(term -> temp.merge(term.word, 1, Integer::sum));
            }
        }
        for (Map.Entry<String, Integer> entry : temp.entrySet()) {
            WordCloudItem wordCloudItem = new WordCloudItem();
            wordCloudItem.setName(entry.getKey());
            wordCloudItem.setValue(entry.getValue());
@@ -478,4 +477,48 @@ public class DocInfoService extends EsService<DocInfo> {
        }
        return wordCloudItemList;
    }
    public EmotionResult getEmotionAnalysis(String categoryId) {
        String regex = "<.*?>"; // regex for stripping HTML tags
        Map<String, Integer> temp = new ConcurrentHashMap<>();
        DocInfo docInfo = new DocInfo();
        docInfo.setCateId(categoryId);
        List<DocInfo> docInfoList = selectDocInfoList(docInfo);
        if (CollectionUtils.isNotEmpty(docInfoList)) {
            List<Term> termList = docInfoList.parallelStream()
                    .filter(ObjectUtils::isNotEmpty)
                    .flatMap(_docInfo -> NLPTokenizer.segment(
                            _docInfo.getContent()
                                    .replaceAll(regex, "")
                                    .replaceAll("\\s+", "")
                                    .replaceAll("[,  '“”.。]", "")
                                    .trim()).stream())
                    .collect(Collectors.toList());
            if (CollectionUtils.isNotEmpty(termList)) {
                // Atomic increment; containsKey/put is not safe under parallelStream().
                termList.parallelStream().forEach(term -> temp.merge(term.word, 1, Integer::sum));
            }
        }
        // Classify each distinct word once and tally positive vs. negative;
        // word frequencies are not used as weights.
        int upCount = 0;
        int downCount = 0;
        for (String word : temp.keySet()) {
            if ("正面".equals(SentimentAnalysisUtils.analysis(word))) {
                upCount++;
            } else {
                downCount++;
            }
        }
        return new EmotionResult("负面", "正面", downCount, upCount);
    }
}
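One behavioral detail worth noting: the tally iterates over distinct words, so a word's frequency never weights the sentiment counts. A hypothetical walk-through (the sentiment labels in the comments are assumed for illustration, not taken from the real dictionary):

import java.util.HashMap;
import java.util.Map;
import com.ruoyi.biemo.nlp.SentimentAnalysisUtils;

public class EmotionTallyDemo {
    public static void main(String[] args) {
        // Suppose analysis("好") returns "正面" and analysis("差") returns "负面".
        Map<String, Integer> temp = new HashMap<>();
        temp.put("好", 3); // three occurrences
        temp.put("差", 1); // one occurrence
        int up = 0, down = 0;
        for (String word : temp.keySet()) {
            if ("正面".equals(SentimentAnalysisUtils.analysis(word))) up++; else down++;
        }
        // Prints "up=1 down=1" under the assumed labels:
        // "好" occurring three times still counts only once.
        System.out.println("up=" + up + " down=" + down);
    }
}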
