溫馨提示×

您好,登錄后才能下訂單哦!

密碼登錄×
登錄注冊(cè)×
其他方式登錄
點擊 登錄注冊 即表示同意《億速云用戶服務條款》

java 實(shí)現(xiàn)DFA 算法(理論百度搜索)

發(fā)布時(shí)間:2020-07-17 21:28:16 來(lái)源:網(wǎng)絡(luò) 閱讀:1009 作者:NopSmile 欄目:編程語(yǔ)言

DFA簡(jiǎn)介

DFA全稱為:Deterministic Finite Automaton,即確定有窮自動機。(自己百度吧)


直接代碼:

敏感詞實(shí)體類(lèi)

package com.nopsmile.dfa;

/**
 * Sensitive-word entity: a single keyword with an optional record id.
 */
public class Keywords {
    // Optional identifier of this keyword record.
    private String pid;
    // The sensitive word itself. Renamed from "Content": Java fields use
    // lowerCamelCase; the accessor names below are unchanged for callers.
    private String content;

    /** No-arg constructor (kept for bean-style instantiation). */
    public Keywords() {
    }

    /**
     * Creates a keyword holding the given word.
     *
     * @param content the sensitive word
     */
    public Keywords(String content) {
        this.content = content;
    }

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        this.content = content;
    }

    public String getPid() {
        return pid;
    }

    public void setPid(String pid) {
        this.pid = pid;
    }

}

敏感詞庫初始化

package com.nopsmile.dfa;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * 敏感詞庫(kù)初始化
 * 
 */
/**
 * Builds the DFA-style sensitive-word dictionary.
 *
 * <p>The dictionary is a nested map: each {@code Character} key maps to the
 * sub-map of possible next characters, and the {@code String} key "isEnd"
 * ("1"/"0") marks whether a complete word terminates at that node. Raw map
 * types are kept because keys deliberately mix Character and String.
 */
public class SensitiveWordInit {

    /** Root of the nested sensitive-word map (character -> sub-map). */
    public HashMap sensitiveWordMap;

    /**
     * Initializes the dictionary from a list of keyword entities.
     *
     * @param sensitiveWords keyword entities whose content is the word text
     * @return the populated root map (also stored in {@link #sensitiveWordMap});
     *         may be null if an exception occurred before the map was built
     */
    public Map initKeyWord(List<Keywords> sensitiveWords) {
        try {
            // Deduplicate the words (trimmed) before building the trie.
            Set<String> keyWordSet = new HashSet<String>();
            for (Keywords s : sensitiveWords) {
                keyWordSet.add(s.getContent().trim());
            }
            addSensitiveWordToHashMap(keyWordSet);
        } catch (Exception e) {
            // Best-effort initialization: log and return whatever was built.
            e.printStackTrace();
        }
        return sensitiveWordMap;
    }

    /**
     * Inserts every keyword into the nested map, one character per level.
     *
     * @param keyWordSet deduplicated sensitive words
     */
    private void addSensitiveWordToHashMap(Set<String> keyWordSet) {
        // Presize the root map to the number of distinct words.
        sensitiveWordMap = new HashMap(keyWordSet.size());
        for (String key : keyWordSet) {
            // Start each word at the root; nested maps are shared objects,
            // so mutations through nowMap are visible in sensitiveWordMap.
            Map nowMap = sensitiveWordMap;
            for (int i = 0; i < key.length(); i++) {
                char keyChar = key.charAt(i);
                Object wordMap = nowMap.get(keyChar);
                if (wordMap != null) {
                    // Character already present: descend into the existing branch.
                    nowMap = (Map) wordMap;
                } else {
                    // New branch: create a node not yet marked as a word end.
                    Map<String, String> newWordMap = new HashMap<String, String>();
                    newWordMap.put("isEnd", "0");
                    nowMap.put(keyChar, newWordMap);
                    nowMap = newWordMap;
                }
                if (i == key.length() - 1) {
                    // Last character of this keyword: mark the node terminal.
                    nowMap.put("isEnd", "1");
                }
            }
        }
    }
}

自定義的工具類

package com.nopsmile.dfa;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.alibaba.fastjson.JSONArray;

import net.sf.json.JSONObject;

/**
 * 敏感詞過(guò)濾工具類(lèi)
 * 
 * @author AlanLee
 *
 */
/**
 * DFA-based sensitive-word filtering utilities.
 *
 * <p>Operates on the nested map produced by SensitiveWordInit: each
 * {@code Character} key maps to a sub-map, and the {@code String} key
 * "isEnd" ("1") marks the end of a complete word. Not thread-safe: callers
 * assign {@link #sensitiveWordMap} before use.
 */
public class SensitivewordUtils {
    /** Root of the sensitive-word map; must be set before filtering. */
    public static Map sensitiveWordMap = null;

    /** Match type: stop at the first (shortest) matching word. */
    public static int minMatchTYpe = 1;

    /** Match type: keep walking to match the longest word. */
    public static int maxMatchType = 2;

    /**
     * Number of top-level entries in the word map.
     *
     * @return entry count, or 0 when the map is uninitialized
     */
    public static int getWordSize() {
        Map map = SensitivewordUtils.sensitiveWordMap;
        return (map == null) ? 0 : map.size();
    }

    /**
     * Tests whether the text contains at least one sensitive word.
     *
     * @param txt       text to scan
     * @param matchType minMatchTYpe or maxMatchType
     * @return true if any sensitive word occurs in txt
     */
    public static boolean isContaintSensitiveWord(String txt, int matchType) {
        for (int i = 0; i < txt.length(); i++) {
            // A single hit decides the answer; no need to scan further.
            if (checkSensitiveWord(txt, i, matchType) > 0) {
                return true;
            }
        }
        return false;
    }

    /**
     * Collects the distinct sensitive words found in the text.
     *
     * @param txt       text to scan
     * @param matchType minMatchTYpe or maxMatchType
     * @return set of matched words (empty when none)
     */
    public static Set<String> getSensitiveWord(String txt, int matchType) {
        Set<String> sensitiveWordList = new HashSet<String>();
        for (int i = 0; i < txt.length(); i++) {
            int length = checkSensitiveWord(txt, i, matchType);
            if (length > 0) {
                sensitiveWordList.add(txt.substring(i, i + length));
                // Resume scanning after the matched word.
                i = i + length - 1;
            }
        }
        return sensitiveWordList;
    }

    /**
     * Replaces every sensitive word with replaceChar repeated to the
     * word's length.
     *
     * @param txt         text to filter
     * @param matchType   minMatchTYpe or maxMatchType
     * @param replaceChar replacement unit, e.g. "*"
     * @return filtered text
     */
    public static String replaceSensitiveWord(String txt, int matchType, String replaceChar) {
        String resultTxt = txt;
        for (String word : getSensitiveWord(txt, matchType)) {
            String replaceString = getReplaceChars(replaceChar, word.length());
            // String.replace is a literal replacement. The previous
            // replaceAll() interpreted the word as a regex and broke on
            // words containing metacharacters such as '*' or '('.
            resultTxt = resultTxt.replace(word, replaceString);
        }
        return resultTxt;
    }

    /**
     * Builds the replacement string: replaceChar repeated {@code length} times.
     */
    private static String getReplaceChars(String replaceChar, int length) {
        // StringBuilder avoids O(n^2) string concatenation in the loop.
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < length; i++) {
            sb.append(replaceChar);
        }
        return sb.toString();
    }

    /**
     * Walks the DFA starting at beginIndex and returns the length of the
     * sensitive word found there, or 0 when none starts at that position.
     *
     * <p>Fixes two defects of the original: a NullPointerException when the
     * dictionary was never initialized, and returning the number of
     * characters walked instead of the length of the last complete word
     * (e.g. with words {"ab","abcd"} and text "abcx" it returned 3, making
     * callers extract the non-word "abc").
     *
     * @param txt        text to scan
     * @param beginIndex position where the match must start
     * @param matchType  minMatchTYpe stops at the first word end;
     *                   maxMatchType keeps walking for a longer word
     * @return length of the matched word, 0 if no match
     */
    public static int checkSensitiveWord(String txt, int beginIndex, int matchType) {
        Map nowMap = SensitivewordUtils.sensitiveWordMap;
        if (nowMap == null) {
            // Dictionary not initialized: nothing can match.
            return 0;
        }
        // Characters walked so far, and the length at the last word end seen.
        int walked = 0;
        int lastMatch = 0;
        for (int i = beginIndex; i < txt.length(); i++) {
            char word = txt.charAt(i);
            nowMap = (Map) nowMap.get(word);
            if (nowMap == null) {
                // No branch for this character: stop walking.
                break;
            }
            walked++;
            if ("1".equals(nowMap.get("isEnd"))) {
                lastMatch = walked;
                if (SensitivewordUtils.minMatchTYpe == matchType) {
                    // Shortest-match mode: first complete word wins.
                    break;
                }
            }
        }
        return lastMatch;
    }

    /**
     * Counts occurrences of each sensitive word in the text.
     *
     * @param txt       text to scan
     * @param matchType minMatchTYpe or maxMatchType
     * @return map of word -> occurrence count
     */
    public static Map getSensitiveWordSum(String txt, int matchType) {
        Map<String, Integer> map = new HashMap<String, Integer>();
        for (int i = 0; i < txt.length(); i++) {
            int length = checkSensitiveWord(txt, i, matchType);
            if (length > 0) {
                String str = txt.substring(i, i + length);
                Integer prev = map.get(str);
                // Integer.valueOf uses the cache; new Integer(...) is deprecated.
                map.put(str, (prev == null) ? Integer.valueOf(1)
                                            : Integer.valueOf(prev.intValue() + 1));
                i = i + length - 1;
            }
        }
        return map;
    }

    /**
     * Sorts the map by value in descending order; when isCondition is true,
     * only the first {@code condition} entries are kept.
     *
     * @param unsortMap   map to sort
     * @param condition   maximum number of entries to keep
     * @param isCondition whether the condition limit applies
     * @return a LinkedHashMap in descending-value order
     */
    public static Map<String, Integer> sortByValue(Map<String, Integer> unsortMap, int condition, boolean isCondition) {
        // Copy entries into a list and sort descending by value.
        List<Map.Entry<String, Integer>> list =
                new LinkedList<Map.Entry<String, Integer>>(unsortMap.entrySet());
        Collections.sort(list, new Comparator<Map.Entry<String, Integer>>() {
            public int compare(Map.Entry<String, Integer> o1,
                               Map.Entry<String, Integer> o2) {
                return o2.getValue().compareTo(o1.getValue());
            }
        });

        // LinkedHashMap preserves the sorted insertion order.
        Map<String, Integer> sortedMap = new LinkedHashMap<String, Integer>();
        int limit = isCondition ? Math.min(condition, list.size()) : list.size();
        for (int i = 0; i < limit; i++) {
            sortedMap.put(list.get(i).getKey(), list.get(i).getValue());
        }
        return sortedMap;
    }

}

使用上面類流程代碼

// Build the keyword list with a single sensitive word.
Keywords ss=new Keywords("好");
List list = new ArrayList();
list.add(ss);

// Build the nested DFA dictionary from the keyword list.
SensitiveWordInit sensitiveWordInit = new SensitiveWordInit();
Map sensitiveWordMap   = sensitiveWordInit.initKeyWord(list);

// Hand the dictionary to the static utility class before filtering.
SensitivewordUtils.sensitiveWordMap = sensitiveWordMap;

// Count occurrences of each sensitive word in the text (max-match mode, 2).
SensitivewordUtils.getSensitiveWordSum("需要檢測的文本", 2)    ;
向AI問(wèn)一下細(xì)節(jié)

免責(zé)聲明:本站發(fā)布的內(nèi)容(圖片、視頻和文字)以原創(chuàng)、轉(zhuǎn)載和分享為主,文章觀點(diǎn)不代表本網(wǎng)站立場(chǎng),如果涉及侵權(quán)請(qǐng)聯(lián)系站長(zhǎng)郵箱:is@yisu.com進(jìn)行舉報(bào),并提供相關(guān)證據(jù),一經(jīng)查實(shí),將立刻刪除涉嫌侵權(quán)內(nèi)容。

AI