The articles below are all ones I have published on CSDN.
Database Course Design: A Factory Materials Management System (with Java source code and course design report)
Database Course Design: A Store Purchase-Sales-Inventory Management System (with Java source code and course design report)
Database Course Design: A Gas Company Delivery Management System (with course design report)
Database Course Design: A JavaEE-Based Enterprise Purchase-Sales-Inventory System (with Java source code and course design report)
Java Course Design: A Huffman Encoding/Decoding System Implemented in Java
C Course Design: A Class Grade Management System (with course design report)
C Course Design: A Shift Scheduling System Solved with DFS (with course design report)
Below is the encoder class of the Huffman encoding/decoding system, HuffmanEncoder.java:

```java
import java.io.*;
import java.text.NumberFormat;
import java.util.HashMap;
import java.util.Map;
import java.util.PriorityQueue;

public class HuffmanEncoder implements DataShare {
    private String fileName;
    private Node huffmanTree;
    private HashMap<Character, Integer> charFrequencies;
    private HashMap<Character, String> huffmanCodeMap;
    private String text;
    private String encodedText;

    private static final String treeExportPath = "HuffmanTree.txt";
    private static final String encodedTextExportPath = "EncodedText.txt";

    HuffmanEncoder(String fileName) {
        this.fileName = fileName;
        charFrequencies = new HashMap<>();
    }

    public String getEncodedText() {
        return encodedText;
    }

    public String getText() {
        return text;
    }

    // Read the source file character by character, keep the full text and count each character's frequency.
    private void getCharFrequencies() {
        int ch;
        StringBuilder textBuilder = new StringBuilder();
        try {
            Reader in = new FileReader(fileName);
            while ((ch = in.read()) != -1) {
                char c = (char) ch;
                textBuilder.append(c);
                charFrequencies.put(c, charFrequencies.containsKey(c) ? charFrequencies.get(c) + 1 : 1);
            }
            in.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        text = textBuilder.toString();
    }

    // Build the Huffman tree by repeatedly merging the two lowest-frequency subtrees.
    private void createHuffmanTree() {
        // A PriorityQueue orders its elements by their natural ordering, or by the Comparator
        // supplied at construction time, depending on which constructor is used.
        PriorityQueue<Node> priorityQueue = new PriorityQueue<Node>();
        // Map.Entry<K,V> is a key-value pair; HashMap.entrySet() returns a Set view of the mappings.
        for (Map.Entry<Character, Integer> kv : charFrequencies.entrySet()) {
            priorityQueue.add(new Node(kv.getKey(), kv.getValue(), null, null));
        }
        while (priorityQueue.size() > 1) {
            Node node1 = priorityQueue.poll();
            Node node2 = priorityQueue.poll();
            priorityQueue.add(new Node('\0', node1.freq + node2.freq, node1, node2));
        }
        huffmanTree = priorityQueue.poll();
    }

    // Walk the tree: going left appends '0', going right appends '1'; each leaf stores its finished code.
    private void createHuffmanCode(Node node, HashMap<Character, String> hashMap, String string) {
        if (node.lChild == null && node.rChild == null) {
            hashMap.put(node.ch, string);
            return;
        }
        createHuffmanCode(node.lChild, hashMap, string + '0');
        createHuffmanCode(node.rChild, hashMap, string + '1');
    }

    private void getHuffmanCode() {
        huffmanCodeMap = new HashMap<>();
        createHuffmanCode(huffmanTree, huffmanCodeMap, "");
    }

    // Replace every character of the original text with its Huffman code.
    private void encode() {
        StringBuilder encodedTextBuilder = new StringBuilder();
        for (char ch : text.toCharArray()) {
            encodedTextBuilder.append(huffmanCodeMap.get(ch));
        }
        encodedText = encodedTextBuilder.toString();
    }

    // Write the '0'/'1' string to EncodedText.txt, overwriting any previous run's output.
    private void exportEncodedText() throws IOException {
        File file = new File(encodedTextExportPath);
        file.createNewFile(); // ensure the file exists (a no-op if it already does)
        RandomAccessFile out = new RandomAccessFile(file, "rw");
        out.setLength(0);     // truncate old contents before rewriting
        out.writeBytes(encodedText);
        out.close();
    }

    // Serialize the Huffman tree so the decoder can rebuild it later.
    private void exportHuffmanTree() {
        File file = new File(treeExportPath);
        try {
            FileOutputStream fileOutputStream = new FileOutputStream(file);
            ObjectOutputStream objectOutputStream = new ObjectOutputStream(fileOutputStream);
            objectOutputStream.writeObject(huffmanTree);
            objectOutputStream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // The encoded file holds one '0'/'1' byte per output bit, while the source file is
    // counted at 16 bits per character, hence the division by 16.
    public String calculateCompressionRatio() {
        File text = new File(fileName);
        File encodedText = new File(encodedTextExportPath);
        String ratio = String.format("%.4f", (double) encodedText.length() / (double) text.length() / 16.0);
        return NumberFormat.getPercentInstance().format(Double.parseDouble(ratio));
    }

    public Object[][] getFileData() {
        File textFile = new File(fileName);
        File encodedTextFile = new File(encodedTextExportPath);
        Object[][] fileData = new Object[2][3];
        fileData[0][0] = textFile.getName();
        fileData[0][1] = textFile.length();
        fileData[0][2] = textFile.getAbsolutePath();
        fileData[1][0] = encodedTextFile.getName();
        fileData[1][1] = encodedTextFile.length() / 16; // encoded size reported in 16-bit character units
        fileData[1][2] = encodedTextFile.getAbsolutePath();
        return fileData;
    }

    // Table rows for the GUI: character, frequency, and Huffman code.
    public Object[][] getCodingData() {
        Object[][] codingData = new Object[charFrequencies.size()][3];
        int count = 0;
        for (Map.Entry<Character, Integer> kv : charFrequencies.entrySet()) {
            codingData[count][0] = kv.getKey();
            codingData[count][1] = kv.getValue();
            ++count;
        }
        count = 0;
        for (Map.Entry<Character, String> kv : huffmanCodeMap.entrySet()) {
            codingData[count][2] = kv.getValue();
            ++count;
        }
        return codingData;
    }

    // Run the whole pipeline: count frequencies, build the tree and code map, encode, export.
    public void execute() throws IOException {
        getCharFrequencies();
        createHuffmanTree();
        getHuffmanCode();
        encode();
        exportHuffmanTree();
        exportEncodedText();
    }
}
```
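The Node class and the DataShare interface are defined elsewhere in the project and are not shown here. From the way HuffmanEncoder uses it, Node must expose ch, freq, lChild and rChild, compare by frequency (it goes into a PriorityQueue with no Comparator), and be Serializable (it is written with ObjectOutputStream). A minimal sketch under those assumptions, not the project's actual source:

```java
import java.io.Serializable;

// Hypothetical reconstruction of the project's Node class; the real one may differ.
public class Node implements Comparable<Node>, Serializable {
    char ch;      // character stored at a leaf ('\0' for internal nodes)
    int freq;     // occurrence count (leaf) or sum of the children's counts (internal)
    Node lChild;  // left subtree, reached by appending '0' to the code
    Node rChild;  // right subtree, reached by appending '1' to the code

    Node(char ch, int freq, Node lChild, Node rChild) {
        this.ch = ch;
        this.freq = freq;
        this.lChild = lChild;
        this.rChild = rChild;
    }

    // Order nodes by frequency so the PriorityQueue always yields the two rarest subtrees first.
    @Override
    public int compareTo(Node other) {
        return Integer.compare(this.freq, other.freq);
    }
}
```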
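In the full course design the encoder is driven from a Swing GUI through the DataShare interface; as a quick sketch of the intended call sequence (the input file name "test.txt" is just a placeholder):

```java
// Minimal driver sketch: encode a text file in the working directory and
// print the resulting bit string and the compression ratio.
public class HuffmanEncoderDemo {
    public static void main(String[] args) throws java.io.IOException {
        HuffmanEncoder encoder = new HuffmanEncoder("test.txt");
        encoder.execute();                            // builds the tree and code map, writes both output files
        System.out.println(encoder.getEncodedText()); // the '0'/'1' string written to EncodedText.txt
        System.out.println(encoder.calculateCompressionRatio());
    }
}
```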