package org.sleuthkit.autopsy.modules.hashdatabase;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Iterator;
import java.util.logging.Level;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.TskCoreException;
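
/*
 * Input format note (illustrative, not from the original source): the parser
 * below treats the first CSV record as a header and only requires a column
 * literally named "hash" holding 32-character MD5 values. A hypothetical
 * compatible file could start like the two lines below; any other columns
 * are ignored by this parser.
 *
 *     file_name,hash,file_size
 *     example.bin,d41d8cd98f00b204e9800998ecf8427e,0
 */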

// Parser for Hashkeeper hash sets: imports MD5 hashes from a CSV file that has a "hash" column.
class HashkeeperHashSetParser implements HashSetParser {

    private final String filename;
    private InputStreamReader inputStreamReader = null;
    private CSVParser csvParser = null;
    private final long expectedHashCount;              // Estimated number of hashes in the file
    private final Iterator<CSVRecord> recordIterator;
    private final int hashColumnIndex;                 // Index of the "hash" column

    HashkeeperHashSetParser(String filename) throws TskCoreException {
        this.filename = filename;

        try {
            // Estimate the number of hashes from the file size (roughly 75 bytes per record)
            File importFile = new File(filename);
            long fileSize = importFile.length();
            expectedHashCount = fileSize / 75 + 1;

            // Create the CSV parser, treating the first record as the header
            inputStreamReader = new InputStreamReader(new FileInputStream(filename));
            csvParser = CSVFormat.RFC4180.withFirstRecordAsHeader().parse(inputStreamReader);
            if (!csvParser.getHeaderMap().keySet().contains("hash")) {
                close();
                throw new TskCoreException("Hashkeeper file format invalid - does not contain 'hash' column");
            }

            // Store the hash column index and build an iterator over all records
            hashColumnIndex = csvParser.getHeaderMap().get("hash");
            recordIterator = csvParser.getRecords().listIterator();
        } catch (IOException ex) {
            close();
            throw new TskCoreException("Error reading " + filename, ex);
        }
    }

    // Get the next hash to import; returns null when the end of the file is reached.
    @Override
    public String getNextHash() throws TskCoreException {
        if (recordIterator.hasNext()) {
            CSVRecord record = recordIterator.next();
            String hash = record.get(hashColumnIndex);

            // Hashkeeper entries are MD5 hashes: 32 hexadecimal characters
            if (hash.length() != 32) {
                throw new TskCoreException("Hash has incorrect length: " + hash);
            }
            return hash;
        }
        return null;
    }

    // Check whether all records have been read.
    @Override
    public boolean doneReading() {
        return (!recordIterator.hasNext());
    }

    // Get the expected number of hashes in the file; this is an estimate based on file size.
    @Override
    public long getExpectedHashCount() {
        return expectedHashCount;
    }

    // Close the import file.
    @Override
    public final void close() {
        if (inputStreamReader != null) {
            try {
                inputStreamReader.close();
            } catch (IOException ex) {
                Logger.getLogger(HashkeeperHashSetParser.class.getName()).log(Level.SEVERE, "Error closing Hashkeeper hash set " + filename, ex);
            } finally {
                inputStreamReader = null;
            }
        }
    }
}
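
/*
 * Usage sketch (not part of the original class): a minimal, hypothetical loop
 * showing how this parser could be driven. The real import plumbing in Autopsy
 * and the handling of TskCoreException are omitted for brevity, and the file
 * name is made up.
 *
 *     HashkeeperHashSetParser parser = new HashkeeperHashSetParser("example.hsh");
 *     try {
 *         long expected = parser.getExpectedHashCount();   // rough progress estimate
 *         while (!parser.doneReading()) {
 *             String md5 = parser.getNextHash();           // 32-character MD5, or null at end of file
 *             if (md5 != null) {
 *                 // add md5 to the destination hash database here
 *             }
 *         }
 *     } finally {
 *         parser.close();
 *     }
 */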