package org.sleuthkit.autopsy.modules.hashdatabase;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Iterator;
import java.util.logging.Level;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.sleuthkit.datamodel.TskCoreException;
 
// Constructor logic: size up the import file, open it, and configure the
// CSV parser for the Hashkeeper format.
File importFile = new File(filename);
long fileSize = importFile.length();
// Rough estimate of the number of hashes, assuming about 75 bytes per record.
expectedHashCount = fileSize / 75 + 1;

try {
    inputStreamReader = new InputStreamReader(new FileInputStream(filename));

    // Parse as RFC 4180 CSV, taking the column names from the first record.
    csvParser = CSVFormat.RFC4180.builder().setHeader().setSkipHeaderRecord(true).build().parse(inputStreamReader);

    // A Hashkeeper file must contain a "hash" column.
    if (!csvParser.getHeaderMap().containsKey("hash")) {
        throw new TskCoreException("Hashkeeper file format invalid - does not contain 'hash' column");
    }

    hashColumnIndex = csvParser.getHeaderMap().get("hash");

    // Load all records up front and iterate over them as hashes are requested.
    recordIterator = csvParser.getRecords().listIterator();
} catch (IOException ex) {
    // error handling elided in this excerpt
// Fetch the next hash: read the next CSV record, pull the value from the
// "hash" column, and validate that it is a 32-character (MD5) string.
if (recordIterator.hasNext()) {
    CSVRecord record = recordIterator.next();
    String hash = record.get(hashColumnIndex);

    if (hash.length() != 32) {
        throw new TskCoreException("Hash has incorrect length: " + hash);
    }
    // remainder elided in this excerpt
}

// Reading is done once the record iterator is exhausted.
return (!recordIterator.hasNext());
 
// Close the reader and clear the reference so the parser cannot be reused.
if (inputStreamReader != null) {
    try {
        inputStreamReader.close();
    } catch (IOException ex) {
        // failure logging elided in this excerpt
    } finally {
        inputStreamReader = null;
    }
}
// Field and method declarations associated with the excerpt:
//   final Iterator<CSVRecord> recordIterator
//   InputStreamReader inputStreamReader
//   final long expectedHashCount
//   final int hashColumnIndex
//   long getExpectedHashCount()
//   synchronized static Logger getLogger(String name)
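
// For reference, a minimal, self-contained sketch of the same Apache Commons CSV
// pattern used above: RFC 4180 parsing with the header taken from the first record,
// then a named column looked up through getHeaderMap(). The standalone class, the
// "hashes.csv" path, and the plain IOException handling are illustrative only and
// are not part of the Autopsy code.

import java.io.IOException;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class CsvHashColumnDemo {

    public static void main(String[] args) throws IOException {
        // "hashes.csv" is an illustrative path: any RFC 4180 CSV whose first
        // record is a header containing a "hash" column will work.
        try (Reader reader = Files.newBufferedReader(Paths.get("hashes.csv"));
                CSVParser parser = CSVFormat.RFC4180.builder()
                        .setHeader()                 // take column names from the first record
                        .setSkipHeaderRecord(true)   // do not return the header as data
                        .build()
                        .parse(reader)) {

            // getHeaderMap() maps column name -> column index, as in the excerpt above.
            Integer hashColumn = parser.getHeaderMap().get("hash");
            if (hashColumn == null) {
                throw new IOException("CSV does not contain a 'hash' column");
            }

            for (CSVRecord record : parser) {
                String hash = record.get(hashColumn);
                // A Hashkeeper MD5 hash is expected to be 32 hex characters.
                if (hash.length() == 32) {
                    System.out.println(hash);
                }
            }
        }
    }
}

// Unlike the excerpt, which keeps the reader open across calls and closes it
// explicitly, the sketch uses try-with-resources because it consumes the file
// in a single pass.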