package org.sleuthkit.autopsy.keywordsearch;

import java.io.BufferedReader;
import java.io.Reader;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.logging.Level;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrInputDocument;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
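/**
 * Handles indexing content into the Solr keyword search index. Text from a
 * source is split into chunks by Chunker, each chunk is added as its own Solr
 * document, and a final "parent" document records the total chunk count.
 */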
class Ingester {

    private static final Logger logger = Logger.getLogger(Ingester.class.getName());
    private volatile boolean uncommitedIngests = false;
    private final Server solrServer = KeywordSearch.getServer();
    private static final SolrFieldsVisitor SOLR_FIELDS_VISITOR = new SolrFieldsVisitor();
    private static Ingester instance;
    private final LanguageSpecificContentIndexingHelper languageSpecificContentIndexingHelper
            = new LanguageSpecificContentIndexingHelper();
    public static synchronized Ingester getDefault() {
        if (instance == null) {
            instance = new Ingester();
        }
        return instance;
    }
    @Override
    @SuppressWarnings("FinalizeDeclaration")
    protected void finalize() throws Throwable {
        super.finalize();
        if (uncommitedIngests) {
            logger.warning("Ingester was used to add files that it never committed.");
        }
    }
    void indexMetaDataOnly(AbstractFile file) throws IngesterException {
        indexChunk("", "", file.getName().toLowerCase(), new HashMap<>(getContentFields(file)));
    }
    void indexMetaDataOnly(BlackboardArtifact artifact, String sourceName) throws IngesterException {
        indexChunk("", "", sourceName, new HashMap<>(getContentFields(artifact)));
    }
    private Map<String, String> getContentFields(SleuthkitVisitableItem item) {
        return item.accept(SOLR_FIELDS_VISITOR);
    }
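    /**
     * Reads the text from the given Reader, splits it into chunks with
     * Chunker, and indexes each chunk as a separate Solr document; a final
     * "parent" document carrying NUM_CHUNKS is indexed afterwards. This
     * variant performs language detection on each chunk; indexStrings() below
     * runs the same flow with language detection disabled.
     */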
    <T extends SleuthkitVisitableItem> boolean indexText(Reader sourceReader, long sourceID, String sourceName, T source, IngestJobContext context) throws Ingester.IngesterException {
        boolean doLanguageDetection = true;
        return indexText(sourceReader, sourceID, sourceName, source, context, doLanguageDetection);
    }
    <T extends SleuthkitVisitableItem> boolean indexStrings(Reader sourceReader, long sourceID, String sourceName, T source, IngestJobContext context) throws Ingester.IngesterException {
        boolean doLanguageDetection = false;
        return indexText(sourceReader, sourceID, sourceName, source, context, doLanguageDetection);
    }
    private <T extends SleuthkitVisitableItem> boolean indexText(Reader sourceReader, long sourceID, String sourceName, T source, IngestJobContext context, boolean doLanguageDetection) throws Ingester.IngesterException {
        int numChunks = 0; //unknown until chunking is done

        Map<String, String> contentFields = Collections.unmodifiableMap(getContentFields(source));
        //Get a reader for the content of the given source
        try (BufferedReader reader = new BufferedReader(sourceReader)) {
            Chunker chunker = new Chunker(reader);
            while (chunker.hasNext()) {
                if (context != null && context.fileIngestIsCancelled()) {
                    logger.log(Level.INFO, "File ingest cancelled. Cancelling keyword search indexing of {0}", sourceName);
                    return false;
                }

                Chunk chunk = chunker.next();
                Map<String, Object> fields = new HashMap<>(contentFields);
                String chunkId = Server.getChunkIdString(sourceID, numChunks + 1);
                fields.put(Server.Schema.ID.toString(), chunkId);
                fields.put(Server.Schema.CHUNK_SIZE.toString(), String.valueOf(chunk.getBaseChunkLength()));

                Optional<Language> language = Optional.empty();
                if (doLanguageDetection) {
                    language = languageSpecificContentIndexingHelper.detectLanguageIfNeeded(chunk);
                    language.ifPresent(lang -> languageSpecificContentIndexingHelper.updateLanguageSpecificFields(fields, chunk, lang));
                }
                try {
                    //add the chunk text to the Solr index
                    indexChunk(chunk.toString(), chunk.geLowerCasedChunk(), sourceName, fields);
                    //add a mini chunk when there is a language specific field
                    if (chunker.hasNext() && language.isPresent()) {
                        languageSpecificContentIndexingHelper.indexMiniChunk(chunk, sourceName, new HashMap<>(contentFields), chunkId, language.get());
                    }
                    numChunks++;
                } catch (Ingester.IngesterException ingEx) {
                    logger.log(Level.WARNING, "Ingester had a problem with extracted string from file '" + sourceName + "' (id: " + sourceID + ").", ingEx);
                    throw ingEx; //rethrow to signal error and move on
                }
            }
            if (chunker.hasException()) {
                logger.log(Level.WARNING, "Error chunking content from " + sourceID + ": " + sourceName, chunker.getException());
                return false;
            }
        } catch (Exception ex) {
            logger.log(Level.WARNING, "Unexpected error, can't read content stream from " + sourceID + ": " + sourceName, ex);
            return false;
        } finally {
            if (context != null && context.fileIngestIsCancelled()) {
                return false;
            } else {
                //after all chunks, index just the meta data, including numChunks, of the parent item
                Map<String, Object> fields = new HashMap<>(contentFields);
                fields.put(Server.Schema.NUM_CHUNKS.toString(), Integer.toString(numChunks));
                //reset the id field to the base document id
                fields.put(Server.Schema.ID.toString(), Long.toString(sourceID));
                //"parent" docs don't have chunk_size
                fields.remove(Server.Schema.CHUNK_SIZE.toString());
                indexChunk(null, null, sourceName, fields);
            }
        }
        return true;
    }
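    /**
     * Builds a SolrInputDocument from the given field map (String values are
     * run through Chunker.sanitize()), attaches the chunk text and, for index
     * schema 2.1 and later, its lower-cased form in CONTENT_STR, then sends
     * the document to the Solr server. Throws IngesterException if the image
     * id is missing or the add fails.
     */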
    private void indexChunk(String chunk, String lowerCasedChunk, String sourceName, Map<String, Object> fields) throws IngesterException {
        if (fields.get(Server.Schema.IMAGE_ID.toString()) == null) {
            //skip the item, image id unknown
            String msg = NbBundle.getMessage(Ingester.class,
                    "Ingester.ingest.exception.unknownImgId.msg", sourceName);
            logger.log(Level.SEVERE, msg);
            throw new IngesterException(msg);
        }

        //Make a SolrInputDocument out of the field map
        SolrInputDocument updateDoc = new SolrInputDocument();
        for (String key : fields.keySet()) {
            if (fields.get(key).getClass() == String.class) {
                updateDoc.addField(key, Chunker.sanitize((String) fields.get(key)).toString());
            } else {
                updateDoc.addField(key, fields.get(key));
            }
        }
        //add the content to the SolrInputDocument
        updateDoc.addField(Server.Schema.CONTENT.toString(), chunk);

        try {
            //also add the content in lowercase form (index schema 2.1 and later)
            //to facilitate case-insensitive substring and regular expression search
            double indexSchemaVersion = NumberUtils.toDouble(solrServer.getIndexInfo().getSchemaVersion());
            if (indexSchemaVersion >= 2.1) {
                updateDoc.addField(Server.Schema.CONTENT_STR.toString(), ((chunk == null) ? "" : lowerCasedChunk));
            }

            TimingMetric metric = HealthMonitor.getTimingMetric("Solr: Index chunk");
            solrServer.addDocument(updateDoc);
            HealthMonitor.submitTimingMetric(metric);
            uncommitedIngests = true;
        } catch (KeywordSearchModuleException | NoOpenCoreException ex) {
            throw new IngesterException(
                    NbBundle.getMessage(Ingester.class, "Ingester.ingest.exception.err.msg", sourceName), ex);
        }
    }
    /**
     * Commits the Solr index so that pending ingested documents become searchable.
     */
    void commit() {
        try {
            solrServer.commit();
            uncommitedIngests = false;
        } catch (NoOpenCoreException | SolrServerException ex) {
            logger.log(Level.WARNING, "Error committing index", ex);
        }
    }
    static private class SolrFieldsVisitor extends SleuthkitItemVisitor.Default<Map<String, String>> {

        @Override
        protected Map<String, String> defaultVisit(SleuthkitVisitableItem svi) {
            return new HashMap<>();
        }
        @Override
        public Map<String, String> visit(File f) {
            return getCommonAndMACTimeFields(f);
        }

        @Override
        public Map<String, String> visit(DerivedFile df) {
            return getCommonAndMACTimeFields(df);
        }

        @Override
        public Map<String, String> visit(Directory d) {
            return getCommonAndMACTimeFields(d);
        }

        @Override
        public Map<String, String> visit(LocalDirectory ld) {
            return getCommonFields(ld);
        }

        @Override
        public Map<String, String> visit(LayoutFile lf) {
            return getCommonFields(lf);
        }

        @Override
        public Map<String, String> visit(LocalFile lf) {
            return getCommonAndMACTimeFields(lf);
        }

        @Override
        public Map<String, String> visit(SlackFile f) {
            return getCommonAndMACTimeFields(f);
        }
        private Map<String, String> getCommonFields(AbstractFile file) {
            Map<String, String> params = new HashMap<>();
            params.put(Server.Schema.ID.toString(), Long.toString(file.getId()));
            try {
                params.put(Server.Schema.IMAGE_ID.toString(), Long.toString(file.getDataSource().getId()));
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, "Could not get data source id to properly index the file " + file.getId(), ex);
                params.put(Server.Schema.IMAGE_ID.toString(), Long.toString(-1));
            }
            params.put(Server.Schema.FILE_NAME.toString(), file.getName().toLowerCase());
            return params;
        }
        @Override
        public Map<String, String> visit(BlackboardArtifact artifact) {
            Map<String, String> params = new HashMap<>();
            params.put(Server.Schema.ID.toString(), Long.toString(artifact.getArtifactID()));
            try {
                params.put(Server.Schema.IMAGE_ID.toString(), Long.toString(artifact.getDataSource().getId()));
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, "Could not get data source id to properly index the artifact " + artifact.getArtifactID(), ex);
                params.put(Server.Schema.IMAGE_ID.toString(), Long.toString(-1));
            }
            return params;
        }
        @Override
        public Map<String, String> visit(Report report) {
            Map<String, String> params = new HashMap<>();
            params.put(Server.Schema.ID.toString(), Long.toString(report.getId()));
            try {
                Content dataSource = report.getDataSource();
                if (null == dataSource) {
                    params.put(Server.Schema.IMAGE_ID.toString(), Long.toString(-1));
                } else {
                    params.put(Server.Schema.IMAGE_ID.toString(), Long.toString(dataSource.getId()));
                }
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, "Could not get data source id to properly index the report, using default value. Id: " + report.getId(), ex);
                params.put(Server.Schema.IMAGE_ID.toString(), Long.toString(-1));
            }
            return params;
        }
    }
    static class IngesterException extends Exception {

        private static final long serialVersionUID = 1L;

        IngesterException(String message, Throwable ex) {
            super(message, ex);
        }

        IngesterException(String message) {
            super(message);
        }
    }
}
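// A minimal usage sketch (illustrative, not part of this class): a keyword
// search ingest module in this package could drive the Ingester roughly like
// this, assuming it has an AbstractFile `file`, a Reader `textReader` over the
// file's extracted text, and an IngestJobContext `context` in scope:
//
//   Ingester ingester = Ingester.getDefault();
//   try {
//       ingester.indexText(textReader, file.getId(), file.getName(), file, context);
//   } catch (Ingester.IngesterException ex) {
//       logger.log(Level.WARNING, "Could not index text for " + file.getName(), ex);
//   }
//   // Periodically (not necessarily per file) push pending documents to Solr:
//   ingester.commit();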