package org.sleuthkit.autopsy.keywordsearch;
import java.io.BufferedReader;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrInputDocument;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.*; //AbstractFile, BlackboardArtifact, Report, TskCoreException, etc.
/**
 * Handles indexing files on a Solr core.
 */
class Ingester {

    private static final Logger logger = Logger.getLogger(Ingester.class.getName());
    private volatile boolean uncommitedIngests = false;
    private final Server solrServer = KeywordSearch.getServer();
    private static final SolrFieldsVisitor SOLR_FIELDS_VISITOR = new SolrFieldsVisitor();
    private static Ingester instance;
    private static final int SINGLE_READ_CHARS = 512;

    private Ingester() {
    }
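    /**
     * Gets the singleton Ingester, creating it if necessary.
     */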
    public static synchronized Ingester getDefault() {
        if (instance == null) {
            instance = new Ingester();
        }
        return instance;
    }
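    /**
     * Logs a warning at garbage collection if documents were added to the
     * index but never committed.
     */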
    @Override
    @SuppressWarnings("FinalizeDeclaration")
    protected void finalize() throws Throwable {
        super.finalize();
        if (uncommitedIngests) {
            logger.warning("Ingester was used to add files that it never committed."); //NON-NLS
        }
    }
    void indexMetaDataOnly(AbstractFile file) throws IngesterException {
        indexChunk("", file.getName().toLowerCase(), getContentFields(file));
    }
    void indexMetaDataOnly(BlackboardArtifact artifact) throws IngesterException {
        indexChunk("", new ArtifactTextExtractor().getName(artifact), getContentFields(artifact));
    }
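    /**
     * Creates the field map for the given item by dispatching to
     * SOLR_FIELDS_VISITOR; the map is later sent to Solr.
     */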
    private Map<String, String> getContentFields(SleuthkitVisitableItem item) {
        return item.accept(SOLR_FIELDS_VISITOR);
    }
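    /**
     * Extracts text from the given source using the given extractor, chunks
     * it, and indexes each chunk in Solr as a separate document. Returns true
     * if indexing completed, false if it was cancelled or the content could
     * not be read.
     */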
    <T extends SleuthkitVisitableItem> boolean indexText(TextExtractor<T> extractor, T source, IngestJobContext context) throws Ingester.IngesterException {
        final long sourceID = extractor.getID(source);
        final String sourceName = extractor.getName(source);
        int numChunks = 0; //unknown until chunking is done

        if (extractor.isDisabled()) {
            /* Some extractors, notably the strings extractor, can be
             * configured so that no extraction is done. */
            return true;
        }

        Map<String, String> fields = getContentFields(source);
        //Get a reader for the content of the given source.
        try (BufferedReader reader = new BufferedReader(extractor.getReader(source))) {
            Chunker chunker = new Chunker(reader);
            for (Chunker.Chunk chunk : chunker) {
                if (context != null && context.fileIngestIsCancelled()) {
                    logger.log(Level.INFO, "File ingest cancelled. Cancelling keyword search indexing of {0}", sourceName);
                    return false;
                }
                String chunkId = Server.getChunkIdString(sourceID, numChunks + 1);
                fields.put(Server.Schema.ID.toString(), chunkId);
                fields.put(Server.Schema.CHUNK_SIZE.toString(), String.valueOf(chunk.getBaseChunkLength()));
                try {
                    //Add the chunk text to the Solr index.
                    indexChunk(chunk.toString(), sourceName, fields);
                    numChunks++;
                } catch (Ingester.IngesterException ingEx) {
                    extractor.logWarning("Ingester had a problem with extracted string from file '" //NON-NLS
                            + sourceName + "' (id: " + sourceID + ").", ingEx); //NON-NLS
                    throw ingEx; //rethrow to signal the error and move on
                }
            }
            if (chunker.hasException()) {
                extractor.logWarning("Error chunking content from " + sourceID + ": " + sourceName, chunker.getException());
                return false;
            }
        } catch (Exception ex) {
            extractor.logWarning("Unexpected error, can't read content stream from " + sourceID + ": " + sourceName, ex); //NON-NLS
            return false;
        } finally {
            if (context != null && context.fileIngestIsCancelled()) {
                return false;
            } else {
                //After the chunks, index a "parent" document carrying the
                //metadata, including the number of chunks.
                fields.put(Server.Schema.NUM_CHUNKS.toString(), Integer.toString(numChunks));
                //Reset the ID field to the base document ID.
                fields.put(Server.Schema.ID.toString(), Long.toString(sourceID));
                //Parent documents don't have a chunk size.
                fields.remove(Server.Schema.CHUNK_SIZE.toString());
                indexChunk(null, sourceName, fields);
            }
        }
        return true;
    }
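    /**
     * Adds one chunk to the Solr index as a separate Solr document. A null
     * chunk indexes a metadata-only "parent" document.
     */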
    private void indexChunk(String chunk, String sourceName, Map<String, String> fields) throws IngesterException {
        if (fields.get(Server.Schema.IMAGE_ID.toString()) == null) {
            //Skip the document; the image id is unknown.
            String msg = NbBundle.getMessage(Ingester.class,
                    "Ingester.ingest.exception.unknownImgId.msg", sourceName);
            logger.log(Level.SEVERE, msg);
            throw new IngesterException(msg);
        }
        //Make a SolrInputDocument out of the field map.
        SolrInputDocument updateDoc = new SolrInputDocument();
        for (String key : fields.keySet()) {
            updateDoc.addField(key, fields.get(key));
        }

        try {
            //Add the content to the document.
            updateDoc.addField(Server.Schema.CONTENT.toString(), chunk);
            //Schema 2.1 and later also store the lowercased chunk text in a
            //string field.
            double indexSchemaVersion = NumberUtils.toDouble(solrServer.getIndexInfo().getSchemaVersion());
            if (indexSchemaVersion >= 2.1) {
                updateDoc.addField(Server.Schema.CONTENT_STR.toString(), ((chunk == null) ? "" : chunk.toLowerCase()));
            }
            TimingMetric metric = HealthMonitor.getTimingMetric("Solr: Index chunk");
            solrServer.addDocument(updateDoc);
            HealthMonitor.submitTimingMetric(metric);
            uncommitedIngests = true;
        } catch (KeywordSearchModuleException | NoOpenCoreException ex) {
            throw new IngesterException(
                    NbBundle.getMessage(Ingester.class, "Ingester.ingest.exception.err.msg", sourceName), ex);
        }
    }
    void commit() {
        try {
            solrServer.commit();
            uncommitedIngests = false;
        } catch (NoOpenCoreException | SolrServerException ex) {
            logger.log(Level.WARNING, "Error committing index", ex); //NON-NLS
        }
    }
    static private class SolrFieldsVisitor extends SleuthkitItemVisitor.Default<Map<String, String>> {

        @Override
        protected Map<String, String> defaultVisit(SleuthkitVisitableItem svi) {
            return new HashMap<>();
        }
        @Override
        public Map<String, String> visit(File f) {
            return getCommonAndMACTimeFields(f);
        }

        @Override
        public Map<String, String> visit(DerivedFile df) {
            return getCommonAndMACTimeFields(df);
        }

        @Override
        public Map<String, String> visit(Directory d) {
            return getCommonAndMACTimeFields(d);
        }

        @Override
        public Map<String, String> visit(LocalDirectory ld) {
            return getCommonAndMACTimeFields(ld);
        }

        @Override
        public Map<String, String> visit(LayoutFile lf) {
            //Layout files do not have MAC times.
            return getCommonFields(lf);
        }

        @Override
        public Map<String, String> visit(LocalFile lf) {
            return getCommonAndMACTimeFields(lf);
        }

        @Override
        public Map<String, String> visit(SlackFile f) {
            return getCommonAndMACTimeFields(f);
        }
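        /**
         * Get the field map for an AbstractFile, including the MAC times.
         * The body below is a reconstruction: it augments getCommonFields()
         * with the four time fields; the use of the
         * ContentUtils.getStringTimeISO8601 helper is an assumption.
         */
        private Map<String, String> getCommonAndMACTimeFields(AbstractFile file) {
            Map<String, String> params = getCommonFields(file);
            params.put(Server.Schema.CTIME.toString(), ContentUtils.getStringTimeISO8601(file.getCtime(), file));
            params.put(Server.Schema.ATIME.toString(), ContentUtils.getStringTimeISO8601(file.getAtime(), file));
            params.put(Server.Schema.MTIME.toString(), ContentUtils.getStringTimeISO8601(file.getMtime(), file));
            params.put(Server.Schema.CRTIME.toString(), ContentUtils.getStringTimeISO8601(file.getCrtime(), file));
            return params;
        }

        /**
         * Get the fields common to all AbstractFiles: id, image id, and
         * lowercased file name.
         */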
        private Map<String, String> getCommonFields(AbstractFile file) {
            Map<String, String> params = new HashMap<>();
            params.put(Server.Schema.ID.toString(), Long.toString(file.getId()));
            try {
                params.put(Server.Schema.IMAGE_ID.toString(), Long.toString(file.getDataSource().getId()));
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, "Could not get data source id to properly index the file " + file.getId(), ex); //NON-NLS
                params.put(Server.Schema.IMAGE_ID.toString(), Long.toString(-1));
            }
            params.put(Server.Schema.FILE_NAME.toString(), file.getName().toLowerCase());
            return params;
        }
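        /**
         * Get the field map for a blackboard artifact.
         */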
        @Override
        public Map<String, String> visit(BlackboardArtifact artifact) {
            Map<String, String> params = new HashMap<>();
            params.put(Server.Schema.ID.toString(), Long.toString(artifact.getArtifactID()));
            try {
                params.put(Server.Schema.IMAGE_ID.toString(), Long.toString(ArtifactTextExtractor.getDataSource(artifact).getId()));
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, "Could not get data source id to properly index the artifact " + artifact.getArtifactID(), ex); //NON-NLS
                params.put(Server.Schema.IMAGE_ID.toString(), Long.toString(-1));
            }
            return params;
        }
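        /**
         * Get the field map for a report.
         */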
        @Override
        public Map<String, String> visit(Report report) {
            Map<String, String> params = new HashMap<>();
            params.put(Server.Schema.ID.toString(), Long.toString(report.getId()));
            try {
                Content dataSource = report.getDataSource();
                if (null == dataSource) {
                    params.put(Server.Schema.IMAGE_ID.toString(), Long.toString(-1));
                } else {
                    params.put(Server.Schema.IMAGE_ID.toString(), Long.toString(dataSource.getId()));
                }
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, "Could not get data source id to properly index the report, using default value. Id: " + report.getId(), ex); //NON-NLS
                params.put(Server.Schema.IMAGE_ID.toString(), Long.toString(-1));
            }
            return params;
        }
    }
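    /**
     * Indicates an error during a specific ingest operation; it is still okay
     * to continue ingesting files afterwards.
     */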
    static class IngesterException extends Exception {

        private static final long serialVersionUID = 1L;

        IngesterException(String message, Throwable ex) {
            super(message, ex);
        }

        IngesterException(String message) {
            super(message);
        }
    }
}