Autopsy  4.19.3
Graphical digital forensics platform for The Sleuth Kit and other tools.
CommandLineIngestManager.java
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2019-2022 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.commandlineingest;

import com.google.gson.GsonBuilder;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import org.netbeans.spi.sendopts.OptionProcessor;
import org.openide.LifecycleManager;
import org.openide.util.Lookup;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;

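/**
 * Runs Autopsy ingest from the command line. CommandLineOptionProcessor parses
 * the command line options into an ordered list of CommandLineCommand objects
 * (create case, add data source, run ingest, list data sources, generate
 * reports, list ingest profiles), and a background JobProcessingTask executes
 * those commands in order before shutting Autopsy down with an exit code.
 *
 * An invocation typically looks something like the following (option names are
 * taken from the Autopsy command line ingest documentation; consult
 * CommandLineOptionProcessor for the authoritative set of options):
 *
 *   autopsy64 --createCase --caseName=case1 --caseBaseDir=C:\Cases
 *             --addDataSource --dataSourcePath=C:\Images\image1.img --runIngest
 */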
public class CommandLineIngestManager extends CommandLineManager {

    private static final Logger LOGGER = Logger.getLogger(CommandLineIngestManager.class.getName());
    private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED);
    private Case caseForJob = null;
    private AutoIngestDataSource dataSource = null;

    static final int CL_SUCCESS = 0;
    static final int CL_RUN_FAILURE = -1;
    static final int CL_PROCESS_FAILURE = -2;

    public CommandLineIngestManager() {
    }

    public void start() {
        new Thread(new JobProcessingTask()).start();
    }

    void stop() {
        stop(CL_SUCCESS);
    }

    void stop(int errorCode) {
        try {
            // close current case if there is one open
            Case.closeCurrentCase();
        } catch (CaseActionException ex) {
            LOGGER.log(Level.WARNING, "Unable to close the case while shutting down command line ingest manager", ex); //NON-NLS
        }

        // shut down Autopsy
        if (errorCode == CL_SUCCESS) {
            LifecycleManager.getDefault().exit();
        } else {
            LifecycleManager.getDefault().exit(errorCode);
        }
    }

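    /*
     * Worker task that executes the parsed command line commands on a
     * background thread. The constructor switches the runtime properties to
     * headless mode so that no desktop GUI components are created while the
     * commands run.
     */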
    private final class JobProcessingTask implements Runnable {

        private final Object ingestLock;

        private JobProcessingTask() {
            ingestLock = new Object();
            try {
                RuntimeProperties.setRunningWithGUI(false);
                LOGGER.log(Level.INFO, "Set running with desktop GUI runtime property to false");
            } catch (RuntimeProperties.RuntimePropertiesException ex) {
                LOGGER.log(Level.SEVERE, "Failed to set running with desktop GUI runtime property to false", ex);
            }
        }

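        /*
         * Looks up the CommandLineOptionProcessor, retrieves the ordered list
         * of commands, and dispatches on each command type. Any failure stops
         * command processing; the finally blocks always close the current case
         * (if one is open) and shut Autopsy down via stop() with the
         * accumulated error code.
         */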
        @Override
        public void run() {
            LOGGER.log(Level.INFO, "Job processing task started");
            int errorCode = CL_SUCCESS;

            try {
                // read command line inputs
                LOGGER.log(Level.INFO, "Autopsy is running from command line"); //NON-NLS
                List<CommandLineCommand> commands = null;

                // first look up all OptionProcessors and get input data from CommandLineOptionProcessor
                Collection<? extends OptionProcessor> optionProcessors = Lookup.getDefault().lookupAll(OptionProcessor.class);
                Iterator<? extends OptionProcessor> optionsIterator = optionProcessors.iterator();
                while (optionsIterator.hasNext()) {
                    // find CommandLineOptionProcessor
                    OptionProcessor processor = optionsIterator.next();
                    if (processor instanceof CommandLineOptionProcessor) {
                        // check if we are running from command line
                        commands = ((CommandLineOptionProcessor) processor).getCommands();
                    }
                }
                try {
                    if (commands == null || commands.isEmpty()) {
                        LOGGER.log(Level.SEVERE, "No command line commands specified");
                        System.out.println("No command line commands specified");
                        errorCode = CL_RUN_FAILURE;
                        return;
                    }

                    // Commands are already stored in the order in which they should be executed
                    for (CommandLineCommand command : commands) {
                        CommandLineCommand.CommandType type = command.getType();
                        switch (type) {
                            case CREATE_CASE:
                                try {
                                    LOGGER.log(Level.INFO, "Processing 'Create Case' command");
                                    System.out.println("Processing 'Create Case' command");
                                    Map<String, String> inputs = command.getInputs();
                                    String baseCaseName = inputs.get(CommandLineCommand.InputType.CASE_NAME.name());
                                    String rootOutputDirectory = inputs.get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
                                    CaseType caseType = CaseType.SINGLE_USER_CASE;
                                    String caseTypeString = inputs.get(CommandLineCommand.InputType.CASE_TYPE.name());
                                    if (caseTypeString != null && caseTypeString.equalsIgnoreCase(CommandLineOptionProcessor.CASETYPE_MULTI)) {
                                        caseType = CaseType.MULTI_USER_CASE;
                                    }
                                    caseForJob = createCase(baseCaseName, rootOutputDirectory, caseType);

                                    String outputDirPath = getOutputDirPath(caseForJob);
                                    OutputGenerator.saveCreateCaseOutput(caseForJob, outputDirPath, baseCaseName);
                                } catch (CaseActionException ex) {
                                    String baseCaseName = command.getInputs().get(CommandLineCommand.InputType.CASE_NAME.name());
                                    LOGGER.log(Level.SEVERE, "Error creating or opening case " + baseCaseName, ex);
                                    System.out.println("Error creating or opening case " + baseCaseName);
                                    // Do not process any other commands
                                    errorCode = CL_RUN_FAILURE;
                                    return;
                                }
                                break;
                            case ADD_DATA_SOURCE:
                                try {
                                    LOGGER.log(Level.INFO, "Processing 'Add Data Source' command");
                                    System.out.println("Processing 'Add Data Source' command");
                                    Map<String, String> inputs = command.getInputs();

                                    // open the case, if it hasn't been already opened by CREATE_CASE command
                                    if (caseForJob == null) {
                                        // find case output directory by name and open the case
                                        String baseCaseName = inputs.get(CommandLineCommand.InputType.CASE_NAME.name());
                                        String rootOutputDirectory = inputs.get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
                                        caseForJob = openExistingCase(baseCaseName, rootOutputDirectory);
                                    }

                                    String dataSourcePath = inputs.get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
                                    dataSource = new AutoIngestDataSource(UUID.randomUUID().toString(), Paths.get(dataSourcePath));
                                    runDataSourceProcessor(caseForJob, dataSource);

                                    String outputDirPath = getOutputDirPath(caseForJob);
                                    OutputGenerator.saveAddDataSourceOutput(caseForJob, dataSource, outputDirPath);
                                } catch (InterruptedException | CaseActionException | AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException ex) {
                                    String dataSourcePath = command.getInputs().get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
                                    LOGGER.log(Level.SEVERE, "Error adding data source " + dataSourcePath, ex);
                                    System.out.println("Error adding data source " + dataSourcePath);
                                    // Do not process any other commands
                                    errorCode = CL_RUN_FAILURE;
                                    return;
                                }
                                break;
                            case RUN_INGEST:
                                try {
                                    LOGGER.log(Level.INFO, "Processing 'Run Ingest' command");
                                    System.out.println("Processing 'Run Ingest' command");
                                    Map<String, String> inputs = command.getInputs();

                                    // open the case, if it hasn't been already opened by CREATE_CASE command
                                    if (caseForJob == null) {
                                        // find case output directory by name and open the case
                                        String baseCaseName = inputs.get(CommandLineCommand.InputType.CASE_NAME.name());
                                        String rootOutputDirectory = inputs.get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
                                        caseForJob = openExistingCase(baseCaseName, rootOutputDirectory);
                                    }

                                    // populate the AutoIngestDataSource structure, if that hasn't been done by ADD_DATA_SOURCE command
                                    if (dataSource == null) {

                                        String dataSourceId = inputs.get(CommandLineCommand.InputType.DATA_SOURCE_ID.name());
                                        Long dataSourceObjId = Long.valueOf(dataSourceId);

                                        // get Content object for the data source
                                        Content content = null;
                                        try {
                                            content = Case.getCurrentCaseThrows().getSleuthkitCase().getContentById(dataSourceObjId);
                                        } catch (TskCoreException ex) {
                                            LOGGER.log(Level.SEVERE, "Exception while trying to find data source with object ID " + dataSourceId, ex);
                                            System.out.println("Exception while trying to find data source with object ID " + dataSourceId);
                                            // Do not process any other commands
                                            errorCode = CL_RUN_FAILURE;
                                            return;
                                        }

                                        if (content == null) {
                                            LOGGER.log(Level.SEVERE, "Unable to find data source with object ID {0}", dataSourceId);
                                            System.out.println("Unable to find data source with object ID " + dataSourceId);
                                            // Do not process any other commands
                                            return;
                                        }

                                        // populate the AutoIngestDataSource structure
                                        dataSource = new AutoIngestDataSource("", Paths.get(content.getName()));
                                        List<Content> contentList = Arrays.asList(new Content[]{content});
                                        List<String> errorList = new ArrayList<>();
                                        dataSource.setDataSourceProcessorOutput(NO_ERRORS, errorList, contentList);
                                    }

                                    // run ingest
                                    String ingestProfile = inputs.get(CommandLineCommand.InputType.INGEST_PROFILE_NAME.name());
                                    analyze(dataSource, ingestProfile);
                                } catch (InterruptedException | CaseActionException | AnalysisStartupException ex) {
                                    String dataSourcePath = command.getInputs().get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
                                    LOGGER.log(Level.SEVERE, "Error running ingest on data source " + dataSourcePath, ex);
                                    System.out.println("Error running ingest on data source " + dataSourcePath);
                                    // Do not process any other commands
                                    errorCode = CL_RUN_FAILURE;
                                    return;
                                }
                                break;

                            case LIST_ALL_DATA_SOURCES:
                                try {
                                    LOGGER.log(Level.INFO, "Processing 'List All Data Sources' command");
                                    System.out.println("Processing 'List All Data Sources' command");
                                    Map<String, String> inputs = command.getInputs();

                                    // open the case, if it hasn't been already opened by CREATE_CASE command
                                    if (caseForJob == null) {
                                        // find case output directory by name and open the case
                                        String baseCaseName = inputs.get(CommandLineCommand.InputType.CASE_NAME.name());
                                        String rootOutputDirectory = inputs.get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
                                        caseForJob = openExistingCase(baseCaseName, rootOutputDirectory);
                                    }

                                    String outputDirPath = getOutputDirPath(caseForJob);
                                    OutputGenerator.listAllDataSources(caseForJob, outputDirPath);
                                } catch (CaseActionException ex) {
                                    String baseCaseName = command.getInputs().get(CommandLineCommand.InputType.CASE_NAME.name());
                                    String rootOutputDirectory = command.getInputs().get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
                                    String msg = "Error opening case " + baseCaseName + " in directory: " + rootOutputDirectory;
                                    LOGGER.log(Level.SEVERE, msg, ex);
                                    System.out.println(msg);
                                    errorCode = CL_RUN_FAILURE;
                                    // Do not process any other commands
                                    return;
                                }
                                break;

                            case GENERATE_REPORTS:
                                try {
                                    LOGGER.log(Level.INFO, "Processing 'Generate Reports' command");
                                    System.out.println("Processing 'Generate Reports' command");
                                    Map<String, String> inputs = command.getInputs();

                                    // open the case, if it hasn't been already opened by CREATE_CASE command
                                    if (caseForJob == null) {
                                        // find case output directory by name and open the case
                                        String baseCaseName = inputs.get(CommandLineCommand.InputType.CASE_NAME.name());
                                        String rootOutputDirectory = inputs.get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
                                        caseForJob = openExistingCase(baseCaseName, rootOutputDirectory);
                                    }
                                    // generate reports
                                    String reportName = inputs.get(CommandLineCommand.InputType.REPORT_PROFILE_NAME.name());
                                    if (reportName == null) {
                                        // no report profile was specified; the default reporting configuration is used
                                    }

                                    // generate reports
                                    ReportGenerator generator = new ReportGenerator(reportName, progressIndicator);
                                    generator.generateReports();
                                } catch (CaseActionException ex) {
                                    String baseCaseName = command.getInputs().get(CommandLineCommand.InputType.CASE_NAME.name());
                                    String rootOutputDirectory = command.getInputs().get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
                                    String msg = "Error opening case " + baseCaseName + " in directory: " + rootOutputDirectory;
                                    LOGGER.log(Level.SEVERE, msg, ex);
                                    System.out.println(msg);
                                    errorCode = CL_RUN_FAILURE;
                                    // Do not process any other commands
                                    return;
                                } catch (Exception ex) {
                                    String msg = "An exception occurred while generating report: " + ex.getMessage();
                                    LOGGER.log(Level.WARNING, msg, ex);
                                    System.out.println(msg);
                                    errorCode = CL_RUN_FAILURE;
                                    // Do not process any other commands
                                    return;
                                }
                                break;
                            case LIST_ALL_INGEST_PROFILES:
                                List<IngestProfile> profiles = IngestProfiles.getIngestProfiles();
                                GsonBuilder gb = new GsonBuilder();
                                System.out.println("Listing ingest profiles");
                                for (IngestProfile profile : profiles) {
                                    String jsonText = gb.create().toJson(profile);
                                    System.out.println(jsonText);
                                }
                                System.out.println("Ingest profile list complete");
                                break;
                            default:
                                break;
                        }
                    }
                } catch (Throwable ex) {
                    /*
                     * Unexpected runtime exceptions firewall. This task is
                     * designed to be able to be run in an executor service
                     * thread pool without calling get() on the task's
                     * Future<Void>, so this ensures that such errors get
                     * logged.
                     */
                    LOGGER.log(Level.SEVERE, "Unexpected error", ex);
                    System.out.println("Unexpected error. Exiting...");
                    errorCode = CL_RUN_FAILURE;
                } finally {
                    try {
                        Case.closeCurrentCase();
                    } catch (CaseActionException ex) {
                        LOGGER.log(Level.WARNING, "Exception while closing case", ex);
                        System.out.println("Exception while closing case");
                    }
                }

            } finally {
                LOGGER.log(Level.INFO, "Job processing task finished");
                System.out.println("Job processing task finished");

                // shut down Autopsy
                stop(errorCode);
            }
        }

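        /*
         * Adds a data source to the case. Data source processors are tried in
         * decreasing order of confidence; the calling thread blocks on
         * ingestLock until the AddDataSourceCallback signals completion. If
         * every processor fails, an AutoIngestDataSourceProcessorException is
         * thrown to the caller.
         */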
        private void runDataSourceProcessor(Case caseForJob, AutoIngestDataSource dataSource) throws InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException {

            LOGGER.log(Level.INFO, "Adding data source {0} ", dataSource.getPath().toString());

            // Get an ordered list of data source processors to try
            List<AutoIngestDataSourceProcessor> validDataSourceProcessors;
            try {
                validDataSourceProcessors = DataSourceProcessorUtility.getOrderedListOfDataSourceProcessors(dataSource.getPath());
            } catch (AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException ex) {
                LOGGER.log(Level.SEVERE, "Exception while determining best data source processor for {0}", dataSource.getPath());
                // rethrow the exception.
                throw ex;
            }

            // did we find a data source processor that can process the data source
            if (validDataSourceProcessors.isEmpty()) {
                // This should never happen. We should add all unsupported data sources as logical files.
                LOGGER.log(Level.SEVERE, "Unsupported data source {0}", dataSource.getPath()); // NON-NLS
                return;
            }

            DataSourceProcessorProgressMonitor progressMonitor = new DoNothingDSPProgressMonitor();
            synchronized (ingestLock) {
                // Try each DSP in decreasing order of confidence
                for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) {
                    UUID taskId = UUID.randomUUID();
                    caseForJob.notifyAddingDataSource(taskId);
                    DataSourceProcessorCallback callBack = new AddDataSourceCallback(caseForJob, dataSource, taskId, ingestLock);
                    caseForJob.notifyAddingDataSource(taskId);
                    LOGGER.log(Level.INFO, "Identified data source type for {0} as {1}", new Object[]{dataSource.getPath(), selectedProcessor.getDataSourceType()});
                    selectedProcessor.process(dataSource.getDeviceId(), dataSource.getPath(), progressMonitor, callBack);
                    ingestLock.wait();

                    // at this point we got the content object(s) from the current DSP.
                    // check whether the data source was processed successfully
                    if ((dataSource.getResultDataSourceProcessorResultCode() == CRITICAL_ERRORS)
                            || dataSource.getContent().isEmpty()) {
                        // move onto the next DSP that can process this data source
                        logDataSourceProcessorResult(dataSource);
                        continue;
                    }

                    logDataSourceProcessorResult(dataSource);
                    return;
                }
                // If we get to this point, none of the processors were successful
                LOGGER.log(Level.SEVERE, "All data source processors failed to process {0}", dataSource.getPath());
                // Throw an exception. It will get caught & handled upstream and will result in AIM auto-pause.
                throw new AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException("Failed to process " + dataSource.getPath() + " with all data source processors");
            }
        }

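        /*
         * Logs the result code and any error messages reported by the data
         * source processor for the given data source.
         */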
        private void logDataSourceProcessorResult(AutoIngestDataSource dataSource) {

            DataSourceProcessorCallback.DataSourceProcessorResult resultCode = dataSource.getResultDataSourceProcessorResultCode();
            if (null != resultCode) {
                switch (resultCode) {
                    case NO_ERRORS:
                        LOGGER.log(Level.INFO, "Added data source to case");
                        if (dataSource.getContent().isEmpty()) {
                            LOGGER.log(Level.SEVERE, "Data source failed to produce content");
                        }
                        break;

                    case NONCRITICAL_ERRORS:
                        for (String errorMessage : dataSource.getDataSourceProcessorErrorMessages()) {
                            LOGGER.log(Level.WARNING, "Non-critical error running data source processor for {0}: {1}", new Object[]{dataSource.getPath(), errorMessage});
                        }
                        LOGGER.log(Level.INFO, "Added data source to case");
                        if (dataSource.getContent().isEmpty()) {
                            LOGGER.log(Level.SEVERE, "Data source failed to produce content");
                        }
                        break;

                    case CRITICAL_ERRORS:
                        for (String errorMessage : dataSource.getDataSourceProcessorErrorMessages()) {
                            LOGGER.log(Level.SEVERE, "Critical error running data source processor for {0}: {1}", new Object[]{dataSource.getPath(), errorMessage});
                        }
                        LOGGER.log(Level.SEVERE, "Failed to add data source to case");
                        break;
                }
            } else {
                LOGGER.log(Level.WARNING, "No result code for data source processor for {0}", dataSource.getPath());
            }
        }

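        /*
         * Runs ingest modules on the data source. If an ingest profile name
         * was supplied, the profile and its file filter are resolved first;
         * otherwise the baseline ingest job settings are used. The method
         * registers an ingest job event listener, starts the ingest job, and
         * waits on ingestLock until the job completes or is cancelled.
         */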
        private void analyze(AutoIngestDataSource dataSource, String ingestProfileName) throws AnalysisStartupException, InterruptedException {

            LOGGER.log(Level.INFO, "Starting ingest modules analysis for {0} ", dataSource.getPath());

            // configure ingest profile and file filter
            IngestProfiles.IngestProfile selectedProfile = null;
            FilesSet selectedFileSet = null;
            if (!ingestProfileName.isEmpty()) {
                selectedProfile = getSelectedProfile(ingestProfileName);
                if (selectedProfile == null) {
                    // unable to find the user specified profile
                    LOGGER.log(Level.SEVERE, "Unable to find ingest profile: {0}. Ingest cancelled!", ingestProfileName);
                    System.out.println("Unable to find ingest profile: " + ingestProfileName + ". Ingest cancelled!");
                    throw new AnalysisStartupException("Unable to find ingest profile: " + ingestProfileName + ". Ingest cancelled!");
                }

                // get FileSet filter associated with this profile
                selectedFileSet = getSelectedFilter(selectedProfile.getFileIngestFilter());
                if (selectedFileSet == null) {
                    // unable to find the file filter specified by the profile
                    LOGGER.log(Level.SEVERE, "Unable to find file filter {0} for ingest profile: {1}. Ingest cancelled!", new Object[]{selectedProfile.getFileIngestFilter(), ingestProfileName});
                    System.out.println("Unable to find file filter " + selectedProfile.getFileIngestFilter() + " for ingest profile: " + ingestProfileName + ". Ingest cancelled!");
                    throw new AnalysisStartupException("Unable to find file filter " + selectedProfile.getFileIngestFilter() + " for ingest profile: " + ingestProfileName + ". Ingest cancelled!");
                }
            }

            IngestJobEventListener ingestJobEventListener = new IngestJobEventListener();
            IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, ingestJobEventListener);
            try {
                synchronized (ingestLock) {
                    IngestJobSettings ingestJobSettings;
                    if (selectedProfile == null || selectedFileSet == null) {
                        // use baseline configuration
                    } else {
                        // load the custom ingest profile
                        ingestJobSettings = new IngestJobSettings(IngestProfilePaths.getInstance().getIngestProfilePrefix() + selectedProfile.toString());
                        ingestJobSettings.setFileFilter(selectedFileSet);
                    }

                    List<String> settingsWarnings = ingestJobSettings.getWarnings();
                    if (settingsWarnings.isEmpty()) {
                        IngestJobStartResult ingestJobStartResult = IngestManager.getInstance().beginIngestJob(dataSource.getContent(), ingestJobSettings);
                        IngestJob ingestJob = ingestJobStartResult.getJob();
                        if (null != ingestJob) {
                            /*
                             * Block until notified by the ingest job event
                             * listener or until interrupted because auto ingest
                             * is shutting down. For very small jobs, it is
                             * possible that ingest has completed by the time we
                             * get here, so check periodically in case the event
                             * was missed.
                             */
                            while (IngestManager.getInstance().isIngestRunning()) {
                                ingestLock.wait(60000); // Check every minute
                            }

                            LOGGER.log(Level.INFO, "Finished ingest modules analysis for {0} ", dataSource.getPath());
                            IngestJob.ProgressSnapshot jobSnapshot = ingestJob.getSnapshot();
                            IngestJob.ProgressSnapshot.DataSourceProcessingSnapshot snapshot = jobSnapshot.getDataSourceProcessingSnapshot();
                            if (!snapshot.isCancelled()) {
                                List<String> cancelledModules = snapshot.getCancelledDataSourceIngestModules();
                                if (!cancelledModules.isEmpty()) {
                                    LOGGER.log(Level.WARNING, String.format("Ingest module(s) cancelled for %s", dataSource.getPath()));
                                    for (String module : snapshot.getCancelledDataSourceIngestModules()) {
                                        LOGGER.log(Level.WARNING, String.format("%s ingest module cancelled for %s", module, dataSource.getPath()));
                                    }
                                }
                                LOGGER.log(Level.INFO, "Analysis of data source completed");
                            } else {
                                LOGGER.log(Level.WARNING, "Analysis of data source cancelled");
                                IngestJob.CancellationReason cancellationReason = snapshot.getCancellationReason();
                                if (IngestJob.CancellationReason.NOT_CANCELLED != cancellationReason && IngestJob.CancellationReason.USER_CANCELLED != cancellationReason) {
                                    throw new AnalysisStartupException(String.format("Analysis cancelled due to %s for %s", cancellationReason.getDisplayName(), dataSource.getPath()));
                                }
                            }
                        } else if (!ingestJobStartResult.getModuleErrors().isEmpty()) {
                            for (IngestModuleError error : ingestJobStartResult.getModuleErrors()) {
                                LOGGER.log(Level.SEVERE, String.format("%s ingest module startup error for %s", error.getModuleDisplayName(), dataSource.getPath()), error.getThrowable());
                            }
                            LOGGER.log(Level.SEVERE, "Failed to analyze data source due to ingest job startup error");
                            throw new AnalysisStartupException(String.format("Error(s) during ingest module startup for %s", dataSource.getPath()));
                        } else {
                            LOGGER.log(Level.SEVERE, String.format("Ingest manager ingest job start error for %s", dataSource.getPath()), ingestJobStartResult.getStartupException());
                            throw new AnalysisStartupException("Ingest manager error starting job", ingestJobStartResult.getStartupException());
                        }
                    } else {
                        for (String warning : settingsWarnings) {
                            LOGGER.log(Level.SEVERE, "Ingest job settings error for {0}: {1}", new Object[]{dataSource.getPath(), warning});
                        }
                        LOGGER.log(Level.SEVERE, "Failed to analyze data source due to settings errors");
                        throw new AnalysisStartupException("Error(s) in ingest job settings");
                    }
                }
            } finally {
                IngestManager.getInstance().removeIngestJobEventListener(ingestJobEventListener);
            }
        }

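        /*
         * Finds the ingest profile whose name matches the supplied profile
         * name (case-insensitive), or returns null if no such profile exists.
         */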
        private IngestProfiles.IngestProfile getSelectedProfile(String ingestProfileName) {

            IngestProfiles.IngestProfile selectedProfile = null;
            // lookup the profile by name
            for (IngestProfile profile : IngestProfiles.getIngestProfiles()) {
                if (profile.toString().equalsIgnoreCase(ingestProfileName)) {
                    // found the profile
                    selectedProfile = profile;
                    break;
                }
            }
            return selectedProfile;
        }

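        /*
         * Looks up a file ingest filter (FilesSet) by name among the filters
         * known to FilesSetsManager, or returns null if the lookup fails.
         */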
        private FilesSet getSelectedFilter(String filterName) {
            try {
                Map<String, FilesSet> fileIngestFilters = FilesSetsManager.getInstance()
                        .getCustomFileIngestFilters();
                for (FilesSet fSet : FilesSetsManager.getStandardFileIngestFilters()) {
                    fileIngestFilters.put(fSet.getName(), fSet);
                }
                return fileIngestFilters.get(filterName);
            } catch (FilesSetsManager.FilesSetsManagerException ex) {
                LOGGER.log(Level.SEVERE, "Failed to get file ingest filter: " + filterName, ex); //NON-NLS
                return null;
            }
        }

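        /*
         * Property change listener that wakes up the thread waiting on
         * ingestLock when a local ingest job COMPLETED or CANCELLED event is
         * received.
         */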
        private class IngestJobEventListener implements PropertyChangeListener {

            @Override
            public void propertyChange(PropertyChangeEvent event) {
                if (AutopsyEvent.SourceType.LOCAL == ((AutopsyEvent) event).getSourceType()) {
                    String eventType = event.getPropertyName();
                    if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
                        synchronized (ingestLock) {
                            ingestLock.notify();
                        }
                    }
                }
            }
        };

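        /*
         * A data source processor progress monitor that ignores all progress
         * updates, since there is no GUI to report progress to in command
         * line mode.
         */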
        private class DoNothingDSPProgressMonitor implements DataSourceProcessorProgressMonitor {

            @Override
            public void setIndeterminate(final boolean indeterminate) {
            }

            @Override
            public void setProgress(final int progress) {
            }

            @Override
            public void setProgressText(final String text) {
            }
        }

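        /*
         * Exception type thrown when there is a problem configuring or
         * starting ingest module analysis of a data source.
         */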
        private final class AnalysisStartupException extends Exception {

            private static final long serialVersionUID = 1L;

            private AnalysisStartupException(String message) {
                super(message);
            }

            private AnalysisStartupException(String message, Throwable cause) {
                super(message, cause);
            }
        }
    }
}