Autopsy  4.19.0
Graphical digital forensics platform for The Sleuth Kit and other tools.
HashDbIngestModule.java
Go to the documentation of this file.
1 /*
2  * Autopsy Forensic Browser
3  *
4  * Copyright 2011-2018 Basis Technology Corp.
5  * Contact: carrier <at> sleuthkit <dot> org
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  * http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  */
19 package org.sleuthkit.autopsy.modules.hashdatabase;
20 
21 import java.util.ArrayList;
22 import java.util.Arrays;
23 import java.util.Collection;
24 import java.util.HashMap;
25 import java.util.List;
26 import java.util.concurrent.atomic.AtomicLong;
27 import java.util.function.Function;
28 import java.util.logging.Level;
29 import java.util.stream.Stream;
30 import org.openide.util.NbBundle;
31 import org.openide.util.NbBundle.Messages;
43 import org.sleuthkit.datamodel.AbstractFile;
44 import org.sleuthkit.datamodel.Blackboard;
45 import org.sleuthkit.datamodel.BlackboardArtifact;
46 import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
47 import org.sleuthkit.datamodel.BlackboardAttribute;
48 import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
49 import org.sleuthkit.datamodel.HashHitInfo;
50 import org.sleuthkit.datamodel.HashUtility;
51 import org.sleuthkit.datamodel.Score;
52 import org.sleuthkit.datamodel.Score.Significance;
53 import org.sleuthkit.datamodel.SleuthkitCase;
54 import org.sleuthkit.datamodel.TskCoreException;
55 import org.sleuthkit.datamodel.TskData;
56 import org.sleuthkit.datamodel.TskException;
57 
61 @Messages({
62  "HashDbIngestModule.noKnownBadHashDbSetMsg=No notable hash set.",
63  "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn=Notable file search will not be executed.",
64  "HashDbIngestModule.noKnownHashDbSetMsg=No known hash set.",
65  "HashDbIngestModule.knownFileSearchWillNotExecuteWarn=Known file search will not be executed.",
66  "# {0} - fileName", "HashDbIngestModule.lookingUpKnownBadHashValueErr=Error encountered while looking up notable hash value for {0}.",
67  "# {0} - fileName", "HashDbIngestModule.lookingUpNoChangeHashValueErr=Error encountered while looking up no change hash value for {0}.",
68  "# {0} - fileName", "HashDbIngestModule.lookingUpKnownHashValueErr=Error encountered while looking up known hash value for {0}.",})
69 public class HashDbIngestModule implements FileIngestModule {
70 
71  private static final Logger logger = Logger.getLogger(HashDbIngestModule.class.getName());
72 
73  private final Function<AbstractFile, String> knownBadLookupError
74  = (file) -> Bundle.HashDbIngestModule_lookingUpKnownBadHashValueErr(file.getName());
75 
76  private final Function<AbstractFile, String> noChangeLookupError
77  = (file) -> Bundle.HashDbIngestModule_lookingUpNoChangeHashValueErr(file.getName());
78 
79  private final Function<AbstractFile, String> knownLookupError
80  = (file) -> Bundle.HashDbIngestModule_lookingUpKnownHashValueErr(file.getName());
81 
82  private static final int MAX_COMMENT_SIZE = 500;
83  private final IngestServices services = IngestServices.getInstance();
84  private final SleuthkitCase skCase;
85  private final HashDbManager hashDbManager = HashDbManager.getInstance();
86  private final HashLookupModuleSettings settings;
87  private final List<HashDb> knownBadHashSets = new ArrayList<>();
88  private final List<HashDb> knownHashSets = new ArrayList<>();
89  private final List<HashDb> noChangeHashSets = new ArrayList<>();
90  private long jobId;
91  private static final HashMap<Long, IngestJobTotals> totalsForIngestJobs = new HashMap<>();
92  private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
93  private Blackboard blackboard;
94 
98  private static class IngestJobTotals {
99 
100  private final AtomicLong totalKnownBadCount = new AtomicLong(0);
101  private final AtomicLong totalNoChangeCount = new AtomicLong(0);
102  private final AtomicLong totalCalctime = new AtomicLong(0);
103  private final AtomicLong totalLookuptime = new AtomicLong(0);
104  }
105 
106  private static synchronized IngestJobTotals getTotalsForIngestJobs(long ingestJobId) {
107  IngestJobTotals totals = totalsForIngestJobs.get(ingestJobId);
108  if (totals == null) {
109  totals = new HashDbIngestModule.IngestJobTotals();
110  totalsForIngestJobs.put(ingestJobId, totals);
111  }
112  return totals;
113  }
114 
    /**
     * Constructs a file-level ingest module that looks up file hashes in the
     * configured hash sets.
     *
     * @param settings The settings selecting which hash sets are enabled.
     *
     * @throws NoCurrentCaseException Presumably thrown while obtaining the
     *                                current case; the final field skCase is not
     *                                initialized in the lines visible here, so an
     *                                initialization statement appears to be
     *                                missing from this extract — TODO confirm
     *                                against the full source.
     */
    HashDbIngestModule(HashLookupModuleSettings settings) throws NoCurrentCaseException {
        this.settings = settings;
    }
128 
    @Override
    // NOTE(review): the method signature line (presumably
    // "public void startUp(IngestJobContext context) throws IngestModuleException {")
    // and the "services.postMessage(IngestMessage.createWarningMessage(..."
    // openings of the two warning posts below are missing from this extract —
    // confirm against the full source.
        jobId = context.getJobId();
        // Fail fast if any configured hash set could not be loaded.
        if (!hashDbManager.verifyAllDatabasesLoadedCorrectly()) {
            throw new IngestModuleException("Could not load all hash sets");
        }

        // Sort the enabled hash sets into the known / notable / no-change lists.
        initializeHashsets(hashDbManager.getAllHashSets());

        if (refCounter.incrementAndGet(jobId) == 1) {
            // initialize job totals
            getTotalsForIngestJobs(jobId);

            // if first module for this job then post error msgs if needed
            if (knownBadHashSets.isEmpty()) {
                    Bundle.HashDbIngestModule_noKnownBadHashDbSetMsg(),
                    Bundle.HashDbIngestModule_knownBadFileSearchWillNotExecuteWarn()));
            }

            if (knownHashSets.isEmpty()) {
                    Bundle.HashDbIngestModule_noKnownHashDbSetMsg(),
                    Bundle.HashDbIngestModule_knownFileSearchWillNotExecuteWarn()));
            }
        }
    }
158 
165  private void initializeHashsets(List<HashDb> allHashSets) {
166  for (HashDb db : allHashSets) {
167  if (settings.isHashSetEnabled(db)) {
168  try {
169  if (db.isValid()) {
170  switch (db.getKnownFilesType()) {
171  case KNOWN:
172  knownHashSets.add(db);
173  break;
174  case KNOWN_BAD:
175  knownBadHashSets.add(db);
176  break;
177  case NO_CHANGE:
178  noChangeHashSets.add(db);
179  break;
180  default:
181  throw new TskCoreException("Unknown KnownFilesType: " + db.getKnownFilesType());
182  }
183  }
184  } catch (TskCoreException ex) {
185  logger.log(Level.WARNING, "Error getting index status for " + db.getDisplayName() + " hash set", ex); //NON-NLS
186  }
187  }
188  }
189  }
190 
    @Messages({
        "# {0} - File name",
        "HashDbIngestModule.dialogTitle.errorFindingArtifacts=Error Finding Artifacts: {0}",
        "# {0} - File name",
        "HashDbIngestModule.errorMessage.lookingForFileArtifacts=Error encountered while looking for existing artifacts for {0}."
    })
    @Override
    public ProcessResult process(AbstractFile file) {
        // Re-acquire the blackboard for the current case on every call; the
        // open case can change between ingest jobs.
        try {
            blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
        } catch (NoCurrentCaseException ex) {
            logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
            return ProcessResult.ERROR;
        }

        // Unallocated/slack pseudo-files, directories, and no-work configurations are skipped.
        if (shouldSkip(file)) {
            return ProcessResult.OK;
        }

        // Safely get a reference to the totalsForIngestJobs object
        IngestJobTotals totals = getTotalsForIngestJobs(jobId);

        // calc hash values
        try {
            calculateHashes(file, totals);
        } catch (TskCoreException ex) {
            logger.log(Level.WARNING, String.format("Error calculating hash of file '%s' (id=%d).", file.getName(), file.getId()), ex); //NON-NLS
            // NOTE(review): the "services.postMessage(IngestMessage.createErrorMessage("
            // opening of this error post is missing from this extract.
                NbBundle.getMessage(this.getClass(), "HashDbIngestModule.fileReadErrorMsg", file.getName()),
                NbBundle.getMessage(this.getClass(), "HashDbIngestModule.calcHashValueErr",
                    file.getParentPath() + file.getName(),
                    file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC) ? "Allocated File" : "Deleted File")));
        }

        // the processing result of handling this file
        // NOTE(review): the declaration of 'ret' (presumably
        // "ProcessResult ret = ProcessResult.OK;") is missing from this extract —
        // confirm against the full source.

        // look up in notable first
        FindInHashsetsResult knownBadResult = findInHashsets(file, totals.totalKnownBadCount,
            totals.totalLookuptime, knownBadHashSets, TskData.FileKnown.BAD, knownBadLookupError);

        boolean foundBad = knownBadResult.isFound();
        if (knownBadResult.isError()) {
            ret = ProcessResult.ERROR;
        }

        // look up no change items next
        FindInHashsetsResult noChangeResult = findInHashsets(file, totals.totalNoChangeCount,
            totals.totalLookuptime, noChangeHashSets, TskData.FileKnown.UNKNOWN, noChangeLookupError);

        if (noChangeResult.isError()) {
            ret = ProcessResult.ERROR;
        }

        // If the file is not in the notable sets, search for it in the known sets.
        // Any hit is sufficient to classify it as known, and there is no need to create
        // a hit artifact or send a message to the application inbox.
        if (!foundBad) {
            for (HashDb db : knownHashSets) {
                try {
                    long lookupstart = System.currentTimeMillis();
                    if (db.lookupMD5Quick(file)) {
                        file.setKnown(TskData.FileKnown.KNOWN);
                        break;
                    }
                    // NOTE(review): when the lookup hits, the break above skips the
                    // timing update below, so successful known lookups are not
                    // counted in totalLookuptime — confirm whether intentional.
                    long delta = (System.currentTimeMillis() - lookupstart);
                    totals.totalLookuptime.addAndGet(delta);

                } catch (TskException ex) {
                    reportLookupError(ex, file, knownLookupError);
                    ret = ProcessResult.ERROR;
                }
            }
        }

        return ret;
    }
269 
277  private boolean shouldSkip(AbstractFile file) {
278  // Skip unallocated space files.
279  if ((file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
280  || file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) {
281  return true;
282  }
283 
284  /*
285  * Skip directories. One reason for this is because we won't accurately
286  * calculate hashes of NTFS directories that have content that spans the
287  * IDX_ROOT and IDX_ALLOC artifacts. So we disable that until a solution
288  * for it is developed.
289  */
290  if (file.isDir()) {
291  return true;
292  }
293 
294  // bail out if we have no hashes set
295  if ((knownHashSets.isEmpty()) && (knownBadHashSets.isEmpty()) && (!settings.shouldCalculateHashes())) {
296  return true;
297  }
298 
299  return false;
300  }
301 
    /**
     * Logs a hash lookup failure and posts an error message to the ingest inbox.
     *
     * @param ex                 The exception thrown by the lookup.
     * @param file               The file whose lookup failed.
     * @param lookupErrorMessage Produces the user-facing message for the file.
     */
    private void reportLookupError(TskException ex, AbstractFile file, Function<AbstractFile, String> lookupErrorMessage) {
        logger.log(Level.WARNING, String.format(
            "Couldn't lookup notable hash for file '%s' (id=%d) - see sleuthkit log for details", file.getName(), file.getId()), ex); //NON-NLS
        // NOTE(review): the "services.postMessage(IngestMessage.createErrorMessage("
        // opening of this post is missing from this extract — confirm against the
        // full source.
            NbBundle.getMessage(this.getClass(), "HashDbIngestModule.hashLookupErrorMsg", file.getName()),
            lookupErrorMessage.apply(file)));
    }
319 
323  private static class FindInHashsetsResult {
324 
325  private final boolean found;
326  private final boolean error;
327 
328  FindInHashsetsResult(boolean found, boolean error) {
329  this.found = found;
330  this.error = error;
331  }
332 
338  boolean isFound() {
339  return found;
340  }
341 
349  boolean isError() {
350  return error;
351  }
352  }
353 
372  private FindInHashsetsResult findInHashsets(AbstractFile file, AtomicLong totalCount, AtomicLong totalLookupTime,
373  List<HashDb> hashSets, TskData.FileKnown statusIfFound, Function<AbstractFile, String> lookupErrorMessage) {
374 
375  boolean found = false;
376  boolean wasError = false;
377  for (HashDb db : hashSets) {
378  try {
379  long lookupstart = System.currentTimeMillis();
380  HashHitInfo hashInfo = db.lookupMD5(file);
381  if (null != hashInfo) {
382  found = true;
383 
384  totalCount.incrementAndGet();
385  file.setKnown(statusIfFound);
386  String comment = generateComment(hashInfo);
387  if (!createArtifactIfNotExists(file, comment, db)) {
388  wasError = true;
389  }
390  }
391  long delta = (System.currentTimeMillis() - lookupstart);
392  totalLookupTime.addAndGet(delta);
393 
394  } catch (TskException ex) {
395  reportLookupError(ex, file, lookupErrorMessage);
396  wasError = true;
397  }
398  }
399 
400  return new FindInHashsetsResult(found, wasError);
401  }
402 
410  private String generateComment(HashHitInfo hashInfo) {
411  String comment = "";
412  ArrayList<String> comments = hashInfo.getComments();
413  int i = 0;
414  for (String c : comments) {
415  if (++i > 1) {
416  comment += " ";
417  }
418  comment += c;
419  if (comment.length() > MAX_COMMENT_SIZE) {
420  comment = comment.substring(0, MAX_COMMENT_SIZE) + "...";
421  break;
422  }
423  }
424  return comment;
425  }
426 
    /**
     * Posts a hash-set-hit artifact for the file unless an artifact with the
     * same set name already exists on the blackboard.
     *
     * @param file    The file that matched a hash set.
     * @param comment The comment assembled from the hash set entry.
     * @param db      The hash set that produced the hit.
     *
     * @return True on success (including the already-exists case); false if
     *         the existence check failed.
     */
    private boolean createArtifactIfNotExists(AbstractFile file, String comment, HashDb db) {
        /*
         * We have a match. Now create an artifact if it is determined that one
         * hasn't been created yet.
         */
        List<BlackboardAttribute> attributesList = new ArrayList<>();
        attributesList.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, HashLookupModuleFactory.getModuleName(), db.getDisplayName()));
        try {
            Blackboard tskBlackboard = skCase.getBlackboard();
            if (tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, attributesList) == false) {
                postHashSetHitToBlackboard(file, file.getMd5Hash(), db, comment);
            }
        } catch (TskCoreException ex) {
            logger.log(Level.SEVERE, String.format(
                "A problem occurred while checking for existing artifacts for file '%s' (id=%d).", file.getName(), file.getId()), ex); //NON-NLS
            // NOTE(review): the "services.postMessage(IngestMessage.createErrorMessage("
            // opening of this post is missing from this extract.
                Bundle.HashDbIngestModule_dialogTitle_errorFindingArtifacts(file.getName()),
                Bundle.HashDbIngestModule_errorMessage_lookingForFileArtifacts(file.getName())));
            return false;
        }
        return true;
    }
459 
    /**
     * Calculates and stores the MD5 and SHA-256 hashes of a file, skipping the
     * work when both are already present, and records the elapsed time in the
     * job totals and the health monitor.
     *
     * @param file   The file to hash.
     * @param totals Accumulates the hash-calculation time for the job.
     *
     * @throws TskCoreException If the hash calculation fails.
     */
    private void calculateHashes(AbstractFile file, IngestJobTotals totals) throws TskCoreException {

        // First check if we've already calculated the hashes.
        String md5Hash = file.getMd5Hash();
        String sha256Hash = file.getSha256Hash();
        if ((md5Hash != null && ! md5Hash.isEmpty())
            && (sha256Hash != null && ! sha256Hash.isEmpty())) {
            return;
        }

        TimingMetric metric = HealthMonitor.getTimingMetric("Disk Reads: Hash calculation");
        long calcstart = System.currentTimeMillis();
        List<HashUtility.HashResult> newHashResults =
            HashUtility.calculateHashes(file, Arrays.asList(HashUtility.HashType.MD5,HashUtility.HashType.SHA256 ));
        if (file.getSize() > 0) {
            // Surprisingly, the hash calculation does not seem to be correlated that
            // strongly with file size until the files get large.
            // Only normalize if the file size is greater than ~1MB.
            if (file.getSize() < 1000000) {
                // NOTE(review): the statement for this branch (presumably
                // "HealthMonitor.submitTimingMetric(metric);") is missing from
                // this extract — confirm against the full source.
            } else {
                // In testing, this normalization gave reasonable resuls
                HealthMonitor.submitNormalizedTimingMetric(metric, file.getSize() / 500000);
            }
        }
        // Store whichever hashes were produced back on the file object.
        for (HashUtility.HashResult hash : newHashResults) {
            if (hash.getType().equals(HashUtility.HashType.MD5)) {
                file.setMd5Hash(hash.getValue());
            } else if (hash.getType().equals(HashUtility.HashType.SHA256)) {
                file.setSha256Hash(hash.getValue());
            }
        }
        long delta = (System.currentTimeMillis() - calcstart);
        totals.totalCalctime.addAndGet(delta);
    }
502 
508  private Score getScore(HashDb.KnownFilesType knownFilesType) {
509  if (knownFilesType == null) {
510  return Score.SCORE_UNKNOWN;
511  }
512  switch (knownFilesType) {
513  case KNOWN:
514  return Score.SCORE_NONE;
515  case KNOWN_BAD:
516  return Score.SCORE_NOTABLE;
517  default:
518  case NO_CHANGE:
519  return Score.SCORE_UNKNOWN;
520  }
521  }
    /**
     * Creates a hash-set-hit analysis result on the file, posts it to the
     * blackboard, and — if the hash set requests it — sends a data message with
     * an HTML details table to the ingest inbox.
     *
     * @param abstractFile The file that matched the hash set.
     * @param md5Hash      The file's MD5 hash value.
     * @param db           The hash set that produced the hit.
     * @param comment      The comment to attach to the artifact.
     */
    @Messages({
        "HashDbIngestModule.indexError.message=Failed to index hashset hit artifact for keyword search."
    })
    private void postHashSetHitToBlackboard(AbstractFile abstractFile, String md5Hash, HashDb db, String comment) {
        try {
            String moduleName = HashLookupModuleFactory.getModuleName();

            List<BlackboardAttribute> attributes = Arrays.asList(
                new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, moduleName, db.getDisplayName()),
                new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_HASH_MD5, moduleName, md5Hash),
                new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, moduleName, comment)
            );

            // BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList
            BlackboardArtifact badFile = abstractFile.newAnalysisResult(
                BlackboardArtifact.Type.TSK_HASHSET_HIT, getScore(db.getKnownFilesType()),
                null, db.getDisplayName(), null,
                attributes
            ).getAnalysisResult();

            try {
                /*
                 * post the artifact which will index the artifact for keyword
                 * search, and fire an event to notify UI of this new artifact
                 */
                blackboard.postArtifact(badFile, moduleName);
            } catch (Blackboard.BlackboardException ex) {
                logger.log(Level.SEVERE, "Unable to index blackboard artifact " + badFile.getArtifactID(), ex); //NON-NLS
                // NOTE(review): the opening of this call (presumably
                // "MessageNotifyUtil.Notify.error(") is missing from this extract.
                    Bundle.HashDbIngestModule_indexError_message(), badFile.getDisplayName());
            }

            if (db.getSendIngestMessages()) {
                StringBuilder detailsSb = new StringBuilder();
                //details
                detailsSb.append("<table border='0' cellpadding='4' width='280'>"); //NON-NLS
                //hit
                detailsSb.append("<tr>"); //NON-NLS
                detailsSb.append("<th>") //NON-NLS
                    .append(NbBundle.getMessage(this.getClass(), "HashDbIngestModule.postToBB.fileName"))
                    .append("</th>"); //NON-NLS
                detailsSb.append("<td>") //NON-NLS
                    .append(abstractFile.getName())
                    .append("</td>"); //NON-NLS
                detailsSb.append("</tr>"); //NON-NLS

                detailsSb.append("<tr>"); //NON-NLS
                detailsSb.append("<th>") //NON-NLS
                    .append(NbBundle.getMessage(this.getClass(), "HashDbIngestModule.postToBB.md5Hash"))
                    .append("</th>"); //NON-NLS
                detailsSb.append("<td>").append(md5Hash).append("</td>"); //NON-NLS
                detailsSb.append("</tr>"); //NON-NLS

                detailsSb.append("<tr>"); //NON-NLS
                detailsSb.append("<th>") //NON-NLS
                    .append(NbBundle.getMessage(this.getClass(), "HashDbIngestModule.postToBB.hashsetName"))
                    .append("</th>"); //NON-NLS
                detailsSb.append("<td>").append(db.getDisplayName()).append("</td>"); //NON-NLS
                detailsSb.append("</tr>"); //NON-NLS

                detailsSb.append("</table>"); //NON-NLS

                // NOTE(review): the "services.postMessage(IngestMessage.createDataMessage("
                // opening of this inbox post is missing from this extract.
                    NbBundle.getMessage(this.getClass(), "HashDbIngestModule.postToBB.knownBadMsg", abstractFile.getName()),
                    detailsSb.toString(),
                    abstractFile.getName() + md5Hash,
                    badFile));
            }
        } catch (TskException ex) {
            logger.log(Level.WARNING, "Error creating blackboard artifact", ex); //NON-NLS
        }
    }
602 
    /**
     * Posts a per-job summary (hit counts, calculation and lookup times, and
     * the hash sets used) to the ingest inbox, and removes the job's totals
     * from the static map. Only posts when at least one hash set was in use.
     *
     * @param jobId             The ingest job this summary covers.
     * @param knownBadHashSets  The notable hash sets used.
     * @param noChangeHashSets  The no-change hash sets used.
     * @param knownHashSets     The known hash sets used.
     */
    @Messages("HashDbIngestModule.complete.noChangesFound=No Change items found:")
    private static synchronized void postSummary(long jobId, List<HashDb> knownBadHashSets,
            List<HashDb> noChangeHashSets, List<HashDb> knownHashSets) {

        IngestJobTotals jobTotals = getTotalsForIngestJobs(jobId);
        totalsForIngestJobs.remove(jobId);

        if ((!knownBadHashSets.isEmpty()) || (!knownHashSets.isEmpty()) || (!noChangeHashSets.isEmpty())) {
            StringBuilder detailsSb = new StringBuilder();
            //details
            detailsSb.append(
                "<table border='0' cellpadding='4' width='280'>" +
                "<tr><td>" + NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.knownBadsFound") + "</td>" +
                "<td>" + jobTotals.totalKnownBadCount.get() + "</td></tr>" +

                "<tr><td>" + Bundle.HashDbIngestModule_complete_noChangesFound() + "</td>" +
                "<td>" + jobTotals.totalNoChangeCount.get() + "</td></tr>" +

                "<tr><td>" + NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.totalCalcTime") +
                "</td><td>" + jobTotals.totalCalctime.get() + "</td></tr>\n" +

                "<tr><td>" + NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.totalLookupTime") +
                "</td><td>" + jobTotals.totalLookuptime.get() + "</td></tr>\n</table>" +

                "<p>" + NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.databasesUsed") + "</p>\n<ul>"); //NON-NLS

            // Known hash sets are intentionally not listed here; only notable
            // and no-change sets appear in the "databases used" list below.
            Stream.concat(knownBadHashSets.stream(), noChangeHashSets.stream()).forEach((db) -> {
                detailsSb.append("<li>" + db.getHashSetName() + "</li>\n"); //NON-NLS
            });

            detailsSb.append("</ul>"); //NON-NLS

            // NOTE(review): the opening of this inbox post (presumably
            // "IngestServices.getInstance().postMessage(IngestMessage.createMessage(...")
            // is missing from this extract — confirm against the full source.
                NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.hashLookupResults"),
                detailsSb.toString()));
        }
    }
650 
651  @Override
652  public void shutDown() {
653  if (refCounter.decrementAndGet(jobId) == 0) {
654  postSummary(jobId, knownBadHashSets, noChangeHashSets, knownHashSets);
655  }
656  }
657 }
static IngestMessage createDataMessage(String source, String subject, String detailsHtml, String uniqueKey, BlackboardArtifact data)
static IngestMessage createErrorMessage(String source, String subject, String detailsHtml)
void startUp(org.sleuthkit.autopsy.ingest.IngestJobContext context)
static IngestMessage createMessage(MessageType messageType, String source, String subject, String detailsHtml)
static synchronized IngestJobTotals getTotalsForIngestJobs(long ingestJobId)
void calculateHashes(AbstractFile file, IngestJobTotals totals)
static TimingMetric getTimingMetric(String name)
FindInHashsetsResult findInHashsets(AbstractFile file, AtomicLong totalCount, AtomicLong totalLookupTime, List< HashDb > hashSets, TskData.FileKnown statusIfFound, Function< AbstractFile, String > lookupErrorMessage)
void postMessage(final IngestMessage message)
void reportLookupError(TskException ex, AbstractFile file, Function< AbstractFile, String > lookupErrorMessage)
static void submitTimingMetric(TimingMetric metric)
static void error(String title, String message)
synchronized static Logger getLogger(String name)
Definition: Logger.java:124
static IngestMessage createWarningMessage(String source, String subject, String detailsHtml)
boolean createArtifactIfNotExists(AbstractFile file, String comment, HashDb db)
void postHashSetHitToBlackboard(AbstractFile abstractFile, String md5Hash, HashDb db, String comment)
static void submitNormalizedTimingMetric(TimingMetric metric, long normalization)
static synchronized IngestServices getInstance()

Copyright © 2012-2021 Basis Technology. Generated on: Fri Aug 6 2021
This work is licensed under a Creative Commons Attribution-Share Alike 3.0 United States License.