Autopsy 4.0
Graphical digital forensics platform for The Sleuth Kit and other tools.
EventsRepository.java
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2013-15 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.timeline.db;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import static java.util.Objects.isNull;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.logging.Level;
import java.util.stream.Collectors;
import javafx.application.Platform;
import javafx.beans.property.ReadOnlyBooleanProperty;
import javafx.beans.property.ReadOnlyBooleanWrapper;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.collections.ObservableMap;
import javafx.concurrent.Worker;
import javax.swing.JOptionPane;
import org.apache.commons.lang3.StringUtils;
import org.joda.time.Interval;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandleFactory;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.CancellationProgressTask;
import org.sleuthkit.autopsy.timeline.datamodel.EventStripe;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.datamodel.TimeLineEvent;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.ArtifactEventType;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.ArtifactEventType.AttributeEventDescription;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.FileSystemTypes;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
import org.sleuthkit.autopsy.timeline.filters.TagNameFilter;
import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.Tag;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;

public class EventsRepository {

    private final static Logger LOGGER = Logger.getLogger(EventsRepository.class.getName());

    private final Executor workerExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat("eventrepository-worker-%d").build()); //NON-NLS
    private DBPopulationWorker dbWorker;
    private final EventDB eventDB;
    private final Case autoCase;
    private final FilteredEventsModel modelInstance;

    private final LoadingCache<Object, Long> maxCache;
    private final LoadingCache<Object, Long> minCache;
    private final LoadingCache<Long, TimeLineEvent> idToEventCache;
    private final LoadingCache<ZoomParams, Map<EventType, Long>> eventCountsCache;
    private final LoadingCache<ZoomParams, List<EventStripe>> eventStripeCache;

    private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
    private final ObservableMap<Long, String> hashSetMap = FXCollections.observableHashMap();
    private final ObservableList<TagName> tagNames = FXCollections.observableArrayList();

    public Case getAutoCase() {
        return autoCase;
    }

    public ObservableList<TagName> getTagNames() {
        return tagNames;
    }

    synchronized public ObservableMap<Long, String> getDatasourcesMap() {
        return datasourcesMap;
    }

    synchronized public ObservableMap<Long, String> getHashSetMap() {
        return hashSetMap;
    }

    public Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter) {
        return eventDB.getBoundingEventsInterval(timeRange, filter);
    }

    public FilteredEventsModel getEventsModel() {
        return modelInstance;
    }

    public EventsRepository(Case autoCase, ReadOnlyObjectProperty<ZoomParams> currentStateProperty) {
        this.autoCase = autoCase;
        //TODO: we should check that case is open, or get passed a case object/directory -jm
        this.eventDB = EventDB.getEventDB(autoCase);

        idToEventCache = CacheBuilder.newBuilder()
                .maximumSize(5000L)
                .expireAfterAccess(10, TimeUnit.MINUTES)
                .build(CacheLoader.from(eventDB::getEventById));
        eventCountsCache = CacheBuilder.newBuilder()
                .maximumSize(1000L)
                .expireAfterAccess(10, TimeUnit.MINUTES)
                .build(CacheLoader.from(eventDB::countEventsByType));
        eventStripeCache = CacheBuilder.newBuilder()
                .maximumSize(1000L)
                .expireAfterAccess(10, TimeUnit.MINUTES)
                .build(CacheLoader.from(eventDB::getEventStripes));
        maxCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMaxTime));
        minCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMinTime));
        this.modelInstance = new FilteredEventsModel(this, currentStateProperty);
    }

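    /*
     * A minimal sketch (hypothetical, not part of this class) of the Guava
     * LoadingCache pattern built in the constructor above: the loader runs on
     * the first request for a key, and the cached value is served from memory
     * until it is evicted by size or by the expire-after-access timeout.
     *
     *   LoadingCache<Long, String> cache = CacheBuilder.newBuilder()
     *           .maximumSize(1000L)
     *           .expireAfterAccess(10, TimeUnit.MINUTES)
     *           .build(CacheLoader.from(id -> "value-" + id));
     *   String v = cache.getUnchecked(42L); // loaded now, served from memory afterwards
     */
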
    public Long getMaxTime() {
        return maxCache.getUnchecked("max"); // NON-NLS
    }

    public Long getMinTime() {
        return minCache.getUnchecked("min"); // NON-NLS
    }

    public TimeLineEvent getEventById(Long eventID) {
        return idToEventCache.getUnchecked(eventID);
    }

    synchronized public Set<TimeLineEvent> getEventsById(Collection<Long> eventIDs) {
        return eventIDs.stream()
                .map(idToEventCache::getUnchecked)
                .collect(Collectors.toSet());
    }

    synchronized public List<EventStripe> getEventStripes(ZoomParams params) {
        try {
            return eventStripeCache.get(params);
        } catch (ExecutionException ex) {
            LOGGER.log(Level.SEVERE, "Failed to load Event Stripes from cache for " + params.toString(), ex); //NON-NLS
            return Collections.emptyList();
        }
    }

    synchronized public Map<EventType, Long> countEvents(ZoomParams params) {
        return eventCountsCache.getUnchecked(params);
    }

    synchronized public int countAllEvents() {
        return eventDB.countAllEvents();
    }

    private void invalidateCaches() {
        minCache.invalidateAll();
        maxCache.invalidateAll();
        eventCountsCache.invalidateAll();
        eventStripeCache.invalidateAll();
        idToEventCache.invalidateAll();
    }

    public Set<Long> getEventIDs(Interval timeRange, RootFilter filter) {
        return eventDB.getEventIDs(timeRange, filter);
    }

    public Interval getSpanningInterval(Collection<Long> eventIDs) {
        return eventDB.getSpanningInterval(eventIDs);
    }

    public boolean hasNewColumns() {
        return eventDB.hasNewColumns();
    }

    public Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) {
        return eventDB.getTagCountsByTagName(eventIDsWithTags);
    }

    synchronized private void populateFilterData(SleuthkitCase skCase) {

        for (Map.Entry<Long, String> hashSet : eventDB.getHashSetNames().entrySet()) {
            hashSetMap.putIfAbsent(hashSet.getKey(), hashSet.getValue());
        }
        //because there is no way to remove a data source, we only ever add to this map
        for (Long id : eventDB.getDataSourceIDs()) {
            try {
                datasourcesMap.putIfAbsent(id, skCase.getContentById(id).getDataSource().getName());
            } catch (TskCoreException ex) {
                LOGGER.log(Level.SEVERE, "Failed to get datasource by ID.", ex); //NON-NLS
            }
        }

        try {
            //should this only be tags applied to files or event-bearing artifacts?
            tagNames.setAll(skCase.getTagNamesInUse());
        } catch (TskCoreException ex) {
            LOGGER.log(Level.SEVERE, "Failed to get tag names in use.", ex); //NON-NLS
        }
    }

    synchronized public Set<Long> addTag(long objID, Long artifactID, Tag tag, EventDB.EventTransaction trans) {
        Set<Long> updatedEventIDs = eventDB.addTag(objID, artifactID, tag, trans);
        if (!updatedEventIDs.isEmpty()) {
            invalidateCaches(updatedEventIDs);
        }
        return updatedEventIDs;
    }

    synchronized public Set<Long> deleteTag(long objID, Long artifactID, long tagID, boolean tagged) {
        Set<Long> updatedEventIDs = eventDB.deleteTag(objID, artifactID, tagID, tagged);
        if (!updatedEventIDs.isEmpty()) {
            invalidateCaches(updatedEventIDs);
        }
        return updatedEventIDs;
    }

    synchronized private void invalidateCaches(Set<Long> updatedEventIDs) {
        eventCountsCache.invalidateAll();
        eventStripeCache.invalidateAll();
        idToEventCache.invalidateAll(updatedEventIDs);
        try {
            tagNames.setAll(autoCase.getSleuthkitCase().getTagNamesInUse());
        } catch (TskCoreException ex) {
            LOGGER.log(Level.SEVERE, "Failed to get tag names in use.", ex); //NON-NLS
        }
    }

    public void syncTagsFilter(TagsFilter tagsFilter) {
        for (TagName t : tagNames) {
            tagsFilter.addSubFilter(new TagNameFilter(t, autoCase));
        }
        for (TagNameFilter t : tagsFilter.getSubFilters()) {
            t.setDisabled(tagNames.contains(t.getTagName()) == false);
        }
    }

    public boolean areFiltersEquivalent(RootFilter f1, RootFilter f2) {
        return SQLHelper.getSQLWhere(f1).equals(SQLHelper.getSQLWhere(f2));
    }

    public boolean isRebuilding() {
        return dbWorker.isRunning();
    }

    public CancellationProgressTask<Void> rebuildRepository(Consumer<Worker.State> onStateChange) {
        return rebuildRepository(DBPopulationMode.FULL, onStateChange);
    }

    public CancellationProgressTask<Void> rebuildTags(Consumer<Worker.State> onStateChange) {
        return rebuildRepository(DBPopulationMode.TAGS_ONLY, onStateChange);
    }

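    /*
     * Because workerExecutor is single-threaded, a newly submitted
     * DBPopulationWorker cannot start until the previous (cancelled) one has
     * returned, so at most one worker writes to the events database at a time.
     */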
    private CancellationProgressTask<Void> rebuildRepository(final DBPopulationMode mode, Consumer<Worker.State> onStateChange) {
        LOGGER.log(Level.INFO, "(re)starting {0} db population task", mode); //NON-NLS
        if (dbWorker != null) {
            dbWorker.cancel();
        }
        dbWorker = new DBPopulationWorker(mode, onStateChange);
        workerExecutor.execute(dbWorker);
        return dbWorker;
    }

    private enum DBPopulationMode {

        FULL,
        TAGS_ONLY;
    }

    private class DBPopulationWorker extends CancellationProgressTask<Void> {

        private final ReadOnlyBooleanWrapper cancellable = new ReadOnlyBooleanWrapper(true);

        private final DBPopulationMode dbPopulationMode;
        private final SleuthkitCase skCase;
        private final TagsManager tagsManager;

        private ProgressHandle progressHandle;

        @Override
        public ReadOnlyBooleanProperty cancellableProperty() {
            return cancellable.getReadOnlyProperty();
        }

        @Override
        public boolean requestCancel() {
            Platform.runLater(() -> cancellable.set(false));
            return super.requestCancel();
        }

        @Override
        protected void updateTitle(String title) {
            super.updateTitle(title);
            progressHandle.setDisplayName(title);
        }

        @Override
        protected void updateMessage(String message) {
            super.updateMessage(message);
            progressHandle.progress(message);
        }

        @Override
        protected void updateProgress(double workDone, double max) {
            super.updateProgress(workDone, max);
            if (workDone >= 0) {
                progressHandle.progress((int) workDone);
            }
        }

        @Override
        protected void updateProgress(long workDone, long max) {
            super.updateProgress(workDone, max);
            if (workDone >= 0) {
                progressHandle.progress((int) workDone);
            }
        }

        DBPopulationWorker(DBPopulationMode mode, Consumer<Worker.State> onStateChange) {
            skCase = autoCase.getSleuthkitCase();
            tagsManager = autoCase.getServices().getTagsManager();
            this.dbPopulationMode = mode;
            this.stateProperty().addListener(stateObservable -> onStateChange.accept(getState()));
        }

        void restartProgressHandle(String title, String message, Double workDone, double total, Boolean cancellable) {
            if (progressHandle != null) {
                progressHandle.finish();
            }
            progressHandle = cancellable
                    ? ProgressHandleFactory.createHandle(title, this::requestCancel)
                    : ProgressHandleFactory.createHandle(title);

            if (workDone < 0) {
                progressHandle.start();
            } else {
                progressHandle.start((int) total);
            }
            updateTitle(title);
            updateMessage(message);
            updateProgress(workDone, total);
        }

462  @SuppressWarnings("deprecation") // TODO (EUR-733): Do not use SleuthkitCase.getLastObjectId
463  @Override
464  @NbBundle.Messages({"progressWindow.msg.refreshingFileTags=Refreshing file tags",
465  "progressWindow.msg.refreshingResultTags=Refreshing result tags",
466  "progressWindow.msg.gatheringData=Gathering event data",
467  "progressWindow.msg.commitingDb=Committing events database"})
468  protected Void call() throws Exception {
469  EventDB.EventTransaction trans = null;
470 
471  if (dbPopulationMode == DBPopulationMode.FULL) {
472  //drop old db, and add back MAC and artifact events
473  LOGGER.log(Level.INFO, "Beginning population of timeline db."); // NON-NLS
474  restartProgressHandle(Bundle.progressWindow_msg_gatheringData(), "", -1D, 1, true);
475  //reset database //TODO: can we do more incremental updates? -jm
476  eventDB.reInitializeDB();
477  //grab ids of all files
478  List<Long> fileIDs = skCase.findAllFileIdsWhere("name != '.' AND name != '..'"); //NON-NLS
479  final int numFiles = fileIDs.size();
480 
481  trans = eventDB.beginTransaction();
482  insertMACTimeEvents(numFiles, fileIDs, trans);
484  }
485 
            //tags
            if (dbPopulationMode == DBPopulationMode.TAGS_ONLY) {
                trans = eventDB.beginTransaction();
                LOGGER.log(Level.INFO, "dropping old tags"); // NON-NLS
                eventDB.reInitializeTags();
            }

            LOGGER.log(Level.INFO, "updating content tags"); // NON-NLS
            List<ContentTag> contentTags = tagsManager.getAllContentTags();
            int currentWorkTotal = contentTags.size();
            restartProgressHandle(Bundle.progressWindow_msg_refreshingFileTags(), "", 0D, currentWorkTotal, true);
            insertContentTags(currentWorkTotal, contentTags, trans);

            LOGGER.log(Level.INFO, "updating artifact tags"); // NON-NLS
            List<BlackboardArtifactTag> artifactTags = tagsManager.getAllBlackboardArtifactTags();
            currentWorkTotal = artifactTags.size();
            restartProgressHandle(Bundle.progressWindow_msg_refreshingResultTags(), "", 0D, currentWorkTotal, true);
            insertArtifactTags(currentWorkTotal, artifactTags, trans);

            LOGGER.log(Level.INFO, "committing db"); // NON-NLS
            Platform.runLater(() -> cancellable.set(false));
            restartProgressHandle(Bundle.progressWindow_msg_commitingDb(), "", -1D, 1, false);
            eventDB.commitTransaction(trans);

            eventDB.analyze();
            populateFilterData(skCase);
            invalidateCaches();

            progressHandle.finish();
            if (isCancelRequested()) {
                cancel();
            }
            return null;
        }

        private void insertArtifactTags(int currentWorkTotal, List<BlackboardArtifactTag> artifactTags, EventDB.EventTransaction trans) {
            for (int i = 0; i < currentWorkTotal; i++) {
                if (isCancelRequested()) {
                    break;
                }
                updateProgress(i, currentWorkTotal);
                BlackboardArtifactTag artifactTag = artifactTags.get(i);
                eventDB.addTag(artifactTag.getContent().getId(), artifactTag.getArtifact().getArtifactID(), artifactTag, trans);
            }
        }

        private void insertContentTags(int currentWorkTotal, List<ContentTag> contentTags, EventDB.EventTransaction trans) {
            for (int i = 0; i < currentWorkTotal; i++) {
                if (isCancelRequested()) {
                    break;
                }
                updateProgress(i, currentWorkTotal);
                ContentTag contentTag = contentTags.get(i);
                eventDB.addTag(contentTag.getContent().getId(), null, contentTag, trans);
            }
        }

        private void insertArtifactDerivedEvents(EventDB.EventTransaction trans) {
            //insert artifact based events
            //TODO: use (not-yet existing api) to grab all artifacts with timestamps, rather than the hardcoded lists in EventType -jm
            for (EventType type : RootEventType.allTypes) {
                if (isCancelRequested()) {
                    break;
                }
                //skip file_system events, they are already handled above
                if (type instanceof ArtifactEventType) {
                    populateEventType((ArtifactEventType) type, trans);
                }
            }
        }

557  @NbBundle.Messages("progressWindow.msg.populateMacEventsFiles=Populating MAC time events for files")
558  private void insertMACTimeEvents(final int numFiles, List<Long> fileIDs, EventDB.EventTransaction trans) {
559  restartProgressHandle(Bundle.progressWindow_msg_populateMacEventsFiles(), "", 0D, numFiles, true);
560  for (int i = 0; i < numFiles; i++) {
561  if (isCancelRequested()) {
562  break;
563  }
564  long fID = fileIDs.get(i);
565  try {
566  AbstractFile f = skCase.getAbstractFileById(fID);
567 
568  if (isNull(f)) {
569  LOGGER.log(Level.WARNING, "Failed to get data for file : {0}", fID); // NON-NLS
570  } else {
571  insertEventsForFile(f, trans);
572  updateProgress(i, numFiles);
573  updateMessage(f.getName());
574  }
575  } catch (TskCoreException tskCoreException) {
576  LOGGER.log(Level.SEVERE, "Failed to insert MAC time events for file : " + fID, tskCoreException); // NON-NLS
577  }
578  }
579  }
580 
        private void insertEventsForFile(AbstractFile f, EventDB.EventTransaction trans) throws TskCoreException {
            //gather time stamps into map
            EnumMap<FileSystemTypes, Long> timeMap = new EnumMap<>(FileSystemTypes.class);
            timeMap.put(FileSystemTypes.FILE_CREATED, f.getCrtime());
            timeMap.put(FileSystemTypes.FILE_ACCESSED, f.getAtime());
            timeMap.put(FileSystemTypes.FILE_CHANGED, f.getCtime());
            timeMap.put(FileSystemTypes.FILE_MODIFIED, f.getMtime());

            /*
             * If there are no legitimate (greater than zero) time stamps
             * (e.g., logical/local files), skip the rest of the event
             * generation: this should result in dropping logical files, since
             * they do not have legitimate time stamps.
             */
            if (Collections.max(timeMap.values()) > 0) {
                final String uniquePath = f.getUniquePath();
                final String parentPath = f.getParentPath();
                long datasourceID = f.getDataSource().getId();
                String datasourceName = StringUtils.substringBeforeLast(uniquePath, parentPath);

                String rootFolder = StringUtils.substringBefore(StringUtils.substringAfter(parentPath, "/"), "/");
                String shortDesc = datasourceName + "/" + StringUtils.defaultString(rootFolder);
                shortDesc = shortDesc.endsWith("/") ? shortDesc : shortDesc + "/";
                String medDesc = datasourceName + parentPath;
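
                /*
                 * Worked example with hypothetical values: for
                 * uniquePath = "/image1.dd/Windows/System32/foo.dll" and
                 * parentPath = "/Windows/System32/", datasourceName is
                 * "/image1.dd" and rootFolder is "Windows", so
                 * shortDesc = "/image1.dd/Windows/" and
                 * medDesc = "/image1.dd/Windows/System32/".
                 */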
605 
606  final TskData.FileKnown known = f.getKnown();
607  Set<String> hashSets = f.getHashSetNames();
608  List<ContentTag> tags = tagsManager.getContentTagsByContent(f);
609 
610  for (Map.Entry<FileSystemTypes, Long> timeEntry : timeMap.entrySet()) {
611  if (timeEntry.getValue() > 0) {
612  // if the time is legitimate ( greater than zero ) insert it
613  eventDB.insertEvent(timeEntry.getValue(), timeEntry.getKey(),
614  datasourceID, f.getId(), null, uniquePath, medDesc,
615  shortDesc, known, hashSets, tags, trans);
616  }
617  }
618  }
619  }
620 
        @Override
        @NbBundle.Messages("msgdlg.problem.text=There was a problem populating the timeline."
                + " Not all events may be present or accurate.")
        protected void done() {
            super.done();
            try {
                get();
            } catch (CancellationException ex) {
                LOGGER.log(Level.WARNING, "Timeline database population was cancelled by the user." //NON-NLS
                        + " Not all events may be present or accurate."); // NON-NLS
            } catch (Exception ex) {
                LOGGER.log(Level.WARNING, "Unexpected exception while populating database.", ex); // NON-NLS
                JOptionPane.showMessageDialog(null, Bundle.msgdlg_problem_text());
            }
        }

644  @NbBundle.Messages({"# {0} - event type ", "progressWindow.populatingXevents=Populating {0} events"})
646  try {
647  //get all the blackboard artifacts corresponding to the given event sub_type
648  final ArrayList<BlackboardArtifact> blackboardArtifacts = skCase.getBlackboardArtifacts(type.getArtifactType().getTypeID());
649  final int numArtifacts = blackboardArtifacts.size();
650  restartProgressHandle(Bundle.progressWindow_populatingXevents(type.getDisplayName()), "", 0D, numArtifacts, true);
651  for (int i = 0; i < numArtifacts; i++) {
652  try {
653  //for each artifact, extract the relevant information for the descriptions
654  insertEventForArtifact(type, blackboardArtifacts.get(i), trans);
655  updateProgress(i, numArtifacts);
656  } catch (TskCoreException ex) {
657  LOGGER.log(Level.SEVERE, "There was a problem inserting event for artifact: " + blackboardArtifacts.get(i).getArtifactID(), ex); // NON-NLS
658  }
659  }
660  } catch (TskCoreException ex) {
661  LOGGER.log(Level.SEVERE, "There was a problem getting events with sub type " + type.toString() + ".", ex); // NON-NLS
662  }
663  }
664 
        private void insertEventForArtifact(final ArtifactEventType type, BlackboardArtifact bbart, EventDB.EventTransaction trans) throws TskCoreException {
            AttributeEventDescription eventDescription = ArtifactEventType.buildEventDescription(type, bbart);

            // if the time is legitimate ( greater than zero ) insert it into the db
            if (eventDescription != null && eventDescription.getTime() > 0) {
                long objectID = bbart.getObjectID();
                AbstractFile f = skCase.getAbstractFileById(objectID);
                long datasourceID = f.getDataSource().getId();
                long artifactID = bbart.getArtifactID();
                Set<String> hashSets = f.getHashSetNames();
                List<BlackboardArtifactTag> tags = tagsManager.getBlackboardArtifactTagsByArtifact(bbart);
                String fullDescription = eventDescription.getFullDescription();
                String medDescription = eventDescription.getMedDescription();
                String shortDescription = eventDescription.getShortDescription();
                eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, objectID, artifactID, fullDescription, medDescription, shortDescription, null, hashSets, tags, trans);
            }
        }
    }
}
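
A minimal usage sketch (hypothetical caller code, not part of this file): a client constructs the repository for the current case, starts a full rebuild on the worker thread, and queries the cached accessors once the worker reports success. The variables currentCase and zoomProperty are assumptions for illustration.

    EventsRepository repo = new EventsRepository(currentCase, zoomProperty);
    repo.rebuildRepository(state -> {
        if (state == Worker.State.SUCCEEDED) {
            //after a successful rebuild the cached accessors are cheap to call repeatedly
            System.out.println("events: " + repo.countAllEvents());
            System.out.println("span: " + repo.getMinTime() + " - " + repo.getMaxTime());
        }
    });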