package org.sleuthkit.autopsy.timeline.db;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import static java.util.Objects.isNull;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.logging.Level;
import java.util.stream.Collectors;
import javafx.application.Platform;
import javafx.beans.property.ReadOnlyBooleanProperty;
import javafx.beans.property.ReadOnlyBooleanWrapper;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.collections.ObservableMap;
import javafx.concurrent.Worker;
import javax.swing.JOptionPane;
import org.apache.commons.lang3.StringUtils;
import org.joda.time.Interval;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandleFactory;
import org.openide.util.NbBundle;
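// Single-threaded executor: database population jobs run one at a time, in
// submission order, on a worker thread named via Guava's ThreadFactoryBuilder
// so it is easy to identify in thread dumps.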
private final Executor workerExecutor = Executors.newSingleThreadExecutor(
        new ThreadFactoryBuilder().setNameFormat("eventrepository-worker-%d").build());
private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
private final ObservableMap<Long, String> hashSetMap = FXCollections.observableHashMap();
private final ObservableList<TagName> tagNames = FXCollections.observableArrayList();
return eventDB.getBoundingEventsInterval(timeRange, filter);
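// Guava LoadingCaches front the expensive EventDB (SQLite) queries. Entries
// expire 10 minutes after last access, and CacheLoader.from(...) adapts the
// EventDB method references into loaders, so e.g. a repeated
// idToEventCache.getUnchecked(id) only hits the database on a miss.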
idToEventCache = CacheBuilder.newBuilder()
        .expireAfterAccess(10, TimeUnit.MINUTES)
        .build(CacheLoader.from(eventDB::getEventById));
eventCountsCache = CacheBuilder.newBuilder()
        .expireAfterAccess(10, TimeUnit.MINUTES)
        .build(CacheLoader.from(eventDB::countEventsByType));
eventStripeCache = CacheBuilder.newBuilder()
        .expireAfterAccess(10, TimeUnit.MINUTES)
        .build(CacheLoader.from(eventDB::getEventStripes));
maxCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMaxTime));
minCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMinTime));
return maxCache.getUnchecked("max"); // the loader ignores the key; any constant works
return minCache.getUnchecked("min");

return idToEventCache.getUnchecked(eventID);
synchronized public Set<TimeLineEvent> getEventsById(Collection<Long> eventIDs) {
    return eventIDs.stream()
            .map(idToEventCache::getUnchecked)
            .collect(Collectors.toSet());
}
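// Uses the checked get() rather than getUnchecked() so a loader failure
// surfaces as an ExecutionException that can be logged and mapped to an
// empty result.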
try {
    return eventStripeCache.get(params);
} catch (ExecutionException ex) {
    LOGGER.log(Level.SEVERE, "Failed to load Event Stripes from cache for " + params, ex);
    return Collections.emptyList();
}
return eventCountsCache.getUnchecked(params);

return eventDB.countAllEvents();
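// Drop every cached value so subsequent queries go back to the refreshed database.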
minCache.invalidateAll();
maxCache.invalidateAll();
eventCountsCache.invalidateAll();
eventStripeCache.invalidateAll();
idToEventCache.invalidateAll();
return eventDB.getEventIDs(timeRange, filter);

return eventDB.hasNewColumns();

return eventDB.getTagCountsByTagName(eventIDsWithTags);
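// These maps are add-only (putIfAbsent): hash sets and data sources are never
// removed from a case, so existing keys stay valid.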
for (Map.Entry<Long, String> hashSet : eventDB.getHashSetNames().entrySet()) {
    hashSetMap.putIfAbsent(hashSet.getKey(), hashSet.getValue());
}
for (Long id : eventDB.getDataSourceIDs()) {
    try {
        datasourcesMap.putIfAbsent(id, skCase.getContentById(id).getDataSource().getName());
    } catch (TskCoreException ex) {
        LOGGER.log(Level.SEVERE, "Failed to get datasource by ID.", ex);
    }
}
try {
    tagNames.setAll(skCase.getTagNamesInUse());
} catch (TskCoreException ex) {
    LOGGER.log(Level.SEVERE, "Failed to get tag names in use.", ex);
}
Set<Long> updatedEventIDs = eventDB.addTag(objID, artifactID, tag, trans);
if (!updatedEventIDs.isEmpty()) {
    invalidateCaches(updatedEventIDs);
}
return updatedEventIDs;
synchronized public Set<Long> deleteTag(long objID, Long artifactID, long tagID, boolean tagged) {
    Set<Long> updatedEventIDs = eventDB.deleteTag(objID, artifactID, tagID, tagged);
    if (!updatedEventIDs.isEmpty()) {
        invalidateCaches(updatedEventIDs);
    }
    return updatedEventIDs;
}
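// Tag edits invalidate the aggregate caches wholesale, but only the affected
// events need to be evicted from the per-event cache.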
eventCountsCache.invalidateAll();
eventStripeCache.invalidateAll();
idToEventCache.invalidateAll(updatedEventIDs);
} catch (TskCoreException ex) {
    LOGGER.log(Level.SEVERE, "Failed to get tag names in use.", ex);
}
for (TagName t : tagNames) {
    tagsFilter.addSubFilter(new TagNameFilter(t, autoCase));
}
for (TagNameFilter t : tagsFilter.getSubFilters()) {
    t.setDisabled(tagNames.contains(t.getTagName()) == false);
}
return SQLHelper.getSQLWhere(f1).equals(SQLHelper.getSQLWhere(f2));

return dbWorker.isRunning();

LOGGER.log(Level.INFO, "(re)starting {0} db population task", mode);
if (dbWorker != null) {
    dbWorker.cancel();
}
dbWorker = new DBPopulationWorker(mode, onStateChange);
workerExecutor.execute(dbWorker);
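// Read-only JavaFX property; mutated only on the FX thread via
// Platform.runLater to respect JavaFX thread confinement.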
private final ReadOnlyBooleanWrapper cancellable = new ReadOnlyBooleanWrapper(true);
return cancellable.getReadOnlyProperty();

Platform.runLater(() -> cancellable.set(false));
return super.requestCancel();
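// The JavaFX Task's title/message/progress updates are mirrored into the
// NetBeans ProgressHandle so the Swing UI reflects the worker's state.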
super.updateTitle(title);
progressHandle.setDisplayName(title);

super.updateMessage(message);
progressHandle.progress(message);

super.updateProgress(workDone, max);
progressHandle.progress((int) workDone);

super.updateProgress(workDone, max);
progressHandle.progress((int) workDone);
this.dbPopulationMode = mode;
this.stateProperty().addListener(stateObservable -> onStateChange.accept(getState()));
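// Swaps in a fresh ProgressHandle: finish any live handle, create a
// cancellable or plain one, and start it indeterminate (workDone < 0)
// or sized to the expected total.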
void restartProgressHandle(String title, String message, Double workDone, double total, Boolean cancellable) {
    if (progressHandle != null) {
        progressHandle.finish();
    }
    progressHandle = cancellable
            ? ProgressHandleFactory.createHandle(title, this::requestCancel)
            : ProgressHandleFactory.createHandle(title);
    if (workDone < 0) {
        progressHandle.start();
    } else {
        progressHandle.start((int) total);
    }
}
@SuppressWarnings("deprecation")
@NbBundle.Messages({
    "progressWindow.msg.refreshingFileTags=Refreshing file tags",
    "progressWindow.msg.refreshingResultTags=Refreshing result tags",
    "progressWindow.msg.gatheringData=Gathering event data",
    "progressWindow.msg.commitingDb=Committing events database"})
protected Void call() throws Exception {
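// Population runs in phases keyed off dbPopulationMode: a full rebuild
// re-gathers file and artifact events; a tags-only run just re-syncs tags.
// All inserts happen in one transaction, and cancellation is disabled once
// the final commit begins.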
// full database rebuild: re-create the events table and gather file events
LOGGER.log(Level.INFO, "Beginning population of timeline db.");
restartProgressHandle(Bundle.progressWindow_msg_gatheringData(), "", -1D, 1, true);
eventDB.reInitializeDB();
List<Long> fileIDs = skCase.findAllFileIdsWhere("name != '.' AND name != '..'");
final int numFiles = fileIDs.size();
trans = eventDB.beginTransaction();

// tags-only refresh: re-open a transaction and drop the stale tag data
trans = eventDB.beginTransaction();
LOGGER.log(Level.INFO, "dropping old tags");
eventDB.reInitializeTags();

LOGGER.log(Level.INFO, "updating content tags");
int currentWorkTotal = contentTags.size();
restartProgressHandle(Bundle.progressWindow_msg_refreshingFileTags(), "", 0D, currentWorkTotal, true);

LOGGER.log(Level.INFO, "updating artifact tags");
currentWorkTotal = artifactTags.size();
restartProgressHandle(Bundle.progressWindow_msg_refreshingResultTags(), "", 0D, currentWorkTotal, true);

LOGGER.log(Level.INFO, "committing db");
Platform.runLater(() -> cancellable.set(false)); // the commit cannot be cancelled
restartProgressHandle(Bundle.progressWindow_msg_commitingDb(), "", -1D, 1, false);
eventDB.commitTransaction(trans);

progressHandle.finish();
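// Tag refresh loops: replay existing artifact and content tags into the
// events database inside the open transaction. Artifact tags carry their
// artifact ID; content tags pass null since no artifact is involved.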
for (int i = 0; i < currentWorkTotal; i++) {
    BlackboardArtifactTag artifactTag = artifactTags.get(i);
    eventDB.addTag(artifactTag.getContent().getId(), artifactTag.getArtifact().getArtifactID(), artifactTag, trans);
}

for (int i = 0; i < currentWorkTotal; i++) {
    ContentTag contentTag = contentTags.get(i);
    eventDB.addTag(contentTag.getContent().getId(), null, contentTag, trans);
}
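// MAC time pass: for every file in the case, derive timeline events from its
// modified/accessed/created/changed timestamps.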
@NbBundle.Messages("progressWindow.msg.populateMacEventsFiles=Populating MAC time events for files")
private void insertMACTimeEvents(final int numFiles, List<Long> fileIDs, EventDB.EventTransaction trans) {
    restartProgressHandle(Bundle.progressWindow_msg_populateMacEventsFiles(), "", 0D, numFiles, true);
    for (int i = 0; i < numFiles; i++) {
        long fID = fileIDs.get(i);
        try {
            AbstractFile f = skCase.getAbstractFileById(fID);
            if (isNull(f)) {
                LOGGER.log(Level.WARNING, "Failed to get data for file : {0}", fID);
            }
        } catch (TskCoreException tskCoreException) {
            LOGGER.log(Level.SEVERE, "Failed to insert MAC time events for file : " + fID, tskCoreException);
        }
    }
}
EnumMap<FileSystemTypes, Long> timeMap = new EnumMap<>(FileSystemTypes.class);
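// A zero timestamp means the filesystem recorded no value; only files with at
// least one non-zero MAC time produce events.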
if (Collections.max(timeMap.values()) > 0) {
    final String uniquePath = f.getUniquePath();
    final String parentPath = f.getParentPath();
    long datasourceID = f.getDataSource().getId();
    String datasourceName = StringUtils.substringBeforeLast(uniquePath, parentPath);

    String rootFolder = StringUtils.substringBefore(StringUtils.substringAfter(parentPath, "/"), "/");
    String shortDesc = datasourceName + "/" + StringUtils.defaultString(rootFolder);
    shortDesc = shortDesc.endsWith("/") ? shortDesc : shortDesc + "/";
    String medDesc = datasourceName + parentPath;

    final TskData.FileKnown known = f.getKnown();
    Set<String> hashSets = f.getHashSetNames();

    for (Map.Entry<FileSystemTypes, Long> timeEntry : timeMap.entrySet()) {
        if (timeEntry.getValue() > 0) {
            // only insert events for legitimate (non-zero) times
            eventDB.insertEvent(timeEntry.getValue(), timeEntry.getKey(),
                    datasourceID, f.getId(), null, uniquePath, medDesc,
                    shortDesc, known, hashSets, tags, trans);
        }
    }
}
@NbBundle.Messages("msgdlg.problem.text=There was a problem populating the timeline."
        + " Not all events may be present or accurate.")

} catch (CancellationException ex) {
    LOGGER.log(Level.WARNING, "Timeline database population was cancelled by the user."
            + " Not all events may be present or accurate.");
} catch (Exception ex) {
    LOGGER.log(Level.WARNING, "Unexpected exception while populating database.", ex);
    JOptionPane.showMessageDialog(null, Bundle.msgdlg_problem_text());
}
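// Artifact pass: for each artifact-based event type, fetch the matching
// blackboard artifacts and insert one event per artifact.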
@NbBundle.Messages({
    "# {0} - event type ",
    "progressWindow.populatingXevents=Populating {0} events"})
private void populateEventType(final ArtifactEventType type, EventDB.EventTransaction trans) {
    try {
        final ArrayList<BlackboardArtifact> blackboardArtifacts = skCase.getBlackboardArtifacts(type.getArtifactType().getTypeID());
        final int numArtifacts = blackboardArtifacts.size();
        restartProgressHandle(Bundle.progressWindow_populatingXevents(type.getDisplayName()), "", 0D, numArtifacts, true);
        for (int i = 0; i < numArtifacts; i++) {
            try {
                insertEventForArtifact(type, blackboardArtifacts.get(i), trans);
            } catch (TskCoreException ex) {
                LOGGER.log(Level.SEVERE, "There was a problem inserting event for artifact: " + blackboardArtifacts.get(i).getArtifactID(), ex);
            }
        }
    } catch (TskCoreException ex) {
        LOGGER.log(Level.SEVERE, "There was a problem getting events with sub type " + type.toString() + ".", ex);
    }
}
if (eventDescription != null && eventDescription.getTime() > 0) {
    long objectID = bbart.getObjectID();
    AbstractFile f = skCase.getAbstractFileById(objectID);
    long datasourceID = f.getDataSource().getId();
    long artifactID = bbart.getArtifactID();
    Set<String> hashSets = f.getHashSetNames();

    String fullDescription = eventDescription.getFullDescription();
    String medDescription = eventDescription.getMedDescription();
    String shortDescription = eventDescription.getShortDescription();
    eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, objectID, artifactID,
            fullDescription, medDescription, shortDescription, null, hashSets, tags, trans);
}