19 package org.sleuthkit.autopsy.timeline;
21 import com.google.common.cache.CacheBuilder;
22 import com.google.common.cache.LoadingCache;
23 import com.google.common.collect.ImmutableList;
24 import com.google.common.eventbus.EventBus;
25 import java.util.Collection;
26 import java.util.Collections;
27 import java.util.HashSet;
28 import java.util.List;
31 import java.util.concurrent.ExecutionException;
32 import java.util.concurrent.TimeUnit;
33 import java.util.logging.Level;
34 import javafx.beans.InvalidationListener;
35 import javafx.beans.property.ReadOnlyObjectProperty;
36 import javafx.beans.property.ReadOnlyObjectWrapper;
37 import javafx.collections.FXCollections;
38 import javafx.collections.ObservableMap;
39 import static org.apache.commons.collections4.CollectionUtils.emptyIfNull;
40 import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
41 import org.joda.time.DateTimeZone;
42 import org.joda.time.Interval;
43 import org.openide.util.NbBundle;
// Event bus used to publish timeline model events (refresh requests, tag
// added/deleted notifications) to registered subscribers — see
// registerForEvents()/unRegisterForEvents() below.
99 private final EventBus
eventbus =
new EventBus(
"EventsModel_EventBus");
// Read-only wrapper around the current model parameters (time range, event
// filter state, event-type hierarchy level); exposed via modelParamsProperty().
111 private final ReadOnlyObjectWrapper<EventsModelParams>
modelParamsProperty =
new ReadOnlyObjectWrapper<>();
// Read-only wrapper around the currently selected time range; exposed via
// timeRangeProperty().
113 private final ReadOnlyObjectWrapper<Interval>
timeRangeProperty =
new ReadOnlyObjectWrapper<>();
// Guava cache of per-event-type counts keyed by model parameters. Configured
// in the constructor (10-minute expire-after-access) and cleared by
// invalidateCaches() when underlying case data changes.
124 private final LoadingCache<EventsModelParams, Map<TimelineEventType, Long>>
eventCountsCache;
135 return new DataSourceFilter(dataSourceEntry.getValue(), dataSourceEntry.getKey());
157 idsToEventsCache = CacheBuilder.newBuilder()
159 .expireAfterAccess(10, TimeUnit.MINUTES)
161 eventCountsCache = CacheBuilder.newBuilder()
163 .expireAfterAccess(10, TimeUnit.MINUTES)
165 maxEventTimeCache = CacheBuilder.newBuilder()
167 minEventTimeCache = CacheBuilder.newBuilder()
175 InvalidationListener dataSourcesMapListener = observable -> {
176 RootFilterState rootFilter = filterStateProperty.getReadOnlyProperty().get();
177 addDataSourceFilters(rootFilter);
178 filterStateProperty.set(rootFilter.
copyOf());
180 datasourceIDsToNamesMap.addListener(dataSourcesMapListener);
193 modelParamsProperty.addListener(observable -> {
195 if (params != null) {
205 modelParamsProperty.bind(modelParams);
215 datasourceIDsToNamesMap.putIfAbsent(ds.getId(), ds.getName());
225 synchronized void addDataSourceFilters(
RootFilterState rootFilterState) {
243 if (modelParams.getTimeRange() == null) {
244 return Collections.emptyMap();
246 return caseDbEventManager.
countEventsByType(modelParams.getTimeRange().getStartMillis() / 1000,
247 modelParams.getTimeRange().getEndMillis() / 1000,
248 modelParams.getEventFilterState().getActiveFilter(),
249 modelParams.getEventTypesHierarchyLevel());
278 return modelParamsProperty.getReadOnlyProperty();
287 "FilteredEventsModel.timeRangeProperty.errorTitle=Timeline",
288 "FilteredEventsModel.timeRangeProperty.errorMessage=Error getting spanning interval."})
290 if (timeRangeProperty.get() == null) {
295 Bundle.FilteredEventsModel_timeRangeProperty_errorMessage());
296 logger.log(Level.SEVERE,
"Error getting spanning interval.", timelineCacheException);
299 return timeRangeProperty.getReadOnlyProperty();
309 return timelineLODProperty.getReadOnlyProperty();
319 return filterStateProperty.getReadOnlyProperty();
338 return modelParamsProperty.get();
390 datasourceIDsToNamesMap.entrySet().forEach(dataSourceEntry
405 Collections.emptySet()));
407 return rootFilterState;
422 return idsToEventsCache.get(eventID);
423 }
catch (ExecutionException ex) {
439 Set<TimelineEvent> events =
new HashSet<>();
440 for (Long
id : eventIDs) {
459 final Interval overlap;
461 synchronized (
this) {
465 return caseDbEventManager.
getEventIDs(overlap, intersection);
512 synchronized (
this) {
517 return eventCountsCache.get(
new EventsModelParams(timeRange, typeZoom, filter, null));
518 }
catch (ExecutionException executionException) {
519 throw new TskCoreException(
"Error getting cached event counts.`1", executionException);
575 return minEventTimeCache.get(
"min");
576 }
catch (ExecutionException ex) {
577 throw new TskCoreException(
"Error getting cached min time.", ex);
592 return maxEventTimeCache.get(
"max");
593 }
catch (ExecutionException ex) {
594 throw new TskCoreException(
"Error getting cached max time.", ex);
613 if (isNotEmpty(updatedEventIDs)) {
634 if (isNotEmpty(updatedEventIDs)) {
655 if (isNotEmpty(updatedEventIDs)) {
687 if (isNotEmpty(updatedEventIDs)) {
703 boolean tagsUpdated = !updatedEventIDs.isEmpty();
720 boolean tagsUpdated = !updatedEventIDs.isEmpty();
734 eventbus.register(subscriber);
743 eventbus.unregister(subscriber);
774 Set<Long> updatedEventIDs =
new HashSet<>();
779 if (isNotEmpty(updatedEventIDs)) {
782 return updatedEventIDs;
796 minEventTimeCache.invalidateAll();
797 maxEventTimeCache.invalidateAll();
798 idsToEventsCache.invalidateAll(emptyIfNull(updatedEventIDs));
799 eventCountsCache.invalidateAll();
TimelineLevelOfDetail getTimelineLOD()
List< Long > getEventIDs(Interval timeRange, TimelineFilter.RootFilter filter)
synchronized boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt)
Set< Long > getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts)
synchronized Set< Long > updateEventsForHashSetHits(Collection< BlackboardArtifact > hashSetHitArtifacts)
ImmutableList< TimelineEventType > getEventTypes()
Set< TimelineEvent > getEventsById(Collection< Long > eventIDs)
synchronized void unRegisterForEvents(Object subscriber)
synchronized void invalidateCaches(Collection< Long > updatedEventIDs)
synchronized RootFilterState getEventFilterState()
ImmutableList< TimelineEventType > getEventTypes()
Set< Long > getEventIDsForContent(Content content, boolean includeDerivedArtifacts)
BlackboardArtifact getArtifact()
final TimelineManager caseDbEventManager
List< DataSource > getDataSources()
Interval getSpanningInterval(Collection< Long > eventIDs)
Set< Long > updateEventsForContentTagAdded(Content content)
Interval getSpanningInterval(Collection< Long > eventIDs)
Content getContentById(long id)
final LoadingCache< Long, TimelineEvent > idsToEventsCache
RootFilter getActiveFilter()
Set< Long > updateEventsForContentTagDeleted(Content content)
static DataSourceFilter newDataSourceFilter(Map.Entry< Long, String > dataSourceEntry)
Set< Long > updateEventsForHashSetHit(Content content)
Interval getSpanningInterval()
TimelineManager getEventManager()
SleuthkitCase getSleuthkitCase()
synchronized boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt)
synchronized EventsModelParams getModelParams()
List< Long > getEventIDs(Interval timeRange, FilterState< ? extends TimelineFilter > filterState)
BlackboardArtifact getBlackboardArtifact(long artifactID)
RootFilterState getEventFilterState()
static final Logger logger
Interval getSpanningInterval(DateTimeZone timeZone)
synchronized ReadOnlyObjectProperty< EventsModelParams > modelParamsProperty()
final LoadingCache< EventsModelParams, Map< TimelineEventType, Long > > eventCountsCache
TimelineManager getTimelineManager()
final LoadingCache< Object, Long > maxEventTimeCache
EventsModel(Case currentCase, ReadOnlyObjectProperty< EventsModelParams > modelParams)
boolean postTagsAdded(Set< Long > updatedEventIDs)
Set< Long > updateEventsForArtifactTagDeleted(BlackboardArtifact artifact)
TimelineEventType.HierarchyLevel getEventTypesHierarchyLevel()
final ReadOnlyObjectWrapper< TimelineLevelOfDetail > timelineLODProperty
final ReadOnlyObjectWrapper< RootFilterState > filterStateProperty
synchronized TimelineEventType.HierarchyLevel getEventTypeZoom()
synchronized RootFilterState getDefaultEventFilterState()
SleuthkitCase getSleuthkitCase()
boolean postTagsDeleted(Set< Long > updatedEventIDs)
synchronized void populateDataSourcesCache()
final ObservableMap< Long, String > datasourceIDsToNamesMap
synchronized ReadOnlyObjectProperty< Interval > timeRangeProperty()
static FileTypesFilter createDefaultFileTypesFilter()
synchronized void registerForEvents(Object subscriber)
Map< TimelineEventType, Long > countEventsByType(Long startTime, Long endTime, TimelineFilter.RootFilter filter, TimelineEventType.HierarchyLevel typeHierachyLevel)
synchronized ReadOnlyObjectProperty< TimelineEventType.HierarchyLevel > eventTypesHierarchyLevelProperty()
List< Long > getEventIDsForArtifact(BlackboardArtifact artifact)
RootFilterState intersect(FilterState< ? extends TimelineFilter > other)
static void error(String title, String message)
TimelineEventType ROOT_EVENT_TYPE
synchronized static Logger getLogger(String name)
Map< TimelineEventType, Long > countEventsByType(EventsModelParams modelParams)
synchronized boolean handleContentTagDeleted(ContentTagDeletedEvent evt)
synchronized TimelineLevelOfDetail getDescriptionLOD()
void postRefreshRequest()
synchronized boolean handleContentTagAdded(ContentTagAddedEvent evt)
synchronized ReadOnlyObjectProperty< TimelineLevelOfDetail > descriptionLODProperty()
CompoundFilterState< DataSourceFilter, DataSourcesFilter > getDataSourcesFilterState()
void addSubFilter(SubFilterType subfilter)
synchronized Interval getTimeRange()
TimelineEvent getEventById(Long eventID)
List< Long > getEventIDsForArtifact(BlackboardArtifact artifact)
Map< TimelineEventType, Long > getEventCounts(Interval timeRange)
synchronized ReadOnlyObjectProperty< RootFilterState > eventFilterProperty()
Set< Long > updateEventsForArtifactTagAdded(BlackboardArtifact artifact)
final LoadingCache< Object, Long > minEventTimeCache