Sleuth Kit Java Bindings (JNI) 4.14.0
Java bindings for using The Sleuth Kit
Loading...
Searching...
No Matches
TimelineManager.java
Go to the documentation of this file.
1/*
2 * Sleuth Kit Data Model
3 *
4 * Copyright 2018-2020 Basis Technology Corp.
5 * Contact: carrier <at> sleuthkit <dot> org
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 */
19package org.sleuthkit.datamodel;
20
21import com.google.common.annotations.Beta;
22import com.google.common.collect.ImmutableList;
23import com.google.common.collect.ImmutableMap;
24import java.sql.PreparedStatement;
25import java.sql.ResultSet;
26import java.sql.SQLException;
27import java.sql.Statement;
28import java.sql.Types;
29import java.text.MessageFormat;
30import java.time.Instant;
31import java.util.ArrayList;
32import java.util.Collection;
33import java.util.Collections;
34import java.util.HashMap;
35import java.util.HashSet;
36import java.util.List;
37import java.util.Map;
38import java.util.Objects;
39import java.util.Optional;
40import java.util.Set;
41import java.util.logging.Level;
42import java.util.logging.Logger;
43import java.util.stream.Collectors;
44import java.util.stream.Stream;
45import org.joda.time.DateTimeZone;
46import org.joda.time.Interval;
47import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_TL_EVENT;
48import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TL_EVENT_TYPE;
49import static org.sleuthkit.datamodel.CollectionUtils.isNotEmpty;
50import static org.sleuthkit.datamodel.CommManagerSqlStringUtils.buildCSVString;
51import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
52import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
53import static org.sleuthkit.datamodel.SleuthkitCase.escapeSingleQuotes;
54
58public final class TimelineManager {
59
// Class-wide logger for timeline warnings (discarded timestamps) and errors.
private static final Logger logger = Logger.getLogger(TimelineManager.class.getName());
61
65 private static final ImmutableList<TimelineEventType> ROOT_CATEGORY_AND_FILESYSTEM_TYPES
66 = ImmutableList.of(
75
82 private static final ImmutableList<TimelineEventType> PREDEFINED_EVENT_TYPES
83 = new ImmutableList.Builder<TimelineEventType>()
86 .build();
87
// All known standard artifact type IDs; used to decide whether an artifact
// is a standard or a custom event source (see addOtherEventDesc()).
private static final Set<Integer> ARTIFACT_TYPE_IDS = Stream.of(BlackboardArtifact.ARTIFACT_TYPE.values())
        .map(artType -> artType.getTypeID())
        .collect(Collectors.toSet());

// The case database this manager reads timeline data from and writes events to.
private final SleuthkitCase caseDB;

// Upper bound (epoch seconds) on timestamps accepted into the timeline:
// construction time plus 394200000 seconds (~12.5 years). Later timestamps
// are discarded as bogus by the add-event methods.
private static final Long MAX_TIMESTAMP_TO_ADD = Instant.now().getEpochSecond() + 394200000;

// Maps event type IDs to their TimelineEventType objects; populated once
// in the constructor.
private final Map<Long, TimelineEventType> eventTypeIDMap = new HashMap<>();
105
/**
 * Constructs a timeline manager for the given case database, ensuring that
 * all predefined timeline event types exist in the tsk_event_types table
 * and building the in-memory type-ID-to-type map.
 *
 * @param caseDB The case database.
 *
 * @throws TskCoreException If the event types cannot be written to the
 *                          database.
 */
TimelineManager(SleuthkitCase caseDB) throws TskCoreException {
    this.caseDB = caseDB;

    List<TimelineEventType> fullList = new ArrayList<>();
    fullList.addAll(ROOT_CATEGORY_AND_FILESYSTEM_TYPES);
    fullList.addAll(PREDEFINED_EVENT_TYPES);

    caseDB.acquireSingleUserCaseWriteLock();
    try (final CaseDbConnection con = caseDB.getConnection();
            final PreparedStatement pStatement = con.prepareStatement(
                    insertOrIgnore(" INTO tsk_event_types(event_type_id, display_name, super_type_id) VALUES (?, ?, ?)"),
                    Statement.NO_GENERATED_KEYS)) {
        for (TimelineEventType type : fullList) {
            pStatement.setLong(1, type.getTypeID());
            pStatement.setString(2, escapeSingleQuotes(type.getDisplayName()));
            // A type that is its own parent is a root of the hierarchy;
            // store NULL as its super type so the hierarchy terminates.
            if (type != type.getParent()) {
                pStatement.setLong(3, type.getParent().getTypeID());
            } else {
                pStatement.setNull(3, java.sql.Types.INTEGER);
            }

            con.executeUpdate(pStatement);
            eventTypeIDMap.put(type.getTypeID(), type);
        }
    } catch (SQLException ex) {
        throw new TskCoreException("Failed to initialize timeline event types", ex); // NON-NLS
    } finally {
        caseDB.releaseSingleUserCaseWriteLock();
    }
}
145
157 public Interval getSpanningInterval(Collection<Long> eventIDs) throws TskCoreException {
158 if (eventIDs.isEmpty()) {
159 return null;
160 }
161 final String query = "SELECT Min(time) as minTime, Max(time) as maxTime FROM tsk_events WHERE event_id IN (" + buildCSVString(eventIDs) + ")"; //NON-NLS
162 caseDB.acquireSingleUserCaseReadLock();
163 try (CaseDbConnection con = caseDB.getConnection();
164 Statement stmt = con.createStatement();
165 ResultSet results = stmt.executeQuery(query);) {
166 if (results.next()) {
167 return new Interval(results.getLong("minTime") * 1000, (results.getLong("maxTime") + 1) * 1000, DateTimeZone.UTC); // NON-NLS
168 }
169 } catch (SQLException ex) {
170 throw new TskCoreException("Error executing get spanning interval query: " + query, ex); // NON-NLS
171 } finally {
172 caseDB.releaseSingleUserCaseReadLock();
173 }
174 return null;
175 }
176
/**
 * Gets an interval spanning from the latest event at or before the start
 * of the given time range to the earliest event at or after its end, for
 * events that pass the given filter.
 *
 * @param timeRange The time range of interest.
 * @param filter    The root filter events must pass.
 * @param timeZone  The time zone for the returned interval.
 *
 * @return The spanning interval, or null if no bounding events are found.
 *
 * @throws TskCoreException If there is an error querying the case
 *                          database.
 */
public Interval getSpanningInterval(Interval timeRange, TimelineFilter.RootFilter filter, DateTimeZone timeZone) throws TskCoreException {
    // Convert interval bounds from milliseconds to epoch seconds, the unit
    // the tsk_events.time column is stored in.
    long start = timeRange.getStartMillis() / 1000;
    long end = timeRange.getEndMillis() / 1000;
    String sqlWhere = getSQLWhere(filter);
    String augmentedEventsTablesSQL = getAugmentedEventsTablesSQL(filter);
    String queryString = " SELECT (SELECT Max(time) FROM " + augmentedEventsTablesSQL
            + " WHERE time <=" + start + " AND " + sqlWhere + ") AS start,"
            + " (SELECT Min(time) FROM " + augmentedEventsTablesSQL
            + " WHERE time >= " + end + " AND " + sqlWhere + ") AS end";//NON-NLS
    caseDB.acquireSingleUserCaseReadLock();
    try (CaseDbConnection con = caseDB.getConnection();
            Statement stmt = con.createStatement(); //can't use prepared statement because of complex where clause
            ResultSet results = stmt.executeQuery(queryString);) {

        if (results.next()) {
            long start2 = results.getLong("start"); // NON-NLS
            long end2 = results.getLong("end"); // NON-NLS

            // A SQL NULL (no event at or after the range end) reads back
            // as 0; fall back to the overall maximum event time.
            if (end2 == 0) {
                end2 = getMaxEventTime();
            }
            // End is exclusive: extend one second past the last event time.
            return new Interval(start2 * 1000, (end2 + 1) * 1000, timeZone);
        }
    } catch (SQLException ex) {
        throw new TskCoreException("Failed to get MIN time.", ex); // NON-NLS
    } finally {
        caseDB.releaseSingleUserCaseReadLock();
    }
    return null;
}
219
229 public TimelineEvent getEventById(long eventID) throws TskCoreException {
230 String sql = "SELECT * FROM " + getAugmentedEventsTablesSQL(false) + " WHERE event_id = " + eventID;
231 caseDB.acquireSingleUserCaseReadLock();
232 try (CaseDbConnection con = caseDB.getConnection();
233 Statement stmt = con.createStatement();) {
234 try (ResultSet results = stmt.executeQuery(sql);) {
235 if (results.next()) {
236 int typeID = results.getInt("event_type_id");
237 TimelineEventType type = getEventType(typeID).orElseThrow(() -> newEventTypeMappingException(typeID)); //NON-NLS
238 return new TimelineEvent(eventID,
239 results.getLong("data_source_obj_id"),
240 results.getLong("content_obj_id"),
241 results.getLong("artifact_id"),
242 results.getLong("time"),
243 type, results.getString("full_description"),
244 results.getString("med_description"),
245 results.getString("short_description"),
246 intToBoolean(results.getInt("hash_hit")),
247 intToBoolean(results.getInt("tagged")));
248 }
249 }
250 } catch (SQLException sqlEx) {
251 throw new TskCoreException("Error while executing query " + sql, sqlEx); // NON-NLS
252 } finally {
253 caseDB.releaseSingleUserCaseReadLock();
254 }
255 return null;
256 }
257
/**
 * Gets the IDs, in ascending time order, of all events that fall within
 * the given time range and pass the given filter.
 *
 * @param timeRange The time range of interest.
 * @param filter    The root filter events must pass.
 *
 * @return A list of event IDs, possibly empty.
 *
 * @throws TskCoreException If there is an error querying the case
 *                          database.
 */
public List<Long> getEventIDs(Interval timeRange, TimelineFilter.RootFilter filter) throws TskCoreException {
    Long startTime = timeRange.getStartMillis() / 1000;
    Long endTime = timeRange.getEndMillis() / 1000;

    if (Objects.equals(startTime, endTime)) {
        endTime++; // make sure end is at least 1 second after start; times are in epoch seconds and the range below is half-open
    }

    ArrayList<Long> resultIDs = new ArrayList<>();

    String query = "SELECT tsk_events.event_id AS event_id FROM " + getAugmentedEventsTablesSQL(filter)
            + " WHERE time >= " + startTime + " AND time <" + endTime + " AND " + getSQLWhere(filter) + " ORDER BY time ASC"; // NON-NLS
    caseDB.acquireSingleUserCaseReadLock();
    try (CaseDbConnection con = caseDB.getConnection();
            Statement stmt = con.createStatement();
            ResultSet results = stmt.executeQuery(query);) {
        while (results.next()) {
            resultIDs.add(results.getLong("event_id")); //NON-NLS
        }

    } catch (SQLException sqlEx) {
        throw new TskCoreException("Error while executing query " + query, sqlEx); // NON-NLS
    } finally {
        caseDB.releaseSingleUserCaseReadLock();
    }

    return resultIDs;
}
297
306 public Long getMaxEventTime() throws TskCoreException {
307 caseDB.acquireSingleUserCaseReadLock();
308 try (CaseDbConnection con = caseDB.getConnection();
309 Statement stms = con.createStatement();
310 ResultSet results = stms.executeQuery(STATEMENTS.GET_MAX_TIME.getSQL());) {
311 if (results.next()) {
312 return results.getLong("max"); // NON-NLS
313 }
314 } catch (SQLException ex) {
315 throw new TskCoreException("Error while executing query " + STATEMENTS.GET_MAX_TIME.getSQL(), ex); // NON-NLS
316 } finally {
317 caseDB.releaseSingleUserCaseReadLock();
318 }
319 return -1l;
320 }
321
330 public Long getMinEventTime() throws TskCoreException {
331 caseDB.acquireSingleUserCaseReadLock();
332 try (CaseDbConnection con = caseDB.getConnection();
333 Statement stms = con.createStatement();
334 ResultSet results = stms.executeQuery(STATEMENTS.GET_MIN_TIME.getSQL());) {
335 if (results.next()) {
336 return results.getLong("min"); // NON-NLS
337 }
338 } catch (SQLException ex) {
339 throw new TskCoreException("Error while executing query " + STATEMENTS.GET_MAX_TIME.getSQL(), ex); // NON-NLS
340 } finally {
341 caseDB.releaseSingleUserCaseReadLock();
342 }
343 return -1l;
344 }
345
354 public Optional<TimelineEventType> getEventType(long eventTypeID) {
355 // The parent EventType with ID 22 has been deprecated. This ID had two
356 // children which have be reassigned to MISC_TYPES.
358 return Optional.of(TimelineEventType.MISC_TYPES);
359 }
360
361 return Optional.ofNullable(eventTypeIDMap.get(eventTypeID));
362 }
363
369 public ImmutableList<TimelineEventType> getEventTypes() {
370 return ImmutableList.copyOf(eventTypeIDMap.values());
371 }
372
373 private String insertOrIgnore(String query) {
374 switch (caseDB.getDatabaseType()) {
375 case POSTGRESQL:
376 return " INSERT " + query + " ON CONFLICT DO NOTHING "; //NON-NLS
377 case SQLITE:
378 return " INSERT OR IGNORE " + query; //NON-NLS
379 default:
380 throw new UnsupportedOperationException("Unsupported DB type: " + caseDB.getDatabaseType().name());
381 }
382 }
383
/**
 * Canned SQL statements used by this manager.
 */
private enum STATEMENTS {

    GET_MAX_TIME("SELECT Max(time) AS max FROM tsk_events"), // NON-NLS
    GET_MIN_TIME("SELECT Min(time) AS min FROM tsk_events"); // NON-NLS

    // The SQL text of this statement.
    private final String sql;

    private STATEMENTS(String sql) {
        this.sql = sql;
    }

    String getSQL() {
        return sql;
    }
}
402
413 public List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) throws TskCoreException {
414 ArrayList<Long> eventIDs = new ArrayList<>();
415
416 String query
417 = "SELECT event_id FROM tsk_events "
418 + " LEFT JOIN tsk_event_descriptions on ( tsk_events.event_description_id = tsk_event_descriptions.event_description_id ) "
419 + " WHERE artifact_id = " + artifact.getArtifactID();
420 caseDB.acquireSingleUserCaseReadLock();
421 try (CaseDbConnection con = caseDB.getConnection();
422 Statement stmt = con.createStatement();
423 ResultSet results = stmt.executeQuery(query);) {
424 while (results.next()) {
425 eventIDs.add(results.getLong("event_id"));//NON-NLS
426 }
427 } catch (SQLException ex) {
428 throw new TskCoreException("Error executing getEventIDsForArtifact query.", ex); // NON-NLS
429 } finally {
430 caseDB.releaseSingleUserCaseReadLock();
431 }
432 return eventIDs;
433 }
434
448 public Set<Long> getEventIDsForContent(Content content, boolean includeDerivedArtifacts) throws TskCoreException {
449 caseDB.acquireSingleUserCaseWriteLock();
450 try (CaseDbConnection conn = caseDB.getConnection()) {
451 return getEventAndDescriptionIDs(conn, content.getId(), includeDerivedArtifacts).keySet();
452 } finally {
453 caseDB.releaseSingleUserCaseWriteLock();
454 }
455 }
456
/**
 * Inserts a row into the tsk_event_descriptions table for the given
 * description data, using the DB-specific ignore-conflict form of INSERT.
 *
 * @param dataSourceObjId  The object ID of the data source.
 * @param fileObjId        The object ID of the content the event is for.
 * @param artifactID       The artifact ID, or null for file-based events.
 * @param fullDescription  The full (high detail) description.
 * @param medDescription   The medium detail description, may be null.
 * @param shortDescription The short (low detail) description, may be null.
 * @param hasHashHits      Whether the source content has hash set hits.
 * @param tagged           Whether the source content is tagged.
 * @param connection       The case database connection to use.
 *
 * @return The new description row ID, or null if the insert was ignored
 *         because an identical description already exists.
 *
 * @throws TskCoreException   If there is an error writing to the case
 *                            database.
 * @throws DuplicateException Declared for callers; not thrown directly
 *                            here.
 */
private Long addEventDescription(long dataSourceObjId, long fileObjId, Long artifactID,
        String fullDescription, String medDescription, String shortDescription,
        boolean hasHashHits, boolean tagged, CaseDbConnection connection) throws TskCoreException, DuplicateException {
    String tableValuesClause
            = "tsk_event_descriptions ( "
            + "data_source_obj_id, content_obj_id, artifact_id, "
            + " full_description, med_description, short_description, "
            + " hash_hit, tagged "
            + " ) VALUES "
            + "(?, ?, ?, ?, ?, ?, ?, ?)";

    String insertDescriptionSql = getSqlIgnoreConflict(tableValuesClause);

    // NOTE(review): the finally block below releases the single-user case
    // write lock; the matching acquire call is expected to immediately
    // precede this try block in the full source — confirm.
    try {
        PreparedStatement insertDescriptionStmt = connection.getPreparedStatement(insertDescriptionSql, PreparedStatement.RETURN_GENERATED_KEYS);
        insertDescriptionStmt.clearParameters();
        insertDescriptionStmt.setLong(1, dataSourceObjId);
        insertDescriptionStmt.setLong(2, fileObjId);

        // artifact_id is nullable: file-based descriptions have no artifact.
        if (artifactID == null) {
            insertDescriptionStmt.setNull(3, Types.INTEGER);
        } else {
            insertDescriptionStmt.setLong(3, artifactID);
        }

        insertDescriptionStmt.setString(4, fullDescription);
        insertDescriptionStmt.setString(5, medDescription);
        insertDescriptionStmt.setString(6, shortDescription);
        insertDescriptionStmt.setInt(7, booleanToInt(hasHashHits));
        insertDescriptionStmt.setInt(8, booleanToInt(tagged));
        int row = insertDescriptionStmt.executeUpdate();
        // if no inserted rows, there is a conflict due to a duplicate event
        // description. If that happens, return null as no id was inserted.
        if (row < 1) {
            return null;
        }

        try (ResultSet generatedKeys = insertDescriptionStmt.getGeneratedKeys()) {
            if (generatedKeys.next()) {
                return generatedKeys.getLong(1);
            } else {
                return null;
            }
        }
    } catch (SQLException ex) {
        throw new TskCoreException("Failed to insert event description.", ex); // NON-NLS
    } finally {
        caseDB.releaseSingleUserCaseWriteLock();
    }
}
526
540 private Long getEventDescription(long dataSourceObjId, long fileObjId, Long artifactID,
541 String fullDescription, CaseDbConnection connection) throws TskCoreException {
542
543 String query = "SELECT event_description_id FROM tsk_event_descriptions "
544 + "WHERE data_source_obj_id = " + dataSourceObjId
545 + " AND content_obj_id = " + fileObjId
546 + " AND artifact_id " + (artifactID != null ? " = " + artifactID : "IS null")
547 + " AND full_description " + (fullDescription != null ? "= '"
548 + SleuthkitCase.escapeSingleQuotes(fullDescription) + "'" : "IS null");
549
550 caseDB.acquireSingleUserCaseReadLock();
551 try (ResultSet resultSet = connection.createStatement().executeQuery(query)) {
552
553 if (resultSet.next()) {
554 long id = resultSet.getLong(1);
555 return id;
556 }
557 } catch (SQLException ex) {
558 throw new TskCoreException(String.format("Failed to get description, dataSource=%d, fileObjId=%d, artifactId=%d", dataSourceObjId, fileObjId, artifactID), ex);
559 } finally {
560 caseDB.releaseSingleUserCaseReadLock();
561 }
562
563 return null;
564 }
565
566 Collection<TimelineEvent> addEventsForNewFile(AbstractFile file, CaseDbConnection connection) throws TskCoreException {
567 Set<TimelineEvent> events = addEventsForNewFileQuiet(file, connection);
568 events.stream()
569 .map(TimelineEventAddedEvent::new)
570 .forEach(caseDB::fireTSKEvent);
571
572 return events;
573 }
574
/**
 * Adds timeline events for the standard file system timestamps (created,
 * accessed, changed, modified) of the given new file, WITHOUT firing TSK
 * events for the new timeline events.
 *
 * @param file       The new file.
 * @param connection The case database connection to use.
 *
 * @return The set of timeline events that were added, possibly empty.
 *
 * @throws TskCoreException If there is an error writing to the case
 *                          database.
 */
Set<TimelineEvent> addEventsForNewFileQuiet(AbstractFile file, CaseDbConnection connection) throws TskCoreException {
    //gather time stamps into map
    // if any of these events become deprecated in the future, filtering may need to occur.
    Map<TimelineEventType, Long> timeMap = ImmutableMap.of(TimelineEventType.FILE_CREATED, file.getCrtime(),
            TimelineEventType.FILE_ACCESSED, file.getAtime(),
            TimelineEventType.FILE_CHANGED, file.getCtime(),
            TimelineEventType.FILE_MODIFIED, file.getMtime());

    /*
     * If there are no legitimate ( greater than zero ) time stamps skip the
     * rest of the event generation.
     */
    if (Collections.max(timeMap.values()) <= 0) {
        return Collections.emptySet();
    }

    String description = file.getParentPath() + file.getName();
    long fileObjId = file.getId();
    Set<TimelineEvent> events = new HashSet<>();
    caseDB.acquireSingleUserCaseWriteLock();
    try {
        // A null id means the insert hit a duplicate-description conflict;
        // fall back to looking up the existing description row.
        Long descriptionID = addEventDescription(file.getDataSourceObjectId(), fileObjId, null,
                description, null, null, false, false, connection);

        if(descriptionID == null) {
            descriptionID = getEventDescription(file.getDataSourceObjectId(), fileObjId, null, description, connection);
        }
        if(descriptionID != null) {
            for (Map.Entry<TimelineEventType, Long> timeEntry : timeMap.entrySet()) {
                Long time = timeEntry.getValue();
                if (time > 0 && time < MAX_TIMESTAMP_TO_ADD) {// if the time is legitimate ( greater than zero and less then 12 years from current date) insert it
                    TimelineEventType type = timeEntry.getKey();
                    long eventID = addEventWithExistingDescription(time, type, descriptionID, connection);

                    /*
                     * Last two flags indicating hasTags and hasHashHits are
                     * both set to false with the assumption that this is not
                     * possible for a new file. See JIRA-5407
                     */
                    events.add(new TimelineEvent(eventID, descriptionID, fileObjId, null, time, type,
                            description, null, null, false, false));
                } else {
                    // Warn only about future-dated timestamps; non-positive
                    // ones are silently skipped as unset.
                    if (time >= MAX_TIMESTAMP_TO_ADD) {
                        logger.log(Level.WARNING, String.format("Date/Time discarded from Timeline for %s for file %s with Id %d", timeEntry.getKey().getDisplayName(), file.getParentPath() + file.getName(), file.getId()));
                    }
                }
            }
        } else {
            throw new TskCoreException(String.format("Failed to get event description for file id = %d", fileObjId));
        }
    } catch (DuplicateException dupEx) {
        logger.log(Level.SEVERE, "Attempt to make file event duplicate.", dupEx);
    } finally {
        caseDB.releaseSingleUserCaseWriteLock();
    }

    return events;
}
647
/**
 * Adds timeline events for the given artifact. TSK_TL_EVENT artifacts get
 * their event type from the TSK_TL_EVENT_TYPE attribute (with a generic
 * description); other artifacts use any event types configured for that
 * artifact type, falling back to a generic "other" event. Fires a TSK
 * event for each timeline event added.
 *
 * @param artifact The artifact to derive events from.
 *
 * @return The set of timeline events that were added, possibly empty.
 *
 * @throws TskCoreException If there is an error writing to the case
 *                          database.
 */
Set<TimelineEvent> addArtifactEvents(BlackboardArtifact artifact) throws TskCoreException {
    Set<TimelineEvent> newEvents = new HashSet<>();

    /*
     * If the artifact is a TSK_TL_EVENT, use the TSK_TL_EVENT_TYPE
     * attribute to determine its event type, but give it a generic
     * description.
     */
    if (artifact.getArtifactTypeID() == TSK_TL_EVENT.getTypeID()) {
        TimelineEventType eventType;//the type of the event to add.
        BlackboardAttribute attribute = artifact.getAttribute(new BlackboardAttribute.Type(TSK_TL_EVENT_TYPE));
        if (attribute == null) {
            eventType = TimelineEventType.STANDARD_ARTIFACT_CATCH_ALL;
        } else {
            // Unknown type IDs fall back to the standard catch-all type.
            long eventTypeID = attribute.getValueLong();
            eventType = eventTypeIDMap.getOrDefault(eventTypeID, TimelineEventType.STANDARD_ARTIFACT_CATCH_ALL);
        }

        try {
            // @@@ This casting is risky if we change class hierarchy, but was expedient. Should move parsing to another class
            addArtifactEvent(((TimelineEventArtifactTypeImpl) TimelineEventType.STANDARD_ARTIFACT_CATCH_ALL).makeEventDescription(artifact), eventType, artifact)
                    .ifPresent(newEvents::add);
        } catch (DuplicateException ex) {
            logger.log(Level.SEVERE, getDuplicateExceptionMessage(artifact, "Attempt to make a timeline event artifact duplicate"), ex);
        }
    } else {
        /*
         * If there are any event types configured to make descriptions
         * automatically, use those.
         */
        Set<TimelineEventArtifactTypeImpl> eventTypesForArtifact = eventTypeIDMap.values().stream()
                .filter(TimelineEventArtifactTypeImpl.class::isInstance)
                .map(TimelineEventArtifactTypeImpl.class::cast)
                .filter(eventType -> eventType.getArtifactTypeID() == artifact.getArtifactTypeID())
                .collect(Collectors.toSet());

        boolean duplicateExists = false;
        for (TimelineEventArtifactTypeImpl eventType : eventTypesForArtifact) {
            try {
                addArtifactEvent(eventType.makeEventDescription(artifact), eventType, artifact)
                        .ifPresent(newEvents::add);
            } catch (DuplicateException ex) {
                duplicateExists = true;
                logger.log(Level.SEVERE, getDuplicateExceptionMessage(artifact, "Attempt to make artifact event duplicate"), ex);
            }
        }

        // if no other timeline events were created directly, then create new 'other' ones.
        if (!duplicateExists && newEvents.isEmpty()) {
            try {
                addOtherEventDesc(artifact).ifPresent(newEvents::add);
            } catch (DuplicateException ex) {
                logger.log(Level.SEVERE, getDuplicateExceptionMessage(artifact, "Attempt to make 'other' artifact event duplicate"), ex);
            }
        }
    }
    // Notify listeners of every event that was added.
    newEvents.stream()
            .map(TimelineEventAddedEvent::new)
            .forEach(caseDB::fireTSKEvent);
    return newEvents;
}
722
735 private String getDuplicateExceptionMessage(BlackboardArtifact artifact, String error) {
736 String artifactIDStr = null;
737 String sourceStr = null;
738
739 if (artifact != null) {
740 artifactIDStr = Long.toString(artifact.getId());
741
742 try {
743 sourceStr = artifact.getAttributes().stream()
744 .filter(attr -> attr != null && attr.getSources() != null && !attr.getSources().isEmpty())
745 .map(attr -> String.join(",", attr.getSources()))
746 .findFirst()
747 .orElse(null);
748 } catch (TskCoreException ex) {
749 logger.log(Level.WARNING, String.format("Could not fetch artifacts for artifact id: %d.", artifact.getId()), ex);
750 }
751 }
752
753 artifactIDStr = (artifactIDStr == null) ? "<null>" : artifactIDStr;
754 sourceStr = (sourceStr == null) ? "<null>" : sourceStr;
755
756 return String.format("%s (artifactID=%s, Source=%s).", error, artifactIDStr, sourceStr);
757 }
758
770 private Optional<TimelineEvent> addOtherEventDesc(BlackboardArtifact artifact) throws TskCoreException, DuplicateException {
771 if (artifact == null) {
772 return Optional.empty();
773 }
774
775 Long timeVal = artifact.getAttributes().stream()
776 .filter((attr) -> attr.getAttributeType().getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME)
777 .map(attr -> attr.getValueLong())
778 .findFirst()
779 .orElse(null);
780
781 if (timeVal == null) {
782 return Optional.empty();
783 }
784
785 String description = String.format("%s: %d", artifact.getDisplayName(), artifact.getId());
786
787 TimelineEventDescriptionWithTime evtWDesc = new TimelineEventDescriptionWithTime(timeVal, description, description, description);
788
789 TimelineEventType evtType = (ARTIFACT_TYPE_IDS.contains(artifact.getArtifactTypeID()))
790 ? TimelineEventType.STANDARD_ARTIFACT_CATCH_ALL
791 : TimelineEventType.CUSTOM_ARTIFACT_CATCH_ALL;
792
793 return addArtifactEvent(evtWDesc, evtType, artifact);
794 }
795
796
813 @Beta
815 TimelineEventType eventType, String shortDesc, String medDesc, String longDesc,
816 long dataSourceId, long contentId, Long artifactId, long time,
817 boolean hashHit, boolean tagged,
819 ) throws TskCoreException {
820 caseDB.acquireSingleUserCaseWriteLock();
821 try {
822 Long descriptionID = addEventDescription(dataSourceId, contentId, artifactId,
823 longDesc, medDesc, shortDesc, hashHit, tagged, trans.getConnection());
824
825 if (descriptionID == null) {
826 descriptionID = getEventDescription(dataSourceId, contentId, artifactId, longDesc, trans.getConnection());
827 }
828 if (descriptionID != null) {
829 long eventID = addEventWithExistingDescription(time, eventType, descriptionID, trans.getConnection());
830 TimelineEvent timelineEvt = new TimelineEvent(eventID, descriptionID, contentId, artifactId, time, eventType,
831 longDesc, medDesc, shortDesc, hashHit, tagged);
832
833 trans.registerTimelineEvent(new TimelineEventAddedEvent(timelineEvt));
834 return timelineEvt;
835 } else {
836 throw new TskCoreException(MessageFormat.format(
837 "Failed to get event description for [shortDesc: {0}, dataSourceId: {1}, contentId: {2}, artifactId: {3}]",
838 shortDesc, dataSourceId, contentId, artifactId == null ? "<null>" : artifactId));
839 }
840 } catch (DuplicateException dupEx) {
841 logger.log(Level.WARNING, "Attempt to make duplicate", dupEx);
842 return null;
843 } finally {
844 caseDB.releaseSingleUserCaseWriteLock();
845 }
846 }
847
/**
 * Adds a timeline event for the given artifact using the supplied
 * description-with-time payload, unless the payload is absent, the event
 * type is deprecated, or the timestamp is outside the accepted range.
 *
 * @param eventPayload The time and descriptions for the event, may be
 *                     null.
 * @param eventType    The type of event to add.
 * @param artifact     The artifact the event is derived from.
 *
 * @return The new timeline event, or empty if no event was added.
 *
 * @throws TskCoreException   If there is an error writing to the case
 *                            database.
 * @throws DuplicateException If an identical event already exists.
 */
private Optional<TimelineEvent> addArtifactEvent(TimelineEventDescriptionWithTime eventPayload,
        TimelineEventType eventType, BlackboardArtifact artifact) throws TskCoreException, DuplicateException {

    // make sure event payload is present
    // only create event for a timeline event type if not deprecated
    if (eventPayload == null || eventType.isDeprecated()) {
        return Optional.empty();
    }
    long time = eventPayload.getTime();
    // if the time is legitimate ( greater than or equal to zero or less than or equal to 12 years from present time) insert it into the db
    if (time <= 0 || time >= MAX_TIMESTAMP_TO_ADD) {
        if (time >= MAX_TIMESTAMP_TO_ADD) {
            logger.log(Level.WARNING, String.format("Date/Time discarded from Timeline for %s for artifact %s with id %d", artifact.getDisplayName(), eventPayload.getDescription(TimelineLevelOfDetail.HIGH), artifact.getId()));
        }
        return Optional.empty();
    }
    String fullDescription = eventPayload.getDescription(TimelineLevelOfDetail.HIGH);
    String medDescription = eventPayload.getDescription(TimelineLevelOfDetail.MEDIUM);
    String shortDescription = eventPayload.getDescription(TimelineLevelOfDetail.LOW);
    long artifactID = artifact.getArtifactID();
    long fileObjId = artifact.getObjectID();
    Long dataSourceObjectID = artifact.getDataSourceObjectID();

    if(dataSourceObjectID == null) {
        logger.log(Level.SEVERE, String.format("Failed to create timeline event for artifact (%d), artifact data source was null", artifact.getId()));
        return Optional.empty();
    }

    AbstractFile file = caseDB.getAbstractFileById(fileObjId);
    boolean hasHashHits = false;
    // file will be null if source was data source or some non-file
    if (file != null) {
        hasHashHits = isNotEmpty(file.getHashSetNames());
    }
    boolean tagged = isNotEmpty(caseDB.getBlackboardArtifactTagsByArtifact(artifact));

    TimelineEvent event;
    caseDB.acquireSingleUserCaseWriteLock();
    try (CaseDbConnection connection = caseDB.getConnection();) {

        // A null id means the insert hit a duplicate-description conflict;
        // fall back to looking up the existing description row.
        Long descriptionID = addEventDescription(dataSourceObjectID, fileObjId, artifactID,
                fullDescription, medDescription, shortDescription,
                hasHashHits, tagged, connection);

        if(descriptionID == null) {
            descriptionID = getEventDescription(dataSourceObjectID, fileObjId, artifactID,
                    fullDescription, connection);
        }

        if(descriptionID != null) {
            long eventID = addEventWithExistingDescription(time, eventType, descriptionID, connection);

            event = new TimelineEvent(eventID, dataSourceObjectID, fileObjId, artifactID,
                    time, eventType, fullDescription, medDescription, shortDescription,
                    hasHashHits, tagged);
        } else {
            throw new TskCoreException(String.format("Failed to get event description for file id = %d, artifactId %d", fileObjId, artifactID));
        }

    } finally {
        caseDB.releaseSingleUserCaseWriteLock();
    }
    return Optional.of(event);
}
925
926 private long addEventWithExistingDescription(Long time, TimelineEventType type, long descriptionID, CaseDbConnection connection) throws TskCoreException, DuplicateException {
927 String tableValuesClause
928 = "tsk_events ( event_type_id, event_description_id , time) VALUES (?, ?, ?)";
929
930 String insertEventSql = getSqlIgnoreConflict(tableValuesClause);
931
932 caseDB.acquireSingleUserCaseWriteLock();
933 try {
934 PreparedStatement insertRowStmt = connection.getPreparedStatement(insertEventSql, Statement.RETURN_GENERATED_KEYS);
935 insertRowStmt.clearParameters();
936 insertRowStmt.setLong(1, type.getTypeID());
937 insertRowStmt.setLong(2, descriptionID);
938 insertRowStmt.setLong(3, time);
939 int row = insertRowStmt.executeUpdate();
940 // if no inserted rows, return null.
941 if (row < 1) {
942 throw new DuplicateException(String.format("An event already exists in the event table for this item [time: %s, type: %s, description: %d].",
943 time == null ? "<null>" : Long.toString(time),
944 type == null ? "<null>" : type.toString(),
945 descriptionID));
946 }
947
948 try (ResultSet generatedKeys = insertRowStmt.getGeneratedKeys();) {
949 if (generatedKeys.next()) {
950 return generatedKeys.getLong(1);
951 } else {
952 throw new DuplicateException(String.format("An event already exists in the event table for this item [time: %s, type: %s, description: %d].",
953 time == null ? "<null>" : Long.toString(time),
954 type == null ? "<null>" : type.toString(),
955 descriptionID));
956 }
957 }
958 } catch (SQLException ex) {
959 throw new TskCoreException("Failed to insert event for existing description.", ex); // NON-NLS
960 } finally {
961 caseDB.releaseSingleUserCaseWriteLock();
962 }
963 }
964
965 private Map<Long, Long> getEventAndDescriptionIDs(CaseDbConnection conn, long contentObjID, boolean includeArtifacts) throws TskCoreException {
966 return getEventAndDescriptionIDsHelper(conn, contentObjID, (includeArtifacts ? "" : " AND artifact_id IS NULL"));
967 }
968
969 private Map<Long, Long> getEventAndDescriptionIDs(CaseDbConnection conn, long contentObjID, Long artifactID) throws TskCoreException {
970 return getEventAndDescriptionIDsHelper(conn, contentObjID, " AND artifact_id = " + artifactID);
971 }
972
973 private Map<Long, Long> getEventAndDescriptionIDsHelper(CaseDbConnection con, long fileObjID, String artifactClause) throws TskCoreException {
974 //map from event_id to the event_description_id for that event.
975 Map<Long, Long> eventIDToDescriptionIDs = new HashMap<>();
976 String sql = "SELECT event_id, tsk_events.event_description_id"
977 + " FROM tsk_events "
978 + " LEFT JOIN tsk_event_descriptions ON ( tsk_events.event_description_id = tsk_event_descriptions.event_description_id )"
979 + " WHERE content_obj_id = " + fileObjID
980 + artifactClause;
981 try (Statement selectStmt = con.createStatement(); ResultSet executeQuery = selectStmt.executeQuery(sql);) {
982 while (executeQuery.next()) {
983 eventIDToDescriptionIDs.put(executeQuery.getLong("event_id"), executeQuery.getLong("event_description_id")); //NON-NLS
984 }
985 } catch (SQLException ex) {
986 throw new TskCoreException("Error getting event description ids for object id = " + fileObjID, ex);
987 }
988 return eventIDToDescriptionIDs;
989 }
990
1007 @Beta
1009 caseDB.acquireSingleUserCaseWriteLock();
1010 try (CaseDbConnection conn = caseDB.getConnection()) {
1011 Map<Long, Long> eventIDs = getEventAndDescriptionIDs(conn, content.getId(), false);
1012 updateEventSourceTaggedFlag(conn, eventIDs.values(), 1);
1013 return eventIDs.keySet();
1014 } finally {
1015 caseDB.releaseSingleUserCaseWriteLock();
1016 }
1017 }
1018
1036 @Beta
1038 caseDB.acquireSingleUserCaseWriteLock();
1039 try (CaseDbConnection conn = caseDB.getConnection()) {
1040 if (caseDB.getContentTagsByContent(content).isEmpty()) {
1041 Map<Long, Long> eventIDs = getEventAndDescriptionIDs(conn, content.getId(), false);
1042 updateEventSourceTaggedFlag(conn, eventIDs.values(), 0);
1043 return eventIDs.keySet();
1044 } else {
1045 return Collections.emptySet();
1046 }
1047 } finally {
1048 caseDB.releaseSingleUserCaseWriteLock();
1049 }
1050 }
1051
1064 caseDB.acquireSingleUserCaseWriteLock();
1065 try (CaseDbConnection conn = caseDB.getConnection()) {
1066 Map<Long, Long> eventIDs = getEventAndDescriptionIDs(conn, artifact.getObjectID(), artifact.getArtifactID());
1067 updateEventSourceTaggedFlag(conn, eventIDs.values(), 1);
1068 return eventIDs.keySet();
1069 } finally {
1070 caseDB.releaseSingleUserCaseWriteLock();
1071 }
1072 }
1073
1087 caseDB.acquireSingleUserCaseWriteLock();
1088 try (CaseDbConnection conn = caseDB.getConnection()) {
1089 if (caseDB.getBlackboardArtifactTagsByArtifact(artifact).isEmpty()) {
1090 Map<Long, Long> eventIDs = getEventAndDescriptionIDs(conn, artifact.getObjectID(), artifact.getArtifactID());
1091 updateEventSourceTaggedFlag(conn, eventIDs.values(), 0);
1092 return eventIDs.keySet();
1093 } else {
1094 return Collections.emptySet();
1095 }
1096 } finally {
1097 caseDB.releaseSingleUserCaseWriteLock();
1098 }
1099 }
1100
1101 private void updateEventSourceTaggedFlag(CaseDbConnection conn, Collection<Long> eventDescriptionIDs, int flagValue) throws TskCoreException {
1102 if (eventDescriptionIDs.isEmpty()) {
1103 return;
1104 }
1105
1106 String sql = "UPDATE tsk_event_descriptions SET tagged = " + flagValue + " WHERE event_description_id IN (" + buildCSVString(eventDescriptionIDs) + ")"; //NON-NLS
1107 try (Statement updateStatement = conn.createStatement()) {
1108 updateStatement.executeUpdate(sql);
1109 } catch (SQLException ex) {
1110 throw new TskCoreException("Error marking content events tagged: " + sql, ex);//NON-NLS
1111 }
1112 }
1113
1128 public Set<Long> updateEventsForHashSetHit(Content content) throws TskCoreException {
1129 caseDB.acquireSingleUserCaseWriteLock();
1130 try (CaseDbConnection con = caseDB.getConnection(); Statement updateStatement = con.createStatement();) {
1131 Map<Long, Long> eventIDs = getEventAndDescriptionIDs(con, content.getId(), true);
1132 if (!eventIDs.isEmpty()) {
1133 String sql = "UPDATE tsk_event_descriptions SET hash_hit = 1" + " WHERE event_description_id IN (" + buildCSVString(eventIDs.values()) + ")"; //NON-NLS
1134 try {
1135 updateStatement.executeUpdate(sql); //NON-NLS
1136 return eventIDs.keySet();
1137 } catch (SQLException ex) {
1138 throw new TskCoreException("Error setting hash_hit of events.", ex);//NON-NLS
1139 }
1140 } else {
1141 return eventIDs.keySet();
1142 }
1143 } catch (SQLException ex) {
1144 throw new TskCoreException("Error setting hash_hit of events.", ex);//NON-NLS
1145 } finally {
1146 caseDB.releaseSingleUserCaseWriteLock();
1147 }
1148 }
1149
	/**
	 * Rolls back the given case database transaction. Package-private
	 * pass-through so timeline code can abort a transaction it started.
	 *
	 * @param trans The transaction to roll back.
	 *
	 * @throws TskCoreException If there is an error rolling back the
	 *                          transaction.
	 */
	void rollBackTransaction(SleuthkitCase.CaseDbTransaction trans) throws TskCoreException {
		trans.rollback();
	}
1153
1173 public Map<TimelineEventType, Long> countEventsByType(Long startTime, Long endTime, TimelineFilter.RootFilter filter, TimelineEventType.HierarchyLevel typeHierachyLevel) throws TskCoreException {
1174 long adjustedEndTime = Objects.equals(startTime, endTime) ? endTime + 1 : endTime;
1175 //do we want the base or subtype column of the databse
1176 String typeColumn = typeColumnHelper(TimelineEventType.HierarchyLevel.EVENT.equals(typeHierachyLevel));
1177
1178 String queryString = "SELECT count(DISTINCT tsk_events.event_id) AS count, " + typeColumn//NON-NLS
1179 + " FROM " + getAugmentedEventsTablesSQL(filter)//NON-NLS
1180 + " WHERE time >= " + startTime + " AND time < " + adjustedEndTime + " AND " + getSQLWhere(filter) // NON-NLS
1181 + " GROUP BY " + typeColumn; // NON-NLS
1182
1183 caseDB.acquireSingleUserCaseReadLock();
1184 try (CaseDbConnection con = caseDB.getConnection();
1185 Statement stmt = con.createStatement();
1186 ResultSet results = stmt.executeQuery(queryString);) {
1187 Map<TimelineEventType, Long> typeMap = new HashMap<>();
1188 while (results.next()) {
1189 int eventTypeID = results.getInt(typeColumn);
1190 TimelineEventType eventType = getEventType(eventTypeID)
1191 .orElseThrow(() -> newEventTypeMappingException(eventTypeID));//NON-NLS
1192
1193 typeMap.put(eventType, results.getLong("count")); // NON-NLS
1194 }
1195 return typeMap;
1196 } catch (SQLException ex) {
1197 throw new TskCoreException("Error getting count of events from db: " + queryString, ex); // NON-NLS
1198 } finally {
1199 caseDB.releaseSingleUserCaseReadLock();
1200 }
1201 }
1202
	/**
	 * Builds the exception thrown when an event type ID read from the
	 * database cannot be mapped to a TimelineEventType.
	 *
	 * @param eventTypeID The event type ID that could not be mapped.
	 *
	 * @return A TskCoreException describing the mapping failure.
	 */
	private static TskCoreException newEventTypeMappingException(int eventTypeID) {
		return new TskCoreException("Error mapping event type id " + eventTypeID + " to EventType.");//NON-NLS
	}
1206
1220 static private String getAugmentedEventsTablesSQL(TimelineFilter.RootFilter filter) {
1221 TimelineFilter.FileTypesFilter fileTypesFitler = filter.getFileTypesFilter();
1222 boolean needsMimeTypes = fileTypesFitler != null && fileTypesFitler.hasSubFilters();
1223
1224 return getAugmentedEventsTablesSQL(needsMimeTypes);
1225 }
1226
1241 static private String getAugmentedEventsTablesSQL(boolean needMimeTypes) {
1242 /*
1243 * Regarding the timeline event tables schema, note that several columns
1244 * in the tsk_event_descriptions table seem, at first glance, to be
1245 * attributes of events rather than their descriptions and would appear
1246 * to belong in tsk_events table instead. The rationale for putting the
1247 * data source object ID, content object ID, artifact ID and the flags
1248 * indicating whether or not the event source has a hash set hit or is
1249 * tagged were motivated by the fact that these attributes are identical
1250 * for each event in a set of file system file MAC time events. The
1251 * decision was made to avoid duplication and save space by placing this
1252 * data in the tsk_event-descriptions table.
1253 */
1254 return "( SELECT event_id, time, tsk_event_descriptions.data_source_obj_id, content_obj_id, artifact_id, "
1255 + " full_description, med_description, short_description, tsk_events.event_type_id, super_type_id,"
1256 + " hash_hit, tagged "
1257 + (needMimeTypes ? ", mime_type" : "")
1258 + " FROM tsk_events "
1259 + " JOIN tsk_event_descriptions ON ( tsk_event_descriptions.event_description_id = tsk_events.event_description_id)"
1260 + " JOIN tsk_event_types ON (tsk_events.event_type_id = tsk_event_types.event_type_id ) "
1261 + (needMimeTypes ? " LEFT OUTER JOIN tsk_files "
1262 + " ON (tsk_event_descriptions.content_obj_id = tsk_files.obj_id)"
1263 : "")
1264 + ") AS tsk_events";
1265 }
1266
1274 private static int booleanToInt(boolean value) {
1275 return value ? 1 : 0;
1276 }
1277
1278 private static boolean intToBoolean(int value) {
1279 return value != 0;
1280 }
1281
1294 public List<TimelineEvent> getEvents(Interval timeRange, TimelineFilter.RootFilter filter) throws TskCoreException {
1295 List<TimelineEvent> events = new ArrayList<>();
1296
1297 Long startTime = timeRange.getStartMillis() / 1000;
1298 Long endTime = timeRange.getEndMillis() / 1000;
1299
1300 if (Objects.equals(startTime, endTime)) {
1301 endTime++; //make sure end is at least 1 millisecond after start
1302 }
1303
1304 if (filter == null) {
1305 return events;
1306 }
1307
1308 if (endTime < startTime) {
1309 return events;
1310 }
1311
1312 //build dynamic parts of query
1313 String querySql = "SELECT time, content_obj_id, data_source_obj_id, artifact_id, " // NON-NLS
1314 + " event_id, " //NON-NLS
1315 + " hash_hit, " //NON-NLS
1316 + " tagged, " //NON-NLS
1317 + " event_type_id, super_type_id, "
1318 + " full_description, med_description, short_description " // NON-NLS
1319 + " FROM " + getAugmentedEventsTablesSQL(filter) // NON-NLS
1320 + " WHERE time >= " + startTime + " AND time < " + endTime + " AND " + getSQLWhere(filter) // NON-NLS
1321 + " ORDER BY time"; // NON-NLS
1322
1323 caseDB.acquireSingleUserCaseReadLock();
1324 try (CaseDbConnection con = caseDB.getConnection();
1325 Statement stmt = con.createStatement();
1326 ResultSet resultSet = stmt.executeQuery(querySql);) {
1327
1328 while (resultSet.next()) {
1329 int eventTypeID = resultSet.getInt("event_type_id");
1330 TimelineEventType eventType = getEventType(eventTypeID).orElseThrow(()
1331 -> new TskCoreException("Error mapping event type id " + eventTypeID + "to EventType."));//NON-NLS
1332
1333 TimelineEvent event = new TimelineEvent(
1334 resultSet.getLong("event_id"), // NON-NLS
1335 resultSet.getLong("data_source_obj_id"), // NON-NLS
1336 resultSet.getLong("content_obj_id"), // NON-NLS
1337 resultSet.getLong("artifact_id"), // NON-NLS
1338 resultSet.getLong("time"), // NON-NLS
1339 eventType,
1340 resultSet.getString("full_description"), // NON-NLS
1341 resultSet.getString("med_description"), // NON-NLS
1342 resultSet.getString("short_description"), // NON-NLS
1343 resultSet.getInt("hash_hit") != 0, //NON-NLS
1344 resultSet.getInt("tagged") != 0);
1345
1346 events.add(event);
1347 }
1348
1349 } catch (SQLException ex) {
1350 throw new TskCoreException("Error getting events from db: " + querySql, ex); // NON-NLS
1351 } finally {
1352 caseDB.releaseSingleUserCaseReadLock();
1353 }
1354
1355 return events;
1356 }
1357
1365 private static String typeColumnHelper(final boolean useSubTypes) {
1366 return useSubTypes ? "event_type_id" : "super_type_id"; //NON-NLS
1367 }
1368
1377 String getSQLWhere(TimelineFilter.RootFilter filter) {
1378
1379 String result;
1380 if (filter == null) {
1381 return getTrueLiteral();
1382 } else {
1383 result = filter.getSQLWhere(this);
1384 }
1385
1386 return result;
1387 }
1388
1400 private String getSqlIgnoreConflict(String insertTableValues) throws TskCoreException {
1401 switch (caseDB.getDatabaseType()) {
1402 case POSTGRESQL:
1403 return "INSERT INTO " + insertTableValues + " ON CONFLICT DO NOTHING";
1404 case SQLITE:
1405 return "INSERT OR IGNORE INTO " + insertTableValues;
1406 default:
1407 throw new TskCoreException("Unknown DB Type: " + caseDB.getDatabaseType().name());
1408 }
1409 }
1410
1411 private String getTrueLiteral() {
1412 switch (caseDB.getDatabaseType()) {
1413 case POSTGRESQL:
1414 return "TRUE";//NON-NLS
1415 case SQLITE:
1416 return "1";//NON-NLS
1417 default:
1418 throw new UnsupportedOperationException("Unsupported DB type: " + caseDB.getDatabaseType().name());//NON-NLS
1419
1420 }
1421 }
1422
1427 final static public class TimelineEventAddedEvent {
1428
1429 private final TimelineEvent addedEvent;
1430
1432 return addedEvent;
1433 }
1434
1436 this.addedEvent = event;
1437 }
1438 }
1439
	/**
	 * Exception used internally to signal that an event which would duplicate
	 * an existing one was encountered.
	 */
	private static class DuplicateException extends Exception {

		private static final long serialVersionUID = 1L;

		/**
		 * Constructs the exception with the given message.
		 *
		 * @param message Description of the duplicate condition.
		 */
		DuplicateException(String message) {
			super(message);
		}
	}
1456}
Set< Long > updateEventsForArtifactTagAdded(BlackboardArtifact artifact)
Set< Long > updateEventsForHashSetHit(Content content)
List< Long > getEventIDs(Interval timeRange, TimelineFilter.RootFilter filter)
Set< Long > updateEventsForArtifactTagDeleted(BlackboardArtifact artifact)
ImmutableList< TimelineEventType > getEventTypes()
List< TimelineEvent > getEvents(Interval timeRange, TimelineFilter.RootFilter filter)
TimelineEvent getEventById(long eventID)
Set< Long > updateEventsForContentTagAdded(Content content)
Interval getSpanningInterval(Interval timeRange, TimelineFilter.RootFilter filter, DateTimeZone timeZone)
List< Long > getEventIDsForArtifact(BlackboardArtifact artifact)
Optional< TimelineEventType > getEventType(long eventTypeID)
Set< Long > getEventIDsForContent(Content content, boolean includeDerivedArtifacts)
Interval getSpanningInterval(Collection< Long > eventIDs)
TimelineEvent addTimelineEvent(TimelineEventType eventType, String shortDesc, String medDesc, String longDesc, long dataSourceId, long contentId, Long artifactId, long time, boolean hashHit, boolean tagged, CaseDbTransaction trans)
Set< Long > updateEventsForContentTagDeleted(Content content)
Map< TimelineEventType, Long > countEventsByType(Long startTime, Long endTime, TimelineFilter.RootFilter filter, TimelineEventType.HierarchyLevel typeHierachyLevel)
SortedSet<? extends TimelineEventType > getChildren()

Copyright © 2011-2024 Brian Carrier. (carrier -at- sleuthkit -dot- org)
This work is licensed under a Creative Commons Attribution-Share Alike 3.0 United States License.