19 package org.sleuthkit.autopsy.timeline.db;
21 import com.google.common.collect.HashMultimap;
22 import com.google.common.collect.SetMultimap;
23 import java.nio.file.Paths;
24 import java.sql.Connection;
25 import java.sql.DriverManager;
26 import java.sql.PreparedStatement;
27 import java.sql.ResultSet;
28 import java.sql.SQLException;
29 import java.sql.Statement;
30 import java.sql.Types;
31 import java.util.ArrayList;
32 import java.util.Arrays;
33 import java.util.Collection;
34 import java.util.Collections;
35 import java.util.Comparator;
36 import java.util.HashMap;
37 import java.util.HashSet;
38 import java.util.Iterator;
39 import java.util.List;
41 import java.util.Objects;
43 import java.util.TimeZone;
44 import java.util.concurrent.locks.Lock;
45 import java.util.concurrent.locks.ReentrantReadWriteLock;
46 import java.util.logging.Level;
47 import java.util.stream.Collectors;
48 import javax.annotation.Nonnull;
49 import javax.annotation.Nullable;
50 import org.apache.commons.lang3.StringUtils;
51 import org.apache.commons.lang3.tuple.ImmutablePair;
52 import org.joda.time.DateTimeZone;
53 import org.joda.time.Interval;
54 import org.joda.time.Period;
76 import org.sqlite.SQLiteJDBCLoader;
// NOTE(review): this file is a damaged extraction — the embedded original line
// numbers (92, 93, 110, ...) jump, so statements between fragments are missing.
// Code below is left byte-identical; comments only.
// Static initializer fragment: loads the SQLite JDBC driver by name so
// DriverManager can create "jdbc:sqlite:" connections later.
92 Class.forName(
"org.sqlite.JDBC");
93 }
catch (ClassNotFoundException ex) {
94 LOGGER.log(Level.SEVERE,
"Failed to load sqlite JDBC driver", ex);
110 }
// Factory-method error handling fragment (original lines 110-114): connection
// failures are logged at SEVERE but apparently not rethrown — callers
// presumably receive a partially-initialized instance; TODO confirm upstream.
catch (SQLException ex) {
111 LOGGER.log(Level.SEVERE,
"sql error creating database connection", ex);
113 }
catch (Exception ex) {
114 LOGGER.log(Level.SEVERE,
"error creating database connection", ex);
// The single JDBC connection for the events.db; volatile so the reference
// published by initializeDB() is visible across threads.
119 private volatile Connection
con;
// Fair write lock guarding DB access; only the write side of a
// ReentrantReadWriteLock is kept, so this is effectively a mutex.
145 private final Lock
DBLock =
new ReentrantReadWriteLock(
true).writeLock();
// Constructor fragment: events.db lives in the case directory.
149 this.dbPath = Paths.get(autoCase.getCaseDirectory(),
"events.db").toString();
167 }
catch (SQLException ex) {
// BUG(review): typo in log message — "evetns.db" should be "events.db".
168 LOGGER.log(Level.WARNING,
"Failed to close connection to evetns.db", ex);
// getSpanningInterval(Collection<Long> eventIDs) fragment: returns the
// [min, max] time interval (in millis, UTC) covering the given event ids.
// NOTE(review): SQL is built by concatenating the joined id list; safe only
// because the ids are Longs, but a prepared statement would be cleaner.
176 try (Statement stmt = con.createStatement();
177 ResultSet rs = stmt.executeQuery(
"SELECT Min(time), Max(time) FROM events WHERE event_id IN (" + StringUtils.join(eventIDs,
", ") +
")");) {
// NOTE(review): reading columns by the expression text "Min(time)" /
// "Max(time)" is fragile — an AS alias would be more robust.
// Times are stored in seconds; *1000 converts to millis, and +1 second on the
// max makes the interval end-exclusive while still covering the last event.
179 return new Interval(rs.getLong(
"Min(time)") * 1000, (rs.getLong(
"Max(time)") + 1) * 1000, DateTimeZone.UTC);
181 }
catch (SQLException ex) {
182 LOGGER.log(Level.SEVERE,
"Error executing get spanning interval query.", ex);
// beginTransaction(): wraps BEGIN semantics in an EventTransaction token.
189 EventTransaction beginTransaction() {
190 return new EventTransaction();
// commitTransaction fragment: rejects a transaction that was already closed.
193 void commitTransaction(EventTransaction tr) {
195 throw new IllegalArgumentException(
"can't close already closed transaction");
// countAllEvents fragment: total row count of the events table via a
// pre-prepared "SELECT count(*) AS count FROM events" statement.
204 int countAllEvents() {
206 try (ResultSet rs = countAllEventsStmt.executeQuery()) {
208 return rs.getInt(
"count");
210 }
catch (SQLException ex) {
211 LOGGER.log(Level.SEVERE,
"Error counting all events", ex);
// countEventsByType(ZoomParams) fragment: converts the millis time range to
// seconds and delegates; returns an empty map when no time range is set.
227 Map<EventType, Long> countEventsByType(ZoomParams params) {
228 if (params.getTimeRange() != null) {
229 return countEventsByType(params.getTimeRange().getStartMillis() / 1000,
230 params.getTimeRange().getEndMillis() / 1000,
231 params.getFilter(), params.getTypeZoomLevel());
233 return Collections.emptyMap();
// Counts distinct tags per tag-name for the given tagged event ids.
// Returns display-name -> count. NOTE(review): grouping is by tag_name_id
// while the selected column is tag_name_display_name — SQLite permits this;
// it assumes display names are stable per tag_name_id.
245 Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) {
246 HashMap<String, Long> counts =
new HashMap<>();
// NOTE(review): SQL built by string concatenation of the id list; the ids are
// Longs so this is not injectable, but it bypasses prepared statements.
248 try (Statement createStatement = con.createStatement();
249 ResultSet rs = createStatement.executeQuery(
"SELECT tag_name_display_name, COUNT(DISTINCT tag_id) AS count FROM tags"
250 +
" WHERE event_id IN (" + StringUtils.join(eventIDsWithTags,
", ") +
")"
251 +
" GROUP BY tag_name_id"
252 +
" ORDER BY tag_name_display_name");) {
254 counts.put(rs.getString(
"tag_name_display_name"), rs.getLong(
"count"));
256 }
catch (SQLException ex) {
257 LOGGER.log(Level.SEVERE,
"Failed to get tag counts by tag name.", ex);
// reInitializeDB fragment: drops all five tables (events, hash_set_hits,
// hash_sets, tags, db_info) via pre-prepared DROP statements so the schema
// can be rebuilt from scratch. Drop order respects FK references
// (hash_set_hits before hash_sets; tags before events).
268 void reInitializeDB() {
271 dropEventsTableStmt.executeUpdate();
272 dropHashSetHitsTableStmt.executeUpdate();
273 dropHashSetsTableStmt.executeUpdate();
274 dropTagsTableStmt.executeUpdate();
275 dropDBInfoTableStmt.executeUpdate();
277 }
catch (SQLException ex) {
278 LOGGER.log(Level.SEVERE,
"could not drop old tables", ex);
// reInitializeTags fragment: drops only the tags table.
288 void reInitializeTags() {
291 dropTagsTableStmt.executeUpdate();
293 }
catch (SQLException ex) {
294 LOGGER.log(Level.SEVERE,
"could not drop old tags table", ex);
// Finds the smallest interval that both bounds timeRange and contains at
// least one event matching the filter: the latest event at-or-before the
// range start and the earliest event at-or-after the range end.
300 Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter) {
// Convert Joda millis to the seconds stored in the events table.
301 long start = timeRange.getStartMillis() / 1000;
302 long end = timeRange.getEndMillis() / 1000;
303 final String sqlWhere = SQLHelper.getSQLWhere(filter);
305 try (Statement stmt = con.createStatement();
306 ResultSet rs = stmt.executeQuery(
" SELECT (SELECT Max(time) FROM events " + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) +
" WHERE time <=" + start +
" AND " + sqlWhere +
") AS start,"
307 +
"(SELECT Min(time) FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) +
" WHERE time >= " + end +
" AND " + sqlWhere +
") AS end")) {
310 long start2 = rs.getLong(
"start");
311 long end2 = rs.getLong(
"end");
// +1 second on the end keeps the returned interval end-exclusive while still
// covering the bounding event.
316 return new Interval(start2 * 1000, (end2 + 1) * 1000, TimeLineController.getJodaTimeZone());
318 }
catch (SQLException ex) {
// NOTE(review): copy-pasted message — this query fetches both MIN and MAX;
// "Failed to get MIN time." under-describes the failure.
319 LOGGER.log(Level.SEVERE,
"Failed to get MIN time.", ex);
// getEventById fragment: single-row lookup via the prepared
// "SELECT * FROM events WHERE event_id = ?" statement.
326 TimeLineEvent getEventById(Long eventID) {
327 TimeLineEvent result = null;
330 getEventByIDStmt.clearParameters();
331 getEventByIDStmt.setLong(1, eventID);
332 try (ResultSet rs = getEventByIDStmt.executeQuery()) {
338 }
catch (SQLException sqlEx) {
339 LOGGER.log(Level.SEVERE,
"exception while querying for event with id = " + eventID, sqlEx);
// Convenience overload: convert the Joda interval (millis) to second-based
// bounds and delegate.
346 Set<Long> getEventIDs(Interval timeRange, RootFilter filter) {
347 return getEventIDs(timeRange.getStartMillis() / 1000, timeRange.getEndMillis() / 1000, filter);
// Returns ids of events in [startTime, endTime) (seconds) matching the
// filter. The Objects.equals guard below presumably widens a degenerate
// zero-length range — the adjusting statement is missing from this extract.
350 Set<Long> getEventIDs(Long startTime, Long endTime, RootFilter filter) {
351 if (Objects.equals(startTime, endTime)) {
354 Set<Long> resultIDs =
new HashSet<>();
// Query is assembled by concatenation: helper methods add hash-hit / tag join
// clauses only when the filter needs them; SQLHelper renders the WHERE body.
357 final String query =
"SELECT events.event_id AS event_id FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) +
" WHERE time >= " + startTime +
" AND time <" + endTime +
" AND " + SQLHelper.getSQLWhere(filter);
358 try (Statement stmt = con.createStatement();
359 ResultSet rs = stmt.executeQuery(query)) {
361 resultIDs.add(rs.getLong(
"event_id"));
364 }
catch (SQLException sqlEx) {
365 LOGGER.log(Level.SEVERE,
"failed to execute query for event ids in range", sqlEx);
// hasNewColumns fragment: true only when the upgraded columns exist AND at
// least one datasource id is recorded (the leading conjunct is missing here).
377 boolean hasNewColumns() {
379 && (getDataSourceIDs().isEmpty() ==
false);
// Distinct non-zero datasource ids present in the events table.
382 Set<Long> getDataSourceIDs() {
383 HashSet<Long> hashSet =
new HashSet<>();
385 try (ResultSet rs = getDataSourceIDsStmt.executeQuery()) {
387 long datasourceID = rs.getLong(
"datasource_id");
388 hashSet.add(datasourceID);
390 }
catch (SQLException ex) {
// BUG(review): copy-pasted log message — this reads datasource ids, not the
// MAX time; should say something like "Failed to get datasource ids.".
391 LOGGER.log(Level.SEVERE,
"Failed to get MAX time.", ex);
// Map of hash_set_id -> hash_set_name, wrapped unmodifiable for callers.
398 Map<Long, String> getHashSetNames() {
399 Map<Long, String> hashSets =
new HashMap<>();
401 try (ResultSet rs = getHashSetNamesStmt.executeQuery();) {
403 long hashSetID = rs.getLong(
"hash_set_id");
404 String hashSetName = rs.getString(
"hash_set_name");
405 hashSets.put(hashSetID, hashSetName);
407 }
catch (SQLException ex) {
408 LOGGER.log(Level.SEVERE,
"Failed to get hash sets.", ex);
412 return Collections.unmodifiableMap(hashSets);
// analyze() fragment: refreshes SQLite's query-planner statistics
// ("ANALYZE" plus re-analyzing sqlite_master) after bulk inserts.
// NOTE(review): the boolean result 'b' is never used.
417 try (Statement createStatement = con.createStatement()) {
418 boolean b = createStatement.execute(
"analyze; analyze sqlite_master;");
419 }
catch (SQLException ex) {
420 LOGGER.log(Level.SEVERE,
"Failed to analyze events db.", ex);
// getMaxTime fragment: latest event time (seconds) via prepared statement;
// the column is aliased "max" in the prepared SQL (not visible here).
431 try (ResultSet rs = getMaxTimeStmt.executeQuery()) {
433 return rs.getLong(
"max");
435 }
catch (SQLException ex) {
436 LOGGER.log(Level.SEVERE,
"Failed to get MAX time.", ex);
// getMinTime fragment: earliest event time (seconds), symmetric with above.
448 try (ResultSet rs = getMinTimeStmt.executeQuery()) {
450 return rs.getLong(
"min");
452 }
catch (SQLException ex) {
453 LOGGER.log(Level.SEVERE,
"Failed to get MIN time.", ex);
// initializeDB(): opens (or reuses) the SQLite connection, creates/upgrades
// the schema (db_info, events, hash_sets, hash_set_hits, tags), creates the
// query indexes, and prepares all reusable statements. synchronized so only
// one thread performs initialization at a time.
466 final synchronized void initializeDB() {
469 if (con == null || con.isClosed()) {
470 con = DriverManager.getConnection(
"jdbc:sqlite:" + dbPath);
472 }
catch (SQLException ex) {
473 LOGGER.log(Level.SEVERE,
"Failed to open connection to events.db", ex);
478 }
catch (SQLException ex) {
479 LOGGER.log(Level.SEVERE,
"problem accessing database", ex);
// db_info: a simple key/value table (column list truncated in this extract).
485 try (Statement stmt = con.createStatement()) {
486 String sql =
"CREATE TABLE if not exists db_info "
489 +
"PRIMARY KEY (key))";
491 }
catch (SQLException ex) {
492 LOGGER.log(Level.SEVERE,
"problem creating db_info table", ex);
// events: the core table. Times are stored as INTEGER seconds; hash_hit and
// tagged are 0/1 flags denormalized onto the row for fast filtering.
495 try (Statement stmt = con.createStatement()) {
496 String sql =
"CREATE TABLE if not exists events "
497 +
" (event_id INTEGER PRIMARY KEY, "
498 +
" datasource_id INTEGER, "
499 +
" file_id INTEGER, "
500 +
" artifact_id INTEGER, "
502 +
" sub_type INTEGER, "
503 +
" base_type INTEGER, "
504 +
" full_description TEXT, "
505 +
" med_description TEXT, "
506 +
" short_description TEXT, "
507 +
" known_state INTEGER,"
508 +
" hash_hit INTEGER,"
509 +
" tagged INTEGER)";
511 }
catch (SQLException ex) {
512 LOGGER.log(Level.SEVERE,
"problem creating database table", ex);
// Schema upgrades: each ALTER is attempted unconditionally; if the column
// already exists SQLite throws and the exception is logged — presumably the
// hasDBColumn() guards live in the lines missing from this extract (TODO
// confirm), otherwise every startup logs spurious SEVERE entries.
516 try (Statement stmt = con.createStatement()) {
517 String sql =
"ALTER TABLE events ADD COLUMN datasource_id INTEGER";
519 }
catch (SQLException ex) {
520 LOGGER.log(Level.SEVERE,
"problem upgrading events table", ex);
524 try (Statement stmt = con.createStatement()) {
525 String sql =
"ALTER TABLE events ADD COLUMN tagged INTEGER";
527 }
catch (SQLException ex) {
528 LOGGER.log(Level.SEVERE,
"problem upgrading events table", ex);
533 try (Statement stmt = con.createStatement()) {
534 String sql =
"ALTER TABLE events ADD COLUMN hash_hit INTEGER";
536 }
catch (SQLException ex) {
537 LOGGER.log(Level.SEVERE,
"problem upgrading events table", ex);
// hash_sets / hash_set_hits: normalized many-to-many between events and the
// named hash sets they hit.
541 try (Statement stmt = con.createStatement()) {
542 String sql =
"CREATE TABLE if not exists hash_sets "
543 +
"( hash_set_id INTEGER primary key,"
544 +
" hash_set_name VARCHAR(255) UNIQUE NOT NULL)";
546 }
catch (SQLException ex) {
547 LOGGER.log(Level.SEVERE,
"problem creating hash_sets table", ex);
550 try (Statement stmt = con.createStatement()) {
551 String sql =
"CREATE TABLE if not exists hash_set_hits "
552 +
"(hash_set_id INTEGER REFERENCES hash_sets(hash_set_id) not null, "
553 +
" event_id INTEGER REFERENCES events(event_id) not null, "
554 +
" PRIMARY KEY (hash_set_id, event_id))";
556 }
catch (SQLException ex) {
557 LOGGER.log(Level.SEVERE,
"problem creating hash_set_hits table", ex);
// Indexes supporting the timeline queries (filter by datasource, hash-hit /
// tagged flags, artifact lookup, and the grouped type+description+time scans).
562 createIndex(
"events", Arrays.asList(
"datasource_id"));
563 createIndex(
"events", Arrays.asList(
"event_id",
"hash_hit"));
564 createIndex(
"events", Arrays.asList(
"event_id",
"tagged"));
566 createIndex(
"events", Arrays.asList(
"artifact_id"));
567 createIndex(
"events", Arrays.asList(
"sub_type",
"short_description",
"time"));
568 createIndex(
"events", Arrays.asList(
"base_type",
"short_description",
"time"));
570 createIndex(
"events", Arrays.asList(
"known_state"));
// Prepared statements reused for the hot insert/query paths. The 12-column
// insert matches the 12 '?' placeholders and the setters in insertEvent.
574 "INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hash_hit, tagged) "
575 +
"VALUES (?,?,?,?,?,?,?,?,?,?,?,?)");
576 getHashSetNamesStmt =
prepareStatement(
"SELECT hash_set_id, hash_set_name FROM hash_sets");
577 getDataSourceIDsStmt =
prepareStatement(
"SELECT DISTINCT datasource_id FROM events WHERE datasource_id != 0");
580 getEventByIDStmt =
prepareStatement(
"SELECT * FROM events WHERE event_id = ?");
581 insertHashSetStmt =
prepareStatement(
"INSERT OR IGNORE INTO hash_sets (hash_set_name) values (?)");
582 selectHashSetStmt =
prepareStatement(
"SELECT hash_set_id FROM hash_sets WHERE hash_set_name = ?");
583 insertHashHitStmt =
prepareStatement(
"INSERT OR IGNORE INTO hash_set_hits (hash_set_id, event_id) values (?,?)");
584 insertTagStmt =
prepareStatement(
"INSERT OR IGNORE INTO tags (tag_id, tag_name_id,tag_name_display_name, event_id) values (?,?,?,?)");
586 countAllEventsStmt =
prepareStatement(
"SELECT count(*) AS count FROM events");
588 dropHashSetHitsTableStmt =
prepareStatement(
"DROP TABLE IF EXISTS hash_set_hits");
// NOTE(review): "file_id == ?" uses SQLite's non-standard '==' (legal there,
// but plain '=' is portable and used one line below — inconsistent).
592 selectNonArtifactEventIDsByObjectIDStmt =
prepareStatement(
"SELECT event_id FROM events WHERE file_id == ? AND artifact_id IS NULL");
593 selectEventIDsBYObjectAndArtifactIDStmt =
prepareStatement(
"SELECT event_id FROM events WHERE file_id == ? AND artifact_id = ?");
594 }
catch (SQLException sQLException) {
// BUG(review): typo in log message — "prepareStatment" -> "prepareStatement".
595 LOGGER.log(Level.SEVERE,
"failed to prepareStatment", sQLException);
// initializeTagsTable fragment: (event_id, tag_name_id) is the primary key,
// so an event carries at most one tag per tag name; tag_id tracks the actual
// Autopsy tag instance.
607 try (Statement stmt = con.createStatement()) {
608 String sql =
"CREATE TABLE IF NOT EXISTS tags "
609 +
"(tag_id INTEGER NOT NULL,"
610 +
" tag_name_id INTEGER NOT NULL, "
611 +
" tag_name_display_name TEXT NOT NULL, "
612 +
" event_id INTEGER REFERENCES events(event_id) NOT NULL, "
613 +
" PRIMARY KEY (event_id, tag_name_id))";
615 }
catch (SQLException ex) {
616 LOGGER.log(Level.SEVERE,
"problem creating tags table", ex);
// Creates index <table>_<col1>_<col2>_idx on the given columns if absent.
// Identifier concatenation is safe here: inputs are internal constants, not
// user data.
625 private void createIndex(
final String tableName,
final List<String> columnList) {
626 String indexColumns = columnList.stream().collect(Collectors.joining(
",",
"(",
")"));
627 String indexName = tableName +
"_" + StringUtils.join(columnList,
"_") +
"_idx";
628 try (Statement stmt = con.createStatement()) {
630 String sql =
"CREATE INDEX IF NOT EXISTS " + indexName +
" ON " + tableName + indexColumns;
632 }
catch (SQLException ex) {
633 LOGGER.log(Level.SEVERE,
"problem creating index " + indexName, ex);
// hasDBColumn fragment: scans "PRAGMA table_info(events)" rows comparing the
// "name" column against dbColumn. NOTE(review): the ResultSet here is not in
// the try-with-resources header, so it is closed only via the Statement.
643 try (Statement stmt = con.createStatement()) {
645 ResultSet executeQuery = stmt.executeQuery(
"PRAGMA table_info(events)");
646 while (executeQuery.next()) {
647 if (dbColumn.equals(executeQuery.getString(
"name"))) {
651 }
catch (SQLException ex) {
652 LOGGER.log(Level.SEVERE,
"problem executing pragma", ex);
// Convenience overload: wraps the transactional insertEvent in a one-shot
// begin/commit pair.
669 void insertEvent(
long time,
EventType type,
long datasourceID,
long objID,
670 Long artifactID, String fullDescription, String medDescription,
671 String shortDescription, TskData.FileKnown known, Set<String> hashSets, List<? extends Tag> tags) {
673 EventTransaction transaction = beginTransaction();
674 insertEvent(time, type, datasourceID, objID, artifactID, fullDescription, medDescription, shortDescription, known, hashSets, tags, transaction);
675 commitTransaction(transaction);
// Transactional insert: writes one events row, then the hash-set link rows
// and tag rows keyed on the generated event id.
684 void insertEvent(
long time,
EventType type,
long datasourceID,
long objID,
685 Long artifactID, String fullDescription, String medDescription,
686 String shortDescription, TskData.FileKnown known, Set<String> hashSetNames,
687 List<? extends Tag> tags, EventTransaction transaction) {
689 if (transaction.isClosed()) {
690 throw new IllegalArgumentException(
"can't update database with closed transaction");
// sub_type is the index of the concrete type within all root event types;
// -1 (not found) leads to the setNull branch below.
692 int typeNum = RootEventType.allTypes.indexOf(type);
699 insertRowStmt.clearParameters();
700 insertRowStmt.setLong(1, datasourceID);
701 insertRowStmt.setLong(2, objID);
// artifact_id is nullable: file-based events store NULL here.
702 if (artifactID != null) {
703 insertRowStmt.setLong(3, artifactID);
705 insertRowStmt.setNull(3, Types.NULL);
707 insertRowStmt.setLong(4, time);
710 insertRowStmt.setInt(5, typeNum);
712 insertRowStmt.setNull(5, Types.INTEGER);
715 insertRowStmt.setInt(6, superTypeNum);
716 insertRowStmt.setString(7, fullDescription);
717 insertRowStmt.setString(8, medDescription);
718 insertRowStmt.setString(9, shortDescription);
// known defaults to UNKNOWN when the caller passes null.
720 insertRowStmt.setByte(10, known == null ? TskData.FileKnown.UNKNOWN.getFileKnownValue() : known.getFileKnownValue());
// Denormalized 0/1 flags kept in sync with hash_set_hits / tags tables.
722 insertRowStmt.setInt(11, hashSetNames.isEmpty() ? 0 : 1);
723 insertRowStmt.setInt(12, tags.isEmpty() ? 0 : 1);
725 insertRowStmt.executeUpdate();
727 try (ResultSet generatedKeys = insertRowStmt.getGeneratedKeys()) {
728 while (generatedKeys.next()) {
// NOTE(review): reading the key by the literal label "last_insert_rowid()"
// is sqlite-jdbc specific and fragile; getLong(1) would be driver-neutral.
729 long eventID = generatedKeys.getLong(
"last_insert_rowid()");
730 for (String name : hashSetNames) {
// INSERT OR IGNORE makes the hash-set name row idempotent ...
733 insertHashSetStmt.setString(1, name);
734 insertHashSetStmt.executeUpdate();
// ... then the id is looked up to write the (hash_set_id, event_id) link.
738 selectHashSetStmt.setString(1, name);
739 try (ResultSet rs = selectHashSetStmt.executeQuery()) {
741 int hashsetID = rs.getInt(
"hash_set_id");
743 insertHashHitStmt.setInt(1, hashsetID);
744 insertHashHitStmt.setLong(2, eventID);
745 insertHashHitStmt.executeUpdate();
750 for (Tag tag : tags) {
758 }
catch (SQLException ex) {
759 LOGGER.log(Level.SEVERE,
"failed to insert event", ex);
// addTag fragment: marks the events for (objectID[, artifactID]) tagged and
// inserts a tags row per affected event; returns the affected event ids
// (empty set on failure).
778 Set<Long> addTag(
long objectID, @Nullable Long artifactID, Tag tag, EventTransaction transaction) {
779 if (transaction != null && transaction.isClosed()) {
780 throw new IllegalArgumentException(
"can't update database with closed transaction");
785 for (Long eventID : eventIDs) {
789 }
catch (SQLException ex) {
790 LOGGER.log(Level.SEVERE,
"failed to add tag to event", ex);
794 return Collections.emptySet();
// Writes one tags row; INSERT OR IGNORE (see prepared SQL) makes repeats of
// the same (event_id, tag_name_id) pair harmless.
808 private void insertTag(Tag tag,
long eventID)
throws SQLException {
811 insertTagStmt.clearParameters();
812 insertTagStmt.setLong(1, tag.getId());
813 insertTagStmt.setLong(2, tag.getName().getId());
814 insertTagStmt.setString(3, tag.getName().getDisplayName());
815 insertTagStmt.setLong(4, eventID);
816 insertTagStmt.executeUpdate();
// deleteTag fragment: removes the tags row(s) for tagID; stillTagged tells
// the (missing-here) markEventsTagged call whether other tags remain on the
// affected events.
834 Set<Long> deleteTag(
long objectID, @Nullable Long artifactID,
long tagID,
boolean stillTagged) {
838 deleteTagStmt.clearParameters();
839 deleteTagStmt.setLong(1, tagID);
840 deleteTagStmt.executeUpdate();
843 }
catch (SQLException ex) {
// BUG(review): copy-pasted log message — this is the DELETE path; should be
// "failed to delete tag from event" (or similar), not "add".
844 LOGGER.log(Level.SEVERE,
"failed to add tag to event", ex);
848 return Collections.emptySet();
// Selects the event ids for a file (artifactID null -> file-only events) or
// a specific artifact, then bulk-updates their denormalized tagged flag.
// Returns the affected ids so callers can refresh caches.
871 private Set<Long>
markEventsTagged(
long objectID, @Nullable Long artifactID,
boolean tagged)
throws SQLException {
873 PreparedStatement selectStmt;
874 if (Objects.isNull(artifactID)) {
876 selectNonArtifactEventIDsByObjectIDStmt.clearParameters();
877 selectNonArtifactEventIDsByObjectIDStmt.setLong(1, objectID);
881 selectEventIDsBYObjectAndArtifactIDStmt.clearParameters();
882 selectEventIDsBYObjectAndArtifactIDStmt.setLong(1, objectID);
883 selectEventIDsBYObjectAndArtifactIDStmt.setLong(2, artifactID);
887 HashSet<Long> eventIDs =
new HashSet<>();
888 try (ResultSet executeQuery = selectStmt.executeQuery();) {
889 while (executeQuery.next()) {
890 eventIDs.add(executeQuery.getLong(
"event_id"));
// UPDATE built by concatenation; values are a boolean literal and joined
// longs, so not injectable, though very large id sets could exceed SQLite's
// expression limits — TODO confirm expected set sizes.
895 try (Statement updateStatement = con.createStatement();) {
896 updateStatement.executeUpdate(
"UPDATE events SET tagged = " + (tagged ? 1 : 0)
897 +
" WHERE event_id IN (" + StringUtils.join(eventIDs,
",") +
")");
// rollBackTransaction fragment: delegates rollback to the transaction token.
903 void rollBackTransaction(EventTransaction trans) {
// Shutdown fragment: closes every statement registered via prepareStatement.
908 for (PreparedStatement pStmt : preparedStatements) {
// configureDB fragment: performance-oriented PRAGMAs traded against
// durability (synchronous=OFF means a crash can corrupt/lose recent writes —
// acceptable for a rebuildable cache DB).
916 try (Statement statement = con.createStatement()) {
918 statement.execute(
"PRAGMA synchronous = OFF;");
// NOTE(review): count_changes is deprecated in modern SQLite and presumably
// a no-op on current sqlite-jdbc builds — harmless but dead weight.
921 statement.execute(
"PRAGMA count_changes = OFF;");
923 statement.execute(
"PRAGMA temp_store = MEMORY");
925 statement.execute(
"PRAGMA cache_size = 50000");
927 statement.execute(
"PRAGMA auto_vacuum = 0");
929 statement.execute(
"PRAGMA read_uncommitted = True;");
// Log whether sqlite-jdbc bound its native library or fell back to pure Java
// (pure-java mode is dramatically slower).
935 LOGGER.log(Level.INFO, String.format(
"sqlite-jdbc version %s loaded in %s mode",
936 SQLiteJDBCLoader.getVersion(), SQLiteJDBCLoader.isNativeMode() ?
"native" :
"pure-java"));
937 }
catch (Exception exception) {
938 LOGGER.log(Level.SEVERE,
"Failed to determine if sqlite-jdbc is loaded in native or pure-java mode.", exception);
// constructTimeLineEvent fragment: maps one events row to a TimeLineEvent;
// the hash_hit / tagged ints are decoded back into booleans.
944 rs.getLong(
"datasource_id"),
945 rs.getLong(
"file_id"),
946 rs.getLong(
"artifact_id"),
948 rs.getString(
"full_description"),
949 rs.getString(
"med_description"),
950 rs.getString(
"short_description"),
951 TskData.FileKnown.valueOf(rs.getByte(
"known_state")),
952 rs.getInt(
"hash_hit") != 0,
953 rs.getInt(
"tagged") != 0);
// countEventsByType(Long, Long, RootFilter, EventTypeZoomLevel) fragment:
// counts distinct events per type in [startTime, endTime), grouped by either
// sub_type or base_type depending on the zoom level.
973 if (Objects.equals(startTime, endTime)) {
977 Map<EventType, Long> typeMap =
new HashMap<>();
983 final String queryString =
"SELECT count(DISTINCT events.event_id) AS count, " +
typeColumnHelper(useSubTypes)
984 +
" FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) +
" WHERE time >= " + startTime +
" AND time < " + endTime +
" AND " + SQLHelper.getSQLWhere(filter)
988 try (Statement stmt = con.createStatement();
989 ResultSet rs = stmt.executeQuery(queryString);) {
// Type decoding: ordinal lookup into BaseTypes when not using sub types (the
// sub-type branch preceding ':' is missing from this extract).
993 :
BaseTypes.values()[rs.getInt(
"base_type")];
995 typeMap.put(type, rs.getLong(
"count"));
998 }
// NOTE(review): catches broad Exception rather than SQLException — wider
// than the other query methods in this class; TODO confirm intent.
catch (Exception ex) {
999 LOGGER.log(Level.SEVERE,
"Error getting count of events from db.", ex);
// getEventStripes fragment: the core timeline aggregation. Buckets events
// into strftime-formatted intervals, grouping by type and description, with
// group_concat collecting the member / hash-hit / tagged event ids per bucket.
1017 List<EventStripe> getEventStripes(
ZoomParams params) {
1024 long start = timeRange.getStartMillis() / 1000;
1025 long end = timeRange.getEndMillis() / 1000;
// Guard against a zero-length (degenerate) range.
1028 end = Math.max(end, start + 1);
1034 String strfTimeFormat = SQLHelper.getStrfTimeFormat(rangeInfo.
getPeriodSize());
1035 String descriptionColumn = SQLHelper.getDescriptionColumn(descriptionLOD);
1041 String query =
"SELECT strftime('" + strfTimeFormat +
"',time , 'unixepoch'" + timeZone +
") AS interval,"
1042 +
"\n group_concat(events.event_id) as event_ids,"
1043 +
"\n group_concat(CASE WHEN hash_hit = 1 THEN events.event_id ELSE NULL END) as hash_hits,"
1044 +
"\n group_concat(CASE WHEN tagged = 1 THEN events.event_id ELSE NULL END) as taggeds,"
1045 +
"\n min(time), max(time), " + typeColumn +
", " + descriptionColumn
1046 +
"\n FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter)
1047 +
"\n WHERE time >= " + start +
" AND time < " + end +
" AND " + SQLHelper.getSQLWhere(filter)
1048 +
"\n GROUP BY interval, " + typeColumn +
" , " + descriptionColumn
1049 +
"\n ORDER BY min(time)";
// NOTE(review): full query logged at INFO on every zoom — noisy for normal
// operation; FINE would be more appropriate.
1053 LOGGER.log(Level.INFO,
"executing timeline query: {0}", query);
1060 List<EventCluster> events =
new ArrayList<>();
1063 try (Statement createStatement = con.createStatement();
1064 ResultSet rs = createStatement.executeQuery(query)) {
1068 }
catch (SQLException ex) {
1069 LOGGER.log(Level.SEVERE,
"Failed to get events with query: " + query, ex);
// eventClusterHelper fragment: splits the group_concat id strings back into
// Long sets and builds one EventCluster per result row.
1092 String eventIDsString = rs.getString(
"event_ids");
1093 Set<Long> eventIDs = SQLHelper.unGroupConcat(eventIDsString, Long::valueOf);
1094 String description = rs.getString(SQLHelper.getDescriptionColumn(descriptionLOD));
1097 Set<Long> hashHits = SQLHelper.unGroupConcat(rs.getString(
"hash_hits"), Long::valueOf);
1098 Set<Long> tagged = SQLHelper.unGroupConcat(rs.getString(
"taggeds"), Long::valueOf);
1100 return new EventCluster(interval, type, eventIDs, hashHits, tagged,
1101 description, descriptionLOD);
// mergeClustersToStripes fragment: first buckets clusters by (type,
// description) in a multimap ...
1120 Map<EventType, SetMultimap< String, EventCluster>> typeMap =
new HashMap<>();
1123 typeMap.computeIfAbsent(aggregateEvent.getEventType(), eventType -> HashMultimap.create())
1124 .put(aggregateEvent.getDescription(), aggregateEvent);
1127 ArrayList<EventCluster> aggEvents =
new ArrayList<>();
// ... then, per description, walks clusters sorted by start time and merges
// neighbors whose gap is at most a quarter of the current zoom's time unit.
1130 for (SetMultimap<String, EventCluster> descrMap : typeMap.values()) {
1132 for (String descr : descrMap.keySet()) {
1134 Iterator<EventCluster> iterator = descrMap.get(descr).stream()
1135 .sorted(Comparator.comparing(event -> event.getSpan().getStartMillis()))
1138 while (iterator.hasNext()) {
// gap == null presumably means the spans overlap/abut (Joda gap semantics) —
// treated as mergeable.
1144 if (gap == null || gap.toDuration().getMillis() <= timeUnitLength.toDurationFrom(gap.getStart()).getMillis() / 4) {
1149 aggEvents.add(current);
1153 aggEvents.add(current);
// Finally, merged clusters with the same (type, description) are folded into
// EventStripes via Map.merge.
1158 Map<ImmutablePair<EventType, String>,
EventStripe> stripeDescMap =
new HashMap<>();
1161 stripeDescMap.
merge(ImmutablePair.of(eventCluster.getEventType(), eventCluster.getDescription()),
// typeColumnHelper: chooses the grouping column for the zoom level.
1169 return useSubTypes ?
"sub_type" :
"base_type";
// prepareStatement fragment: every prepared statement is registered so
// closeStatements() can dispose of it later.
1175 preparedStatements.add(prepareStatement);
// EventTransaction fragment: turning auto-commit off opens the transaction...
1200 con.setAutoCommit(
false);
1201 }
catch (SQLException ex) {
// BUG(review): typo in log message — duplicated word: "to to false".
1202 LOGGER.log(Level.SEVERE,
"failed to set auto-commit to to false", ex);
1212 }
catch (SQLException ex1) {
1213 LOGGER.log(Level.SEVERE,
"Exception while attempting to rollback!!", ex1);
1227 }
catch (SQLException ex) {
// NOTE(review): typo in log message — "commiting" -> "committing".
1228 LOGGER.log(Level.SEVERE,
"Error commiting events.db.", ex);
// ... and restoring auto-commit closes it.
1237 con.setAutoCommit(
true);
1238 }
catch (SQLException ex) {
1239 LOGGER.log(Level.SEVERE,
"Error setting auto-commit to true.", ex);
static Version.Type getBuildType()
void insertTag(Tag tag, long eventID)
static List< EventStripe > mergeClustersToStripes(Period timeUnitLength, List< EventCluster > preMergedEvents)
static EventDB getEventDB(Case autoCase)
boolean hasDBColumn(@Nonnull final String dbColumn)
PreparedStatement getEventByIDStmt
Interval getSpanningInterval(Collection< Long > eventIDs)
PreparedStatement dropTagsTableStmt
PreparedStatement dropHashSetHitsTableStmt
PreparedStatement insertHashSetStmt
PreparedStatement selectEventIDsBYObjectAndArtifactIDStmt
PreparedStatement deleteTagStmt
static ReadOnlyObjectProperty< TimeZone > getTimeZone()
PreparedStatement dropEventsTableStmt
Map< EventType, Long > countEventsByType(Long startTime, Long endTime, RootFilter filter, EventTypeZoomLevel zoomLevel)
TimeLineEvent constructTimeLineEvent(ResultSet rs)
PreparedStatement selectNonArtifactEventIDsByObjectIDStmt
static String typeColumnHelper(final boolean useSubTypes)
EventCluster eventClusterHelper(ResultSet rs, boolean useSubTypes, DescriptionLoD descriptionLOD, TagsFilter filter)
boolean hasTaggedColumn()
PreparedStatement getMaxTimeStmt
PreparedStatement getMinTimeStmt
PreparedStatement prepareStatement(String queryString)
PreparedStatement insertTagStmt
PreparedStatement insertRowStmt
PreparedStatement dropHashSetsTableStmt
final Set< PreparedStatement > preparedStatements
static RangeDivisionInfo getRangeDivisionInfo(Interval timeRange)
static final List<?extends EventType > allTypes
PreparedStatement selectHashSetStmt
static EventStripe merge(EventStripe u, EventStripe v)
PreparedStatement getHashSetNamesStmt
boolean hasHashHitColumn()
EventTypeZoomLevel getTypeZoomLevel()
static DateTimeZone getJodaTimeZone()
DescriptionLoD getDescriptionLOD()
PreparedStatement dropDBInfoTableStmt
void createIndex(final String tableName, final List< String > columnList)
PreparedStatement getDataSourceIDsStmt
TagsFilter getTagsFilter()
static final org.sleuthkit.autopsy.coreutils.Logger LOGGER
synchronized static Logger getLogger(String name)
Set< Long > markEventsTagged(long objectID,@Nullable Long artifactID, boolean tagged)
static EventCluster merge(EventCluster cluster1, EventCluster cluster2)
TimeUnits getPeriodSize()
void initializeTagsTable()
PreparedStatement countAllEventsStmt
boolean hasDataSourceIDColumn()
PreparedStatement insertHashHitStmt