19 package org.sleuthkit.autopsy.timeline.db;
21 import com.google.common.collect.HashMultimap;
22 import com.google.common.collect.SetMultimap;
23 import java.nio.file.Paths;
24 import java.sql.Connection;
25 import java.sql.DriverManager;
26 import java.sql.PreparedStatement;
27 import java.sql.ResultSet;
28 import java.sql.SQLException;
29 import java.sql.Statement;
30 import java.sql.Types;
31 import java.util.ArrayList;
32 import java.util.Arrays;
33 import java.util.Collection;
34 import java.util.Collections;
35 import java.util.Comparator;
36 import java.util.HashMap;
37 import java.util.HashSet;
38 import java.util.Iterator;
39 import java.util.List;
41 import java.util.Objects;
43 import java.util.TimeZone;
44 import java.util.concurrent.locks.Lock;
45 import java.util.concurrent.locks.ReentrantReadWriteLock;
46 import java.util.logging.Level;
47 import java.util.stream.Collectors;
48 import javax.annotation.Nonnull;
49 import javax.annotation.Nullable;
50 import org.apache.commons.lang3.StringUtils;
51 import org.apache.commons.lang3.tuple.ImmutablePair;
52 import org.joda.time.DateTimeZone;
53 import org.joda.time.Interval;
54 import org.joda.time.Period;
79 import org.sqlite.SQLiteJDBCLoader;
// NOTE(review): fragment of the static initializer / connection factory. Interior
// lines are missing from this view; numbers fused into the text are original
// source line numbers from extraction garbling.
// Load the Xerial SQLite JDBC driver class so DriverManager can find it.
95 Class.forName(
"org.sqlite.JDBC");
96 }
catch (ClassNotFoundException ex) {
// Driver missing from the classpath: every later getConnection() will fail.
97 LOGGER.log(Level.SEVERE,
"Failed to load sqlite JDBC driver", ex);
113 }
catch (SQLException ex) {
// presumably wraps DriverManager.getConnection for the events.db file -- TODO confirm
114 LOGGER.log(Level.SEVERE,
"sql error creating database connection", ex);
116 }
catch (Exception ex) {
// Broad catch-all so a single bad case database cannot crash the factory.
117 LOGGER.log(Level.SEVERE,
"error creating database connection", ex);
// The single JDBC connection to events.db; volatile because it is opened and
// closed from different threads.
122 private volatile Connection
con;
// Fair (FIFO) write lock used to serialize ALL access to the database; only the
// write half of the ReentrantReadWriteLock is ever used.
148 private final Lock
DBLock =
new ReentrantReadWriteLock(
true).writeLock();
// Constructor fragment: the per-case database lives at <caseDir>/events.db.
152 this.dbPath = Paths.get(autoCase.getCaseDirectory(),
"events.db").toString();
170 }
catch (SQLException ex) {
171 LOGGER.log(Level.WARNING,
"Failed to close connection to evetns.db", ex);
// Body fragment of getSpanningInterval(Collection<Long> eventIDs): one query for
// both Min(time) and Max(time) over the given event ids.
// NOTE(review): SQL is built by string concatenation; safe only because the ids
// are Longs, but a prepared statement would be more robust.
179 try (Statement stmt = con.createStatement();
180 ResultSet rs = stmt.executeQuery(
"SELECT Min(time), Max(time) FROM events WHERE event_id IN (" + StringUtils.join(eventIDs,
", ") +
")");) {
// Times are stored in epoch seconds; *1000 converts to millis. The +1 on the max
// makes the interval end exclusive of the last event's second.
182 return new Interval(rs.getLong(
"Min(time)") * 1000, (rs.getLong(
"Max(time)") + 1) * 1000, DateTimeZone.UTC);
184 }
catch (SQLException ex) {
185 LOGGER.log(Level.SEVERE,
"Error executing get spanning interval query.", ex);
// Start a new transaction wrapper around the shared connection.
192 EventTransaction beginTransaction() {
193 return new EventTransaction();
// Commit a transaction; rejects transactions that were already closed.
196 void commitTransaction(EventTransaction tr) {
198 throw new IllegalArgumentException(
"can't close already closed transaction");
// Count every row in the events table via the prepared countAllEventsStmt.
207 int countAllEvents() {
209 try (ResultSet rs = countAllEventsStmt.executeQuery()) {
// "count" is the alias declared in the prepared statement's SQL.
211 return rs.getInt(
"count");
213 }
catch (SQLException ex) {
214 LOGGER.log(Level.SEVERE,
"Error counting all events", ex);
230 Map<EventType, Long> countEventsByType(ZoomParams params) {
231 if (params.getTimeRange() != null) {
232 return countEventsByType(params.getTimeRange().getStartMillis() / 1000,
233 params.getTimeRange().getEndMillis() / 1000,
234 params.getFilter(), params.getTypeZoomLevel());
236 return Collections.emptyMap();
// For the given tagged event ids, count distinct tags grouped by tag name.
// Returns display-name -> count. NOTE(review): the rs-iteration loop header
// (while rs.next()) is missing from this view -- interior lines were lost.
248 Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) {
249 HashMap<String, Long> counts =
new HashMap<>();
251 try (Statement createStatement = con.createStatement();
252 ResultSet rs = createStatement.executeQuery(
"SELECT tag_name_display_name, COUNT(DISTINCT tag_id) AS count FROM tags"
253 +
" WHERE event_id IN (" + StringUtils.join(eventIDsWithTags,
", ") +
")"
254 +
" GROUP BY tag_name_id"
255 +
" ORDER BY tag_name_display_name");) {
257 counts.put(rs.getString(
"tag_name_display_name"), rs.getLong(
"count"));
259 }
catch (SQLException ex) {
260 LOGGER.log(Level.SEVERE,
"Failed to get tag counts by tag name.", ex);
// Drop every timeline table (events, hash set tables, tags, db_info) so the
// database can be rebuilt from scratch.
271 void reInitializeDB() {
274 dropEventsTableStmt.executeUpdate();
275 dropHashSetHitsTableStmt.executeUpdate();
276 dropHashSetsTableStmt.executeUpdate();
277 dropTagsTableStmt.executeUpdate();
278 dropDBInfoTableStmt.executeUpdate();
280 }
catch (SQLException ex) {
281 LOGGER.log(Level.SEVERE,
"could not drop old tables", ex);
// Drop only the tags table so tag data can be re-synced independently.
291 void reInitializeTags() {
294 dropTagsTableStmt.executeUpdate();
296 }
catch (SQLException ex) {
297 LOGGER.log(Level.SEVERE,
"could not drop old tags table", ex);
// Find the smallest interval bounded by events outside timeRange: the latest
// event at-or-before the range start and the earliest event at-or-after the
// range end, both restricted by the filter.
303 Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter) {
// Convert millis to the epoch-seconds resolution stored in the events table.
304 long start = timeRange.getStartMillis() / 1000;
305 long end = timeRange.getEndMillis() / 1000;
306 final String sqlWhere = SQLHelper.getSQLWhere(filter);
308 try (Statement stmt = con.createStatement();
309 ResultSet rs = stmt.executeQuery(
" SELECT (SELECT Max(time) FROM events " + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) +
" WHERE time <=" + start +
" AND " + sqlWhere +
") AS start,"
310 +
"(SELECT Min(time) FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) +
" WHERE time >= " + end +
" AND " + sqlWhere +
") AS end")) {
313 long start2 = rs.getLong(
"start");
314 long end2 = rs.getLong(
"end");
// Back to millis; +1 second makes the end exclusive of the bounding event.
319 return new Interval(start2 * 1000, (end2 + 1) * 1000, TimeLineController.getJodaTimeZone());
321 }
catch (SQLException ex) {
// NOTE(review): message says "MIN time" but this query computes both bounds --
// looks like a copy-paste; consider "Failed to get bounding events interval."
322 LOGGER.log(Level.SEVERE,
"Failed to get MIN time.", ex);
// Look up a single event row by id using the prepared getEventByIDStmt.
// NOTE(review): the result-construction lines (presumably via
// constructTimeLineEvent(rs)) are missing from this view.
329 SingleEvent getEventById(Long eventID) {
330 SingleEvent result = null;
333 getEventByIDStmt.clearParameters();
334 getEventByIDStmt.setLong(1, eventID);
335 try (ResultSet rs = getEventByIDStmt.executeQuery()) {
341 }
catch (SQLException sqlEx) {
342 LOGGER.log(Level.SEVERE,
"exception while querying for event with id = " + eventID, sqlEx);
// Get the ids of all events in timeRange that pass the filter, ordered by time.
359 List<Long> getEventIDs(Interval timeRange, RootFilter filter) {
// Epoch-seconds bounds; stored event times are in seconds.
360 Long startTime = timeRange.getStartMillis() / 1000;
361 Long endTime = timeRange.getEndMillis() / 1000;
// Degenerate (zero-length) range handling; the adjustment lines are missing
// from this view -- presumably endTime is bumped so the range is non-empty.
363 if (Objects.equals(startTime, endTime)) {
367 ArrayList<Long> resultIDs =
new ArrayList<>();
370 final String query =
"SELECT events.event_id AS event_id FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter)
371 +
" WHERE time >= " + startTime +
" AND time <" + endTime +
" AND " + SQLHelper.getSQLWhere(filter) +
" ORDER BY time ASC";
372 try (Statement stmt = con.createStatement();
373 ResultSet rs = stmt.executeQuery(query)) {
375 resultIDs.add(rs.getLong(
"event_id"));
378 }
catch (SQLException sqlEx) {
379 LOGGER.log(Level.SEVERE,
"failed to execute query for event ids in range", sqlEx);
// Get events in timeRange (passing filter) grouped into CombinedEvents: rows
// sharing the same time, full description, and file id are merged, with their
// ids and sub-types recombined via GROUP_CONCAT.
398 List<CombinedEvent> getCombinedEvents(Interval timeRange, RootFilter filter) {
399 Long startTime = timeRange.getStartMillis() / 1000;
400 Long endTime = timeRange.getEndMillis() / 1000;
402 if (Objects.equals(startTime, endTime)) {
406 ArrayList<CombinedEvent> results =
new ArrayList<>();
409 final String query =
"SELECT full_description, time, file_id, GROUP_CONCAT(events.event_id), GROUP_CONCAT(sub_type)"
410 +
" FROM events " + useHashHitTablesHelper(filter) + useTagTablesHelper(filter)
411 +
" WHERE time >= " + startTime +
" AND time <" + endTime +
" AND " + SQLHelper.getSQLWhere(filter)
412 +
" GROUP BY time,full_description, file_id ORDER BY time ASC, full_description";
413 try (Statement stmt = con.createStatement();
414 ResultSet rs = stmt.executeQuery(query)) {
// Split the concatenated id and sub-type lists back into parallel lists.
418 List<Long> eventIDs = SQLHelper.unGroupConcat(rs.getString(
"GROUP_CONCAT(events.event_id)"), Long::valueOf);
419 List<EventType> eventTypes = SQLHelper.unGroupConcat(rs.getString(
"GROUP_CONCAT(sub_type)"), s -> RootEventType.allTypes.get(Integer.valueOf(s)));
// Re-pair each event type with its id. NOTE(review): if a (type, id) pair
// repeats a type, later ids overwrite earlier ones in this map.
420 Map<EventType, Long> eventMap =
new HashMap<>();
421 for (
int i = 0; i < eventIDs.size(); i++) {
422 eventMap.put(eventTypes.get(i), eventIDs.get(i));
424 results.add(
new CombinedEvent(rs.getLong(
"time") * 1000, rs.getString(
"full_description"), rs.getLong(
"file_id"), eventMap));
427 }
catch (SQLException sqlEx) {
428 LOGGER.log(Level.SEVERE,
"failed to execute query for combined events", sqlEx);
// True when the schema has the upgraded columns AND at least one data source id
// is recorded (the leading conjunct is missing from this view).
440 boolean hasNewColumns() {
442 && (getDataSourceIDs().isEmpty() ==
false);
// Collect the distinct non-zero data source ids present in the events table.
445 Set<Long> getDataSourceIDs() {
446 HashSet<Long> hashSet =
new HashSet<>();
448 try (ResultSet rs = getDataSourceIDsStmt.executeQuery()) {
450 long datasourceID = rs.getLong(
"datasource_id");
451 hashSet.add(datasourceID);
453 }
catch (SQLException ex) {
// Fixed copy-pasted log message: this query reads distinct data source ids,
// not the MAX event time.
454 LOGGER.log(Level.SEVERE,
"Failed to get data source IDs.", ex);
// Map hash-set id -> display name for every known hash set.
461 Map<Long, String> getHashSetNames() {
462 Map<Long, String> hashSets =
new HashMap<>();
464 try (ResultSet rs = getHashSetNamesStmt.executeQuery();) {
466 long hashSetID = rs.getLong(
"hash_set_id");
467 String hashSetName = rs.getString(
"hash_set_name");
468 hashSets.put(hashSetID, hashSetName);
470 }
catch (SQLException ex) {
471 LOGGER.log(Level.SEVERE,
"Failed to get hash sets.", ex);
// Defensive: callers get a read-only view of the map.
475 return Collections.unmodifiableMap(hashSets);
// Run SQLite ANALYZE so the query planner has fresh statistics.
// NOTE(review): local 'b' is unused -- execute()'s return could be ignored.
480 try (Statement createStatement = con.createStatement()) {
481 boolean b = createStatement.execute(
"analyze; analyze sqlite_master;");
482 }
catch (SQLException ex) {
483 LOGGER.log(Level.SEVERE,
"Failed to analyze events db.", ex);
// Body fragment of getMaxTime(): the latest event time (epoch seconds),
// via the prepared getMaxTimeStmt whose SQL aliases the value as "max".
494 try (ResultSet rs = getMaxTimeStmt.executeQuery()) {
496 return rs.getLong(
"max");
498 }
catch (SQLException ex) {
499 LOGGER.log(Level.SEVERE,
"Failed to get MAX time.", ex);
// Body fragment of getMinTime(): the earliest event time (epoch seconds).
511 try (ResultSet rs = getMinTimeStmt.executeQuery()) {
513 return rs.getLong(
"min");
515 }
catch (SQLException ex) {
516 LOGGER.log(Level.SEVERE,
"Failed to get MIN time.", ex);
// Open (or reuse) the connection to events.db, create/upgrade the schema,
// create indexes, and prepare all reusable statements. synchronized so only
// one thread initializes at a time. Many interior lines (try headers,
// stmt.execute(sql) calls, closing braces) are missing from this view.
529 final synchronized void initializeDB() {
532 if (con == null || con.isClosed()) {
533 con = DriverManager.getConnection(
"jdbc:sqlite:" + dbPath);
535 }
catch (SQLException ex) {
536 LOGGER.log(Level.SEVERE,
"Failed to open connection to events.db", ex);
541 }
catch (SQLException ex) {
542 LOGGER.log(Level.SEVERE,
"problem accessing database", ex);
// --- schema creation: db_info key/value table ---
548 try (Statement stmt = con.createStatement()) {
549 String sql =
"CREATE TABLE if not exists db_info "
552 +
"PRIMARY KEY (key))";
554 }
catch (SQLException ex) {
555 LOGGER.log(Level.SEVERE,
"problem creating db_info table", ex);
// --- schema creation: main events table ---
558 try (Statement stmt = con.createStatement()) {
559 String sql =
"CREATE TABLE if not exists events "
560 +
" (event_id INTEGER PRIMARY KEY, "
561 +
" datasource_id INTEGER, "
562 +
" file_id INTEGER, "
563 +
" artifact_id INTEGER, "
565 +
" sub_type INTEGER, "
566 +
" base_type INTEGER, "
567 +
" full_description TEXT, "
568 +
" med_description TEXT, "
569 +
" short_description TEXT, "
570 +
" known_state INTEGER,"
571 +
" hash_hit INTEGER,"
572 +
" tagged INTEGER)";
574 }
catch (SQLException ex) {
575 LOGGER.log(Level.SEVERE,
"problem creating database table", ex);
// --- schema upgrades: ALTER TABLE adds columns missing from older dbs.
// Presumably guarded by hasDataSourceIDColumn()/hasTaggedColumn()/
// hasHashHitColumn() checks on the missing lines -- TODO confirm.
579 try (Statement stmt = con.createStatement()) {
580 String sql =
"ALTER TABLE events ADD COLUMN datasource_id INTEGER";
582 }
catch (SQLException ex) {
583 LOGGER.log(Level.SEVERE,
"problem upgrading events table", ex);
587 try (Statement stmt = con.createStatement()) {
588 String sql =
"ALTER TABLE events ADD COLUMN tagged INTEGER";
590 }
catch (SQLException ex) {
591 LOGGER.log(Level.SEVERE,
"problem upgrading events table", ex);
596 try (Statement stmt = con.createStatement()) {
597 String sql =
"ALTER TABLE events ADD COLUMN hash_hit INTEGER";
599 }
catch (SQLException ex) {
600 LOGGER.log(Level.SEVERE,
"problem upgrading events table", ex);
// --- schema creation: hash set lookup and join tables ---
604 try (Statement stmt = con.createStatement()) {
605 String sql =
"CREATE TABLE if not exists hash_sets "
606 +
"( hash_set_id INTEGER primary key,"
607 +
" hash_set_name VARCHAR(255) UNIQUE NOT NULL)";
609 }
catch (SQLException ex) {
610 LOGGER.log(Level.SEVERE,
"problem creating hash_sets table", ex);
613 try (Statement stmt = con.createStatement()) {
614 String sql =
"CREATE TABLE if not exists hash_set_hits "
615 +
"(hash_set_id INTEGER REFERENCES hash_sets(hash_set_id) not null, "
616 +
" event_id INTEGER REFERENCES events(event_id) not null, "
617 +
" PRIMARY KEY (hash_set_id, event_id))";
619 }
catch (SQLException ex) {
620 LOGGER.log(Level.SEVERE,
"problem creating hash_set_hits table", ex);
// --- indexes over the columns the timeline queries filter/group on ---
625 createIndex(
"events", Arrays.asList(
"datasource_id"));
626 createIndex(
"events", Arrays.asList(
"event_id",
"hash_hit"));
627 createIndex(
"events", Arrays.asList(
"event_id",
"tagged"));
629 createIndex(
"events", Arrays.asList(
"artifact_id"));
630 createIndex(
"events", Arrays.asList(
"sub_type",
"short_description",
"time"));
631 createIndex(
"events", Arrays.asList(
"base_type",
"short_description",
"time"));
633 createIndex(
"events", Arrays.asList(
"known_state"));
// --- prepared statements (the insertRowStmt assignment line is missing) ---
637 "INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hash_hit, tagged) "
638 +
"VALUES (?,?,?,?,?,?,?,?,?,?,?,?)");
639 getHashSetNamesStmt =
prepareStatement(
"SELECT hash_set_id, hash_set_name FROM hash_sets");
640 getDataSourceIDsStmt =
prepareStatement(
"SELECT DISTINCT datasource_id FROM events WHERE datasource_id != 0");
643 getEventByIDStmt =
prepareStatement(
"SELECT * FROM events WHERE event_id = ?");
644 insertHashSetStmt =
prepareStatement(
"INSERT OR IGNORE INTO hash_sets (hash_set_name) values (?)");
645 selectHashSetStmt =
prepareStatement(
"SELECT hash_set_id FROM hash_sets WHERE hash_set_name = ?");
646 insertHashHitStmt =
prepareStatement(
"INSERT OR IGNORE INTO hash_set_hits (hash_set_id, event_id) values (?,?)");
647 insertTagStmt =
prepareStatement(
"INSERT OR IGNORE INTO tags (tag_id, tag_name_id,tag_name_display_name, event_id) values (?,?,?,?)");
656 countAllEventsStmt =
prepareStatement(
"SELECT count(event_id) AS count FROM events WHERE event_id IS NOT null");
658 dropHashSetHitsTableStmt =
prepareStatement(
"DROP TABLE IF EXISTS hash_set_hits");
662 selectNonArtifactEventIDsByObjectIDStmt =
prepareStatement(
"SELECT event_id FROM events WHERE file_id == ? AND artifact_id IS NULL");
663 selectEventIDsBYObjectAndArtifactIDStmt =
prepareStatement(
"SELECT event_id FROM events WHERE file_id == ? AND artifact_id = ?");
664 }
catch (SQLException sQLException) {
// Fixed typo in the log message: "prepareStatment" -> "prepareStatement".
665 LOGGER.log(Level.SEVERE,
"failed to prepareStatement", sQLException);
// Ids of all events derived from the given artifact.
// NOTE(review): SQL built by concatenation -- safe since getArtifactID() is a
// long, but the class's prepared-statement style would be more consistent.
681 List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) {
684 String query =
"SELECT event_id FROM events WHERE artifact_id == " + artifact.getArtifactID();
686 ArrayList<Long> results =
new ArrayList<>();
687 try (Statement stmt = con.createStatement();
688 ResultSet rs = stmt.executeQuery(query);) {
690 results.add(rs.getLong(
"event_id"));
692 }
catch (SQLException ex) {
693 LOGGER.log(Level.SEVERE,
"Error executing getEventIDsForArtifact query.", ex);
// Ids of all events for the given file; when includeDerivedArtifacts is false,
// events that came from artifacts (artifact_id non-null) are excluded.
715 List<Long> getEventIDsForFile(AbstractFile file,
boolean includeDerivedArtifacts) {
718 String query =
"SELECT event_id FROM events WHERE file_id == " + file.getId()
719 + (includeDerivedArtifacts ?
"" :
" AND artifact_id IS NULL");
721 ArrayList<Long> results =
new ArrayList<>();
722 try (Statement stmt = con.createStatement();
723 ResultSet rs = stmt.executeQuery(query);) {
725 results.add(rs.getLong(
"event_id"));
727 }
catch (SQLException ex) {
728 LOGGER.log(Level.SEVERE,
"Error executing getEventIDsForFile query.", ex);
// Body fragment of initializeTagsTable(): create the tags table keyed by
// (event_id, tag_name_id) with a foreign key back to events.
740 try (Statement stmt = con.createStatement()) {
741 String sql =
"CREATE TABLE IF NOT EXISTS tags "
742 +
"(tag_id INTEGER NOT NULL,"
743 +
" tag_name_id INTEGER NOT NULL, "
744 +
" tag_name_display_name TEXT NOT NULL, "
745 +
" event_id INTEGER REFERENCES events(event_id) NOT NULL, "
746 +
" PRIMARY KEY (event_id, tag_name_id))";
748 }
catch (SQLException ex) {
749 LOGGER.log(Level.SEVERE,
"problem creating tags table", ex);
// Create an index named <table>_<col1>_<col2>_idx over the given columns.
// NOTE(review): the stmt.execute(sql) line is missing from this view.
758 private void createIndex(
final String tableName,
final List<String> columnList) {
// e.g. ["a","b"] -> "(a,b)" for the column list of the CREATE INDEX.
759 String indexColumns = columnList.stream().collect(Collectors.joining(
",",
"(",
")"));
760 String indexName = tableName +
"_" + StringUtils.join(columnList,
"_") +
"_idx";
761 try (Statement stmt = con.createStatement()) {
763 String sql =
"CREATE INDEX IF NOT EXISTS " + indexName +
" ON " + tableName + indexColumns;
765 }
catch (SQLException ex) {
766 LOGGER.log(Level.SEVERE,
"problem creating index " + indexName, ex);
// Body fragment of hasDBColumn(dbColumn): scan PRAGMA table_info(events) for a
// column with the given name. NOTE(review): the ResultSet is not in the
// try-with-resources header, so it relies on the Statement's close to free it.
776 try (Statement stmt = con.createStatement()) {
778 ResultSet executeQuery = stmt.executeQuery(
"PRAGMA table_info(events)");
779 while (executeQuery.next()) {
780 if (dbColumn.equals(executeQuery.getString(
"name"))) {
784 }
catch (SQLException ex) {
785 LOGGER.log(Level.SEVERE,
"problem executing pragma", ex);
// Convenience overload: insert one event in its own transaction.
802 void insertEvent(
long time,
EventType type,
long datasourceID,
long objID,
803 Long artifactID, String fullDescription, String medDescription,
804 String shortDescription, TskData.FileKnown known, Set<String> hashSets, List<? extends Tag> tags) {
806 EventTransaction transaction = beginTransaction();
807 insertEvent(time, type, datasourceID, objID, artifactID, fullDescription, medDescription, shortDescription, known, hashSets, tags, transaction);
808 commitTransaction(transaction);
// Insert one event row (plus its hash-set hits and tags) inside the caller's
// transaction. Parameter order matches the 12 placeholders of insertRowStmt.
817 void insertEvent(
long time,
EventType type,
long datasourceID,
long objID,
818 Long artifactID, String fullDescription, String medDescription,
819 String shortDescription, TskData.FileKnown known, Set<String> hashSetNames,
820 List<? extends Tag> tags, EventTransaction transaction) {
822 if (transaction.isClosed()) {
823 throw new IllegalArgumentException(
"can't update database with closed transaction");
// Index of the concrete sub-type within the global type list; -1 when absent.
825 int typeNum = RootEventType.allTypes.indexOf(type);
832 insertRowStmt.clearParameters();
833 insertRowStmt.setLong(1, datasourceID);
834 insertRowStmt.setLong(2, objID);
// artifact_id is nullable: only artifact-derived events carry one.
835 if (artifactID != null) {
836 insertRowStmt.setLong(3, artifactID);
838 insertRowStmt.setNull(3, Types.NULL);
840 insertRowStmt.setLong(4, time);
843 insertRowStmt.setInt(5, typeNum);
845 insertRowStmt.setNull(5, Types.INTEGER);
848 insertRowStmt.setInt(6, superTypeNum);
849 insertRowStmt.setString(7, fullDescription);
850 insertRowStmt.setString(8, medDescription);
851 insertRowStmt.setString(9, shortDescription);
// null known state defaults to UNKNOWN.
853 insertRowStmt.setByte(10, known == null ? TskData.FileKnown.UNKNOWN.getFileKnownValue() : known.getFileKnownValue());
// hash_hit / tagged are stored as 0/1 flags.
855 insertRowStmt.setInt(11, hashSetNames.isEmpty() ? 0 : 1);
856 insertRowStmt.setInt(12, tags.isEmpty() ? 0 : 1);
858 insertRowStmt.executeUpdate();
// Recover the auto-assigned event_id to key the hash-set and tag rows.
860 try (ResultSet generatedKeys = insertRowStmt.getGeneratedKeys()) {
861 while (generatedKeys.next()) {
862 long eventID = generatedKeys.getLong(
"last_insert_rowid()");
863 for (String name : hashSetNames) {
// INSERT OR IGNORE: create the hash set row only if it does not exist yet...
866 insertHashSetStmt.setString(1, name);
867 insertHashSetStmt.executeUpdate();
// ...then look its id up (needed whether the insert happened or was ignored).
871 selectHashSetStmt.setString(1, name);
872 try (ResultSet rs = selectHashSetStmt.executeQuery()) {
874 int hashsetID = rs.getInt(
"hash_set_id");
876 insertHashHitStmt.setInt(1, hashsetID);
877 insertHashHitStmt.setLong(2, eventID);
878 insertHashHitStmt.executeUpdate();
// Record each tag against the new event (body lines missing from this view).
883 for (Tag tag : tags) {
891 }
catch (SQLException ex) {
892 LOGGER.log(Level.SEVERE,
"failed to insert event", ex);
// Add a tag to the events matching (objectID, artifactID); returns the ids of
// the affected events. A null transaction is allowed (auto-commit path).
911 Set<Long> addTag(
long objectID, @Nullable Long artifactID, Tag tag, EventTransaction transaction) {
912 if (transaction != null && transaction.isClosed()) {
913 throw new IllegalArgumentException(
"can't update database with closed transaction");
918 for (Long eventID : eventIDs) {
922 }
catch (SQLException ex) {
923 LOGGER.log(Level.SEVERE,
"failed to add tag to event", ex);
// Reached only on failure: the success return is missing from this view.
927 return Collections.emptySet();
// Insert one row into the tags table for the given event (INSERT OR IGNORE in
// the prepared SQL makes re-tagging idempotent).
941 private void insertTag(Tag tag,
long eventID)
throws SQLException {
944 insertTagStmt.clearParameters();
945 insertTagStmt.setLong(1, tag.getId());
946 insertTagStmt.setLong(2, tag.getName().getId());
947 insertTagStmt.setString(3, tag.getName().getDisplayName());
948 insertTagStmt.setLong(4, eventID);
949 insertTagStmt.executeUpdate();
// Remove the tag with tagID from the events matching (objectID, artifactID);
// stillTagged tells the follow-up update whether other tags remain.
967 Set<Long> deleteTag(
long objectID, @Nullable Long artifactID,
long tagID,
boolean stillTagged) {
971 deleteTagStmt.clearParameters();
972 deleteTagStmt.setLong(1, tagID);
973 deleteTagStmt.executeUpdate();
976 }
catch (SQLException ex) {
// Fixed copy-pasted log message: this path deletes a tag, it does not add one.
977 LOGGER.log(Level.SEVERE,
"failed to delete tag from event", ex);
981 return Collections.emptySet();
// Set the events.tagged flag for every event matching (objectID, artifactID)
// and return the affected event ids. A null artifactID selects file-level
// events (artifact_id IS NULL); otherwise both ids must match.
1004 private Set<Long>
markEventsTagged(
long objectID, @Nullable Long artifactID,
boolean tagged)
throws SQLException {
1006 PreparedStatement selectStmt;
1007 if (Objects.isNull(artifactID)) {
1009 selectNonArtifactEventIDsByObjectIDStmt.clearParameters();
1010 selectNonArtifactEventIDsByObjectIDStmt.setLong(1, objectID);
1014 selectEventIDsBYObjectAndArtifactIDStmt.clearParameters();
1015 selectEventIDsBYObjectAndArtifactIDStmt.setLong(1, objectID);
1016 selectEventIDsBYObjectAndArtifactIDStmt.setLong(2, artifactID);
// Collect the matching event ids first...
1020 HashSet<Long> eventIDs =
new HashSet<>();
1021 try (ResultSet executeQuery = selectStmt.executeQuery();) {
1022 while (executeQuery.next()) {
1023 eventIDs.add(executeQuery.getLong(
"event_id"));
// ...then flip the tagged flag for all of them in one UPDATE.
1028 try (Statement updateStatement = con.createStatement();) {
1029 updateStatement.executeUpdate(
"UPDATE events SET tagged = " + (tagged ? 1 : 0)
1030 +
" WHERE event_id IN (" + StringUtils.join(eventIDs,
",") +
")");
// Roll back the given transaction (body missing from this view).
1036 void rollBackTransaction(EventTransaction trans) {
// closeStatements() fragment: presumably closes every cached statement.
1041 for (PreparedStatement pStmt : preparedStatements) {
// configureDB() fragment: speed-oriented SQLite pragmas. These trade
// durability for insert throughput (synchronous = OFF means a crash can
// corrupt events.db -- acceptable because it can be rebuilt from the case).
1049 try (Statement statement = con.createStatement()) {
1051 statement.execute(
"PRAGMA synchronous = OFF;");
1054 statement.execute(
"PRAGMA count_changes = OFF;");
1056 statement.execute(
"PRAGMA temp_store = MEMORY");
1058 statement.execute(
"PRAGMA cache_size = 50000");
1060 statement.execute(
"PRAGMA auto_vacuum = 0");
1062 statement.execute(
"PRAGMA read_uncommitted = True;");
// Log whether the sqlite-jdbc driver is using native or pure-java bindings.
1068 LOGGER.log(Level.INFO, String.format(
"sqlite-jdbc version %s loaded in %s mode",
1069 SQLiteJDBCLoader.getVersion(), SQLiteJDBCLoader.isNativeMode() ?
"native" :
"pure-java"));
1070 }
catch (Exception exception) {
1071 LOGGER.log(Level.SEVERE,
"Failed to determine if sqlite-jdbc is loaded in native or pure-java mode.", exception);
// constructTimeLineEvent(rs) fragment: build a SingleEvent from one events row
// (the constructor call and the event_id/time arguments are missing here).
1077 rs.getLong(
"datasource_id"),
1078 rs.getLong(
"file_id"),
1079 rs.getLong(
"artifact_id"),
1081 rs.getString(
"full_description"),
1082 rs.getString(
"med_description"),
1083 rs.getString(
"short_description"),
1084 TskData.FileKnown.valueOf(rs.getByte(
"known_state")),
// 0/1 flags decoded back to booleans.
1085 rs.getInt(
"hash_hit") != 0,
1086 rs.getInt(
"tagged") != 0);
// countEventsByType(startTime, endTime, filter, zoomLevel) fragment: count
// events per type column (sub_type or base_type depending on zoom level).
1106 if (Objects.equals(startTime, endTime)) {
1110 Map<EventType, Long> typeMap =
new HashMap<>();
1116 final String queryString =
"SELECT count(DISTINCT events.event_id) AS count, " +
typeColumnHelper(useSubTypes)
1117 +
" FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) +
" WHERE time >= " + startTime +
" AND time < " + endTime +
" AND " + SQLHelper.getSQLWhere(filter)
// NOTE(review): the GROUP BY clause line is missing from this view.
1121 try (Statement stmt = con.createStatement();
1122 ResultSet rs = stmt.executeQuery(queryString);) {
// Resolve the type: sub-type branch is missing; this is the base-type branch.
1126 :
BaseTypes.values()[rs.getInt(
"base_type")];
1128 typeMap.put(type, rs.getLong(
"count"));
1131 }
catch (Exception ex) {
1132 LOGGER.log(Level.SEVERE,
"Error getting count of events from db.", ex);
// Group events into per-interval, per-type, per-description clusters and merge
// them into stripes for the timeline UI.
1150 List<EventStripe> getEventStripes(
ZoomParams params) {
1157 long start = timeRange.getStartMillis() / 1000;
1158 long end = timeRange.getEndMillis() / 1000;
// Guarantee a non-empty range even for degenerate inputs.
1161 end = Math.max(end, start + 1);
// strftime pattern granularity follows the current zoom's period size.
1167 String strfTimeFormat = SQLHelper.getStrfTimeFormat(rangeInfo.
getPeriodSize());
1168 String descriptionColumn = SQLHelper.getDescriptionColumn(descriptionLOD);
1174 String query =
"SELECT strftime('" + strfTimeFormat +
"',time , 'unixepoch'" + timeZone +
") AS interval,"
1175 +
"\n group_concat(events.event_id) as event_ids,"
1176 +
"\n group_concat(CASE WHEN hash_hit = 1 THEN events.event_id ELSE NULL END) as hash_hits,"
1177 +
"\n group_concat(CASE WHEN tagged = 1 THEN events.event_id ELSE NULL END) as taggeds,"
1178 +
"\n min(time), max(time), " + typeColumn +
", " + descriptionColumn
1179 +
"\n FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter)
1180 +
"\n WHERE time >= " + start +
" AND time < " + end +
" AND " + SQLHelper.getSQLWhere(filter)
1181 +
"\n GROUP BY interval, " + typeColumn +
" , " + descriptionColumn
1182 +
"\n ORDER BY min(time)";
1193 List<EventCluster> events =
new ArrayList<>();
1196 try (Statement createStatement = con.createStatement();
1197 ResultSet rs = createStatement.executeQuery(query)) {
1201 }
catch (SQLException ex) {
// Including the full query in the log helps debug bad filter SQL.
1202 LOGGER.log(Level.SEVERE,
"Failed to get events with query: " + query, ex);
// eventClusterHelper(rs, ...) fragment: decode one grouped row into an
// EventCluster (interval/type construction lines are missing here).
1226 String eventIDsString = rs.getString(
"event_ids");
1227 List<Long> eventIDs = SQLHelper.unGroupConcat(eventIDsString, Long::valueOf);
1228 String description = rs.getString(SQLHelper.getDescriptionColumn(descriptionLOD));
1231 List<Long> hashHits = SQLHelper.unGroupConcat(rs.getString(
"hash_hits"), Long::valueOf);
1232 List<Long> tagged = SQLHelper.unGroupConcat(rs.getString(
"taggeds"), Long::valueOf);
1234 return new EventCluster(interval, type, eventIDs, hashHits, tagged, description, descriptionLOD);
// mergeClustersToStripes fragment: bucket clusters by (type, description)...
1253 Map<EventType, SetMultimap< String, EventCluster>> typeMap =
new HashMap<>();
1256 typeMap.computeIfAbsent(aggregateEvent.getEventType(), eventType -> HashMultimap.create())
1257 .put(aggregateEvent.getDescription(), aggregateEvent);
1260 ArrayList<EventCluster> aggEvents =
new ArrayList<>();
// ...then walk each description's clusters in chronological order, merging
// neighbors whose gap is at most a quarter of the zoom's time unit.
1263 for (SetMultimap<String, EventCluster> descrMap : typeMap.values()) {
1265 for (String descr : descrMap.keySet()) {
1267 Iterator<EventCluster> iterator = descrMap.get(descr).stream()
1268 .sorted(Comparator.comparing(event -> event.getSpan().getStartMillis()))
1271 while (iterator.hasNext()) {
1277 if (gap == null || gap.toDuration().getMillis() <= timeUnitLength.toDurationFrom(gap.getStart()).getMillis() / 4) {
1282 aggEvents.add(current);
1286 aggEvents.add(current);
// Collapse merged clusters into stripes keyed by (type, description).
1291 Map<ImmutablePair<EventType, String>,
EventStripe> stripeDescMap =
new HashMap<>();
1294 stripeDescMap.
merge(ImmutablePair.of(eventCluster.getEventType(), eventCluster.getDescription()),
// typeColumnHelper: pick the SQL type column for the current zoom level.
1302 return useSubTypes ?
"sub_type" :
"base_type";
// prepareStatement fragment: remember each statement so it can be closed later.
1307 preparedStatements.add(prepareStatement);
// EventTransaction fragment: turn off auto-commit to open the transaction.
1330 con.setAutoCommit(
false);
1331 }
catch (SQLException ex) {
// Fixed doubled word in the log message: "to to false" -> "to false".
1332 LOGGER.log(Level.SEVERE,
"failed to set auto-commit to false", ex);
1342 }
catch (SQLException ex1) {
1343 LOGGER.log(Level.SEVERE,
"Exception while attempting to rollback!!", ex1);
1357 }
catch (SQLException ex) {
// Fixed spelling in the log message: "commiting" -> "committing".
1358 LOGGER.log(Level.SEVERE,
"Error committing events.db.", ex);
// EventTransaction close fragment: restore auto-commit after commit/rollback.
1367 con.setAutoCommit(
true);
1368 }
catch (SQLException ex) {
1369 LOGGER.log(Level.SEVERE,
"Error setting auto-commit to true.", ex);
static Version.Type getBuildType()
void insertTag(Tag tag, long eventID)
static List< EventStripe > mergeClustersToStripes(Period timeUnitLength, List< EventCluster > preMergedEvents)
static EventDB getEventDB(Case autoCase)
boolean hasDBColumn(@Nonnull final String dbColumn)
PreparedStatement getEventByIDStmt
SingleEvent constructTimeLineEvent(ResultSet rs)
Interval getSpanningInterval(Collection< Long > eventIDs)
PreparedStatement dropTagsTableStmt
PreparedStatement dropHashSetHitsTableStmt
PreparedStatement insertHashSetStmt
PreparedStatement selectEventIDsBYObjectAndArtifactIDStmt
PreparedStatement deleteTagStmt
static ReadOnlyObjectProperty< TimeZone > getTimeZone()
PreparedStatement dropEventsTableStmt
Map< EventType, Long > countEventsByType(Long startTime, Long endTime, RootFilter filter, EventTypeZoomLevel zoomLevel)
PreparedStatement selectNonArtifactEventIDsByObjectIDStmt
static String typeColumnHelper(final boolean useSubTypes)
EventCluster eventClusterHelper(ResultSet rs, boolean useSubTypes, DescriptionLoD descriptionLOD, TagsFilter filter)
boolean hasTaggedColumn()
PreparedStatement getMaxTimeStmt
PreparedStatement getMinTimeStmt
PreparedStatement prepareStatement(String queryString)
PreparedStatement insertTagStmt
PreparedStatement insertRowStmt
PreparedStatement dropHashSetsTableStmt
final Set< PreparedStatement > preparedStatements
static RangeDivisionInfo getRangeDivisionInfo(Interval timeRange)
static final List<?extends EventType > allTypes
PreparedStatement selectHashSetStmt
static EventStripe merge(EventStripe u, EventStripe v)
PreparedStatement getHashSetNamesStmt
boolean hasHashHitColumn()
EventTypeZoomLevel getTypeZoomLevel()
static DateTimeZone getJodaTimeZone()
DescriptionLoD getDescriptionLOD()
PreparedStatement dropDBInfoTableStmt
void createIndex(final String tableName, final List< String > columnList)
PreparedStatement getDataSourceIDsStmt
TagsFilter getTagsFilter()
static final org.sleuthkit.autopsy.coreutils.Logger LOGGER
synchronized static Logger getLogger(String name)
Set< Long > markEventsTagged(long objectID,@Nullable Long artifactID, boolean tagged)
static EventCluster merge(EventCluster cluster1, EventCluster cluster2)
TimeUnits getPeriodSize()
void initializeTagsTable()
PreparedStatement countAllEventsStmt
boolean hasDataSourceIDColumn()
PreparedStatement insertHashHitStmt