Autopsy 4.1
Graphical digital forensics platform for The Sleuth Kit and other tools.
EventDB.java
1 /*
2  * Autopsy Forensic Browser
3  *
4  * Copyright 2013-15 Basis Technology Corp.
5  * Contact: carrier <at> sleuthkit <dot> org
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  * http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  */
19 package org.sleuthkit.autopsy.timeline.db;
20 
21 import com.google.common.collect.HashMultimap;
22 import com.google.common.collect.SetMultimap;
23 import java.nio.file.Paths;
24 import java.sql.Connection;
25 import java.sql.DriverManager;
26 import java.sql.PreparedStatement;
27 import java.sql.ResultSet;
28 import java.sql.SQLException;
29 import java.sql.Statement;
30 import java.sql.Types;
31 import java.util.ArrayList;
32 import java.util.Arrays;
33 import java.util.Collection;
34 import java.util.Collections;
35 import java.util.Comparator;
36 import java.util.HashMap;
37 import java.util.HashSet;
38 import java.util.Iterator;
39 import java.util.List;
40 import java.util.Map;
41 import java.util.Objects;
42 import java.util.Set;
43 import java.util.TimeZone;
44 import java.util.concurrent.locks.Lock;
45 import java.util.concurrent.locks.ReentrantReadWriteLock;
46 import java.util.logging.Level;
47 import java.util.stream.Collectors;
48 import javax.annotation.Nonnull;
49 import javax.annotation.Nullable;
50 import org.apache.commons.lang3.StringUtils;
51 import org.apache.commons.lang3.tuple.ImmutablePair;
52 import org.joda.time.DateTimeZone;
53 import org.joda.time.Interval;
54 import org.joda.time.Period;
55 import org.sleuthkit.autopsy.casemodule.Case;
56 import org.sleuthkit.autopsy.coreutils.Logger;
57 import org.sleuthkit.autopsy.coreutils.Version;
58 import org.sleuthkit.autopsy.timeline.TimeLineController;
59 import org.sleuthkit.autopsy.timeline.datamodel.CombinedEvent;
60 import org.sleuthkit.autopsy.timeline.datamodel.EventCluster;
61 import org.sleuthkit.autopsy.timeline.datamodel.EventStripe;
62 import org.sleuthkit.autopsy.timeline.datamodel.SingleEvent;
63 import org.sleuthkit.autopsy.timeline.datamodel.eventtype.BaseTypes;
64 import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
65 import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
66 import static org.sleuthkit.autopsy.timeline.db.SQLHelper.useHashHitTablesHelper;
67 import static org.sleuthkit.autopsy.timeline.db.SQLHelper.useTagTablesHelper;
68 import org.sleuthkit.autopsy.timeline.filters.RootFilter;
69 import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
70 import org.sleuthkit.autopsy.timeline.utils.RangeDivisionInfo;
71 import org.sleuthkit.autopsy.timeline.zooming.DescriptionLoD;
72 import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
73 import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
74 import org.sleuthkit.datamodel.AbstractFile;
75 import org.sleuthkit.datamodel.BlackboardArtifact;
76 import org.sleuthkit.datamodel.SleuthkitCase;
77 import org.sleuthkit.datamodel.Tag;
78 import org.sleuthkit.datamodel.TskData;
79 import org.sqlite.SQLiteJDBCLoader;
80 
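/**
 * Provides thread-safe access to the per-case SQLite database (events.db)
 * that backs the timeline feature: schema creation, event insertion,
 * tagging, and the aggregate queries used to drive the timeline views.
 */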
88 public class EventDB {
89 
90  private static final org.sleuthkit.autopsy.coreutils.Logger LOGGER = Logger.getLogger(EventDB.class.getName());
91 
92  static {
93  //make sure sqlite driver is loaded, possibly redundant
94  try {
95  Class.forName("org.sqlite.JDBC"); // NON-NLS
96  } catch (ClassNotFoundException ex) {
97  LOGGER.log(Level.SEVERE, "Failed to load sqlite JDBC driver", ex); // NON-NLS
98  }
99  }
100 
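/**
 * Static factory method: create an EventDB for the given case, or return
 * null if the database connection could not be created.
 */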
110  public static EventDB getEventDB(Case autoCase) {
111  try {
112  return new EventDB(autoCase);
113  } catch (SQLException ex) {
114  LOGGER.log(Level.SEVERE, "sql error creating database connection", ex); // NON-NLS
115  return null;
116  } catch (Exception ex) {
117  LOGGER.log(Level.SEVERE, "error creating database connection", ex); // NON-NLS
118  return null;
119  }
120  }
121 
122  private volatile Connection con;
123 
124  private final String dbPath;
125 
126  private PreparedStatement getEventByIDStmt;
127  private PreparedStatement getMaxTimeStmt;
128  private PreparedStatement getMinTimeStmt;
129  private PreparedStatement getDataSourceIDsStmt;
130  private PreparedStatement getHashSetNamesStmt;
131  private PreparedStatement insertRowStmt;
132  private PreparedStatement insertHashSetStmt;
133  private PreparedStatement insertHashHitStmt;
134  private PreparedStatement insertTagStmt;
135  private PreparedStatement deleteTagStmt;
136  private PreparedStatement selectHashSetStmt;
137  private PreparedStatement countAllEventsStmt;
138  private PreparedStatement dropEventsTableStmt;
139  private PreparedStatement dropHashSetHitsTableStmt;
140  private PreparedStatement dropHashSetsTableStmt;
141  private PreparedStatement dropTagsTableStmt;
142  private PreparedStatement dropDBInfoTableStmt;
143  private PreparedStatement selectNonArtifactEventIDsByObjectIDStmt;
144  private PreparedStatement selectEventIDsBYObjectAndArtifactIDStmt;
145 
146  private final Set<PreparedStatement> preparedStatements = new HashSet<>();
147 
148  private final Lock DBLock = new ReentrantReadWriteLock(true).writeLock(); //using exclusive lock for all db ops for now
149 
150  private EventDB(Case autoCase) throws SQLException, Exception {
151  //should this go into module output (or even cache, we should be able to rebuild it)?
152  this.dbPath = Paths.get(autoCase.getCaseDirectory(), "events.db").toString(); //NON-NLS
153  initializeDB();
154  }
155 
156  @Override
157  public void finalize() throws Throwable {
158  try {
159  closeDBCon();
160  } finally {
161  super.finalize();
162  }
163  }
164 
165  void closeDBCon() {
166  if (con != null) {
167  try {
168  closeStatements();
169  con.close();
170  } catch (SQLException ex) {
171  LOGGER.log(Level.WARNING, "Failed to close connection to events.db", ex); // NON-NLS
172  }
173  }
174  con = null;
175  }
176 
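/**
 * Get the smallest interval that spans all of the given event IDs.
 */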
177  public Interval getSpanningInterval(Collection<Long> eventIDs) {
178  DBLock.lock();
179  try (Statement stmt = con.createStatement();
180  ResultSet rs = stmt.executeQuery("SELECT Min(time), Max(time) FROM events WHERE event_id IN (" + StringUtils.join(eventIDs, ", ") + ")");) { // NON-NLS
181  while (rs.next()) {
182  return new Interval(rs.getLong("Min(time)") * 1000, (rs.getLong("Max(time)") + 1) * 1000, DateTimeZone.UTC); // NON-NLS
183  }
184  } catch (SQLException ex) {
185  LOGGER.log(Level.SEVERE, "Error executing get spanning interval query.", ex); // NON-NLS
186  } finally {
187  DBLock.unlock();
188  }
189  return null;
190  }
191 
192  EventTransaction beginTransaction() {
193  return new EventTransaction();
194  }
195 
196  void commitTransaction(EventTransaction tr) {
197  if (tr.isClosed()) {
198  throw new IllegalArgumentException("can't close already closed transaction"); // NON-NLS
199  }
200  tr.commit();
201  }
202 
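/**
 * Count every event in the database, returning -1 on error.
 */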
207  int countAllEvents() {
208  DBLock.lock();
209  try (ResultSet rs = countAllEventsStmt.executeQuery()) { // NON-NLS
210  while (rs.next()) {
211  return rs.getInt("count"); // NON-NLS
212  }
213  } catch (SQLException ex) {
214  LOGGER.log(Level.SEVERE, "Error counting all events", ex); //NON-NLS
215  } finally {
216  DBLock.unlock();
217  }
218  return -1;
219  }
220 
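/**
 * Count events, grouped by type, for the time range and filter given in the
 * ZoomParams; returns an empty map if no time range is set.
 */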
230  Map<EventType, Long> countEventsByType(ZoomParams params) {
231  if (params.getTimeRange() != null) {
232  return countEventsByType(params.getTimeRange().getStartMillis() / 1000,
233  params.getTimeRange().getEndMillis() / 1000,
234  params.getFilter(), params.getTypeZoomLevel());
235  } else {
236  return Collections.emptyMap();
237  }
238  }
239 
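/**
 * For the given tagged event IDs, count the distinct tags applied, grouped
 * by tag display name.
 */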
248  Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) {
249  HashMap<String, Long> counts = new HashMap<>();
250  DBLock.lock();
251  try (Statement createStatement = con.createStatement();
252  ResultSet rs = createStatement.executeQuery("SELECT tag_name_display_name, COUNT(DISTINCT tag_id) AS count FROM tags" //NON-NLS
253  + " WHERE event_id IN (" + StringUtils.join(eventIDsWithTags, ", ") + ")" //NON-NLS
254  + " GROUP BY tag_name_id" //NON-NLS
255  + " ORDER BY tag_name_display_name");) { //NON-NLS
256  while (rs.next()) {
257  counts.put(rs.getString("tag_name_display_name"), rs.getLong("count")); //NON-NLS
258  }
259  } catch (SQLException ex) {
260  LOGGER.log(Level.SEVERE, "Failed to get tag counts by tag name.", ex); //NON-NLS
261  } finally {
262  DBLock.unlock();
263  }
264  return counts;
265  }
266 
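/**
 * Drop all tables and recreate the schema from scratch.
 */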
271  void reInitializeDB() {
272  DBLock.lock();
273  try {
274  dropEventsTableStmt.executeUpdate();
275  dropHashSetHitsTableStmt.executeUpdate();
276  dropHashSetsTableStmt.executeUpdate();
277  dropTagsTableStmt.executeUpdate();
278  dropDBInfoTableStmt.executeUpdate();
279  initializeDB();
280  } catch (SQLException ex) {
281  LOGGER.log(Level.SEVERE, "could not drop old tables", ex); // NON-NLS
282  } finally {
283  DBLock.unlock();
284  }
285  }
286 
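/**
 * Drop and recreate only the tags table.
 */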
291  void reInitializeTags() {
292  DBLock.lock();
293  try {
294  dropTagsTableStmt.executeUpdate();
295  initializeTagsTable();
296  } catch (SQLException ex) {
297  LOGGER.log(Level.SEVERE, "could not drop old tags table", ex); // NON-NLS
298  } finally {
299  DBLock.unlock();
300  }
301  }
302 
303  Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter) {
304  long start = timeRange.getStartMillis() / 1000;
305  long end = timeRange.getEndMillis() / 1000;
306  final String sqlWhere = SQLHelper.getSQLWhere(filter);
307  DBLock.lock();
308  try (Statement stmt = con.createStatement(); //can't use prepared statement because of complex where clause
309  ResultSet rs = stmt.executeQuery(" SELECT (SELECT Max(time) FROM events " + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time <=" + start + " AND " + sqlWhere + ") AS start," //NON-NLS
310  + "(SELECT Min(time) FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time >= " + end + " AND " + sqlWhere + ") AS end")) { // NON-NLS
311  while (rs.next()) {
312 
313  long start2 = rs.getLong("start"); // NON-NLS
314  long end2 = rs.getLong("end"); // NON-NLS
315 
316  if (end2 == 0) {
317  end2 = getMaxTime();
318  }
319  return new Interval(start2 * 1000, (end2 + 1) * 1000, TimeLineController.getJodaTimeZone());
320  }
321  } catch (SQLException ex) {
322  LOGGER.log(Level.SEVERE, "Failed to get bounding events interval.", ex); // NON-NLS
323  } finally {
324  DBLock.unlock();
325  }
326  return null;
327  }
328 
329  SingleEvent getEventById(Long eventID) {
330  SingleEvent result = null;
331  DBLock.lock();
332  try {
333  getEventByIDStmt.clearParameters();
334  getEventByIDStmt.setLong(1, eventID);
335  try (ResultSet rs = getEventByIDStmt.executeQuery()) {
336  while (rs.next()) {
337  result = constructTimeLineEvent(rs);
338  break;
339  }
340  }
341  } catch (SQLException sqlEx) {
342  LOGGER.log(Level.SEVERE, "exception while querying for event with id = " + eventID, sqlEx); // NON-NLS
343  } finally {
344  DBLock.unlock();
345  }
346  return result;
347  }
348 
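/**
 * Get the IDs of all events within the given time range that pass the given
 * filter, ordered by time.
 */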
359  List<Long> getEventIDs(Interval timeRange, RootFilter filter) {
360  Long startTime = timeRange.getStartMillis() / 1000;
361  Long endTime = timeRange.getEndMillis() / 1000;
362 
363  if (Objects.equals(startTime, endTime)) {
364  endTime++; //make sure end is at least 1 millisecond after start
365  }
366 
367  ArrayList<Long> resultIDs = new ArrayList<>();
368 
369  DBLock.lock();
370  final String query = "SELECT events.event_id AS event_id FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter)
371  + " WHERE time >= " + startTime + " AND time <" + endTime + " AND " + SQLHelper.getSQLWhere(filter) + " ORDER BY time ASC"; // NON-NLS
372  try (Statement stmt = con.createStatement();
373  ResultSet rs = stmt.executeQuery(query)) {
374  while (rs.next()) {
375  resultIDs.add(rs.getLong("event_id")); //NON-NLS
376  }
377 
378  } catch (SQLException sqlEx) {
379  LOGGER.log(Level.SEVERE, "failed to execute query for event ids in range", sqlEx); // NON-NLS
380  } finally {
381  DBLock.unlock();
382  }
383 
384  return resultIDs;
385  }
386 
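/**
 * Get events that share a time, description, and file, grouped into
 * CombinedEvent objects (one per time/description/file combination).
 */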
398  List<CombinedEvent> getCombinedEvents(Interval timeRange, RootFilter filter) {
399  Long startTime = timeRange.getStartMillis() / 1000;
400  Long endTime = timeRange.getEndMillis() / 1000;
401 
402  if (Objects.equals(startTime, endTime)) {
403  endTime++; //make sure end is at least 1 millisecond after start
404  }
405 
406  ArrayList<CombinedEvent> results = new ArrayList<>();
407 
408  DBLock.lock();
409  final String query = "SELECT full_description, time, file_id, GROUP_CONCAT(events.event_id), GROUP_CONCAT(sub_type)"
410  + " FROM events " + useHashHitTablesHelper(filter) + useTagTablesHelper(filter)
411  + " WHERE time >= " + startTime + " AND time <" + endTime + " AND " + SQLHelper.getSQLWhere(filter)
412  + " GROUP BY time,full_description, file_id ORDER BY time ASC, full_description";
413  try (Statement stmt = con.createStatement();
414  ResultSet rs = stmt.executeQuery(query)) {
415  while (rs.next()) {
416 
417  //make a map from event type to event ID
418  List<Long> eventIDs = SQLHelper.unGroupConcat(rs.getString("GROUP_CONCAT(events.event_id)"), Long::valueOf);
419  List<EventType> eventTypes = SQLHelper.unGroupConcat(rs.getString("GROUP_CONCAT(sub_type)"), s -> RootEventType.allTypes.get(Integer.valueOf(s)));
420  Map<EventType, Long> eventMap = new HashMap<>();
421  for (int i = 0; i < eventIDs.size(); i++) {
422  eventMap.put(eventTypes.get(i), eventIDs.get(i));
423  }
424  results.add(new CombinedEvent(rs.getLong("time") * 1000, rs.getString("full_description"), rs.getLong("file_id"), eventMap));
425  }
426 
427  } catch (SQLException sqlEx) {
428  LOGGER.log(Level.SEVERE, "failed to execute query for combined events", sqlEx); // NON-NLS
429  } finally {
430  DBLock.unlock();
431  }
432 
433  return results;
434  }
435 
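/**
 * Check whether the events table has the columns added by newer schema
 * versions and has data source IDs assigned.
 */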
440  boolean hasNewColumns() {
441  return hasHashHitColumn() && hasDataSourceIDColumn() && hasTaggedColumn()
442  && (getDataSourceIDs().isEmpty() == false);
443  }
444 
445  Set<Long> getDataSourceIDs() {
446  HashSet<Long> hashSet = new HashSet<>();
447  DBLock.lock();
448  try (ResultSet rs = getDataSourceIDsStmt.executeQuery()) {
449  while (rs.next()) {
450  long datasourceID = rs.getLong("datasource_id"); //NON-NLS
451  hashSet.add(datasourceID);
452  }
453  } catch (SQLException ex) {
454  LOGGER.log(Level.SEVERE, "Failed to get data source ids.", ex); // NON-NLS
455  } finally {
456  DBLock.unlock();
457  }
458  return hashSet;
459  }
460 
461  Map<Long, String> getHashSetNames() {
462  Map<Long, String> hashSets = new HashMap<>();
463  DBLock.lock();
464  try (ResultSet rs = getHashSetNamesStmt.executeQuery();) {
465  while (rs.next()) {
466  long hashSetID = rs.getLong("hash_set_id"); //NON-NLS
467  String hashSetName = rs.getString("hash_set_name"); //NON-NLS
468  hashSets.put(hashSetID, hashSetName);
469  }
470  } catch (SQLException ex) {
471  LOGGER.log(Level.SEVERE, "Failed to get hash sets.", ex); // NON-NLS
472  } finally {
473  DBLock.unlock();
474  }
475  return Collections.unmodifiableMap(hashSets);
476  }
477 
478  void analyze() {
479  DBLock.lock();
480  try (Statement createStatement = con.createStatement()) {
481  boolean b = createStatement.execute("analyze; analyze sqlite_master;"); //NON-NLS
482  } catch (SQLException ex) {
483  LOGGER.log(Level.SEVERE, "Failed to analyze events db.", ex); // NON-NLS
484  } finally {
485  DBLock.unlock();
486  }
487  }
488 
492  Long getMaxTime() {
493  DBLock.lock();
494  try (ResultSet rs = getMaxTimeStmt.executeQuery()) {
495  while (rs.next()) {
496  return rs.getLong("max"); // NON-NLS
497  }
498  } catch (SQLException ex) {
499  LOGGER.log(Level.SEVERE, "Failed to get MAX time.", ex); // NON-NLS
500  } finally {
501  DBLock.unlock();
502  }
503  return -1L;
504  }
505 
509  Long getMinTime() {
510  DBLock.lock();
511  try (ResultSet rs = getMinTimeStmt.executeQuery()) {
512  while (rs.next()) {
513  return rs.getLong("min"); // NON-NLS
514  }
515  } catch (SQLException ex) {
516  LOGGER.log(Level.SEVERE, "Failed to get MIN time.", ex); // NON-NLS
517  } finally {
518  DBLock.unlock();
519  }
520  return -1L;
521  }
522 
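/**
 * Open (or create) events.db, create any missing tables, columns, and
 * indexes, and prepare the statements used by this class.
 */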
529  final synchronized void initializeDB() {
530 
531  try {
532  if (con == null || con.isClosed()) {
533  con = DriverManager.getConnection("jdbc:sqlite:" + dbPath); // NON-NLS
534  }
535  } catch (SQLException ex) {
536  LOGGER.log(Level.SEVERE, "Failed to open connection to events.db", ex); // NON-NLS
537  return;
538  }
539  try {
540  configureDB();
541  } catch (SQLException ex) {
542  LOGGER.log(Level.SEVERE, "problem accessing database", ex); // NON-NLS
543  return;
544  }
545 
546  DBLock.lock();
547  try {
548  try (Statement stmt = con.createStatement()) {
549  String sql = "CREATE TABLE if not exists db_info " // NON-NLS
550  + " ( key TEXT, " // NON-NLS
551  + " value INTEGER, " // NON-NLS
552  + "PRIMARY KEY (key))"; // NON-NLS
553  stmt.execute(sql);
554  } catch (SQLException ex) {
555  LOGGER.log(Level.SEVERE, "problem creating db_info table", ex); // NON-NLS
556  }
557 
558  try (Statement stmt = con.createStatement()) {
559  String sql = "CREATE TABLE if not exists events " // NON-NLS
560  + " (event_id INTEGER PRIMARY KEY, " // NON-NLS
561  + " datasource_id INTEGER, " // NON-NLS
562  + " file_id INTEGER, " // NON-NLS
563  + " artifact_id INTEGER, " // NON-NLS
564  + " time INTEGER, " // NON-NLS
565  + " sub_type INTEGER, " // NON-NLS
566  + " base_type INTEGER, " // NON-NLS
567  + " full_description TEXT, " // NON-NLS
568  + " med_description TEXT, " // NON-NLS
569  + " short_description TEXT, " // NON-NLS
570  + " known_state INTEGER," //boolean // NON-NLS
571  + " hash_hit INTEGER," //boolean // NON-NLS
572  + " tagged INTEGER)"; //boolean // NON-NLS
573  stmt.execute(sql);
574  } catch (SQLException ex) {
575  LOGGER.log(Level.SEVERE, "problem creating database table", ex); // NON-NLS
576  }
577 
578  if (hasDataSourceIDColumn() == false) {
579  try (Statement stmt = con.createStatement()) {
580  String sql = "ALTER TABLE events ADD COLUMN datasource_id INTEGER"; // NON-NLS
581  stmt.execute(sql);
582  } catch (SQLException ex) {
583  LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS
584  }
585  }
586  if (hasTaggedColumn() == false) {
587  try (Statement stmt = con.createStatement()) {
588  String sql = "ALTER TABLE events ADD COLUMN tagged INTEGER"; // NON-NLS
589  stmt.execute(sql);
590  } catch (SQLException ex) {
591  LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS
592  }
593  }
594 
595  if (hasHashHitColumn() == false) {
596  try (Statement stmt = con.createStatement()) {
597  String sql = "ALTER TABLE events ADD COLUMN hash_hit INTEGER"; // NON-NLS
598  stmt.execute(sql);
599  } catch (SQLException ex) {
600  LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS
601  }
602  }
603 
604  try (Statement stmt = con.createStatement()) {
605  String sql = "CREATE TABLE if not exists hash_sets " //NON-NLS
606  + "( hash_set_id INTEGER primary key," //NON-NLS
607  + " hash_set_name VARCHAR(255) UNIQUE NOT NULL)"; //NON-NLS
608  stmt.execute(sql);
609  } catch (SQLException ex) {
610  LOGGER.log(Level.SEVERE, "problem creating hash_sets table", ex); //NON-NLS
611  }
612 
613  try (Statement stmt = con.createStatement()) {
614  String sql = "CREATE TABLE if not exists hash_set_hits " //NON-NLS
615  + "(hash_set_id INTEGER REFERENCES hash_sets(hash_set_id) not null, " //NON-NLS
616  + " event_id INTEGER REFERENCES events(event_id) not null, " //NON-NLS
617  + " PRIMARY KEY (hash_set_id, event_id))"; //NON-NLS
618  stmt.execute(sql);
619  } catch (SQLException ex) {
620  LOGGER.log(Level.SEVERE, "problem creating hash_set_hits table", ex); //NON-NLS
621  }
622 
622 
623  initializeTagsTable();
624 
625  createIndex("events", Arrays.asList("datasource_id")); //NON-NLS
626  createIndex("events", Arrays.asList("event_id", "hash_hit")); //NON-NLS
627  createIndex("events", Arrays.asList("event_id", "tagged")); //NON-NLS
628  createIndex("events", Arrays.asList("file_id")); //NON-NLS
629  createIndex("events", Arrays.asList("artifact_id")); //NON-NLS
630  createIndex("events", Arrays.asList("sub_type", "short_description", "time")); //NON-NLS
631  createIndex("events", Arrays.asList("base_type", "short_description", "time")); //NON-NLS
632  createIndex("events", Arrays.asList("time")); //NON-NLS
633  createIndex("events", Arrays.asList("known_state")); //NON-NLS
634 
635  try {
636  insertRowStmt = prepareStatement(
637  "INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hash_hit, tagged) " // NON-NLS
638  + "VALUES (?,?,?,?,?,?,?,?,?,?,?,?)"); // NON-NLS
639  getHashSetNamesStmt = prepareStatement("SELECT hash_set_id, hash_set_name FROM hash_sets"); // NON-NLS
640  getDataSourceIDsStmt = prepareStatement("SELECT DISTINCT datasource_id FROM events WHERE datasource_id != 0"); // NON-NLS
641  getMaxTimeStmt = prepareStatement("SELECT Max(time) AS max FROM events"); // NON-NLS
642  getMinTimeStmt = prepareStatement("SELECT Min(time) AS min FROM events"); // NON-NLS
643  getEventByIDStmt = prepareStatement("SELECT * FROM events WHERE event_id = ?"); // NON-NLS
644  insertHashSetStmt = prepareStatement("INSERT OR IGNORE INTO hash_sets (hash_set_name) values (?)"); //NON-NLS
645  selectHashSetStmt = prepareStatement("SELECT hash_set_id FROM hash_sets WHERE hash_set_name = ?"); //NON-NLS
646  insertHashHitStmt = prepareStatement("INSERT OR IGNORE INTO hash_set_hits (hash_set_id, event_id) values (?,?)"); //NON-NLS
647  insertTagStmt = prepareStatement("INSERT OR IGNORE INTO tags (tag_id, tag_name_id,tag_name_display_name, event_id) values (?,?,?,?)"); //NON-NLS
648  deleteTagStmt = prepareStatement("DELETE FROM tags WHERE tag_id = ?"); //NON-NLS
649 
650  /*
651  * This SQL query is really just a select count(*), but that has
652  * performance problems on very large tables unless you include
653  * a where clause; see http://stackoverflow.com/a/9338276/4004683
654  * for more.
655  */
656  countAllEventsStmt = prepareStatement("SELECT count(event_id) AS count FROM events WHERE event_id IS NOT null"); //NON-NLS
657  dropEventsTableStmt = prepareStatement("DROP TABLE IF EXISTS events"); //NON-NLS
658  dropHashSetHitsTableStmt = prepareStatement("DROP TABLE IF EXISTS hash_set_hits"); //NON-NLS
659  dropHashSetsTableStmt = prepareStatement("DROP TABLE IF EXISTS hash_sets"); //NON-NLS
660  dropTagsTableStmt = prepareStatement("DROP TABLE IF EXISTS tags"); //NON-NLS
661  dropDBInfoTableStmt = prepareStatement("DROP TABLE IF EXISTS db_info"); //NON-NLS
662  selectNonArtifactEventIDsByObjectIDStmt = prepareStatement("SELECT event_id FROM events WHERE file_id == ? AND artifact_id IS NULL"); //NON-NLS
663  selectEventIDsBYObjectAndArtifactIDStmt = prepareStatement("SELECT event_id FROM events WHERE file_id == ? AND artifact_id = ?"); //NON-NLS
664  } catch (SQLException sQLException) {
665  LOGGER.log(Level.SEVERE, "failed to prepareStatement", sQLException); // NON-NLS
666  }
667  } finally {
668  DBLock.unlock();
669  }
670  }
671 
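/**
 * Get the IDs of all events derived from the given artifact.
 */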
681  List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) {
682  DBLock.lock();
683 
684  String query = "SELECT event_id FROM events WHERE artifact_id == " + artifact.getArtifactID();
685 
686  ArrayList<Long> results = new ArrayList<>();
687  try (Statement stmt = con.createStatement();
688  ResultSet rs = stmt.executeQuery(query);) {
689  while (rs.next()) {
690  results.add(rs.getLong("event_id"));
691  }
692  } catch (SQLException ex) {
693  LOGGER.log(Level.SEVERE, "Error executing getEventIDsForArtifact query.", ex); // NON-NLS
694  } finally {
695  DBLock.unlock();
696  }
697  return results;
698  }
699 
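/**
 * Get the IDs of all events for the given file, optionally including events
 * derived from artifacts associated with that file.
 */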
715  List<Long> getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts) {
716  DBLock.lock();
717 
718  String query = "SELECT event_id FROM events WHERE file_id == " + file.getId()
719  + (includeDerivedArtifacts ? "" : " AND artifact_id IS NULL");
720 
721  ArrayList<Long> results = new ArrayList<>();
722  try (Statement stmt = con.createStatement();
723  ResultSet rs = stmt.executeQuery(query);) {
724  while (rs.next()) {
725  results.add(rs.getLong("event_id"));
726  }
727  } catch (SQLException ex) {
728  LOGGER.log(Level.SEVERE, "Error executing getEventIDsForFile query.", ex); // NON-NLS
729  } finally {
730  DBLock.unlock();
731  }
732  return results;
733  }
734 
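/**
 * Create the tags table if it does not already exist; also used to rebuild
 * the table after it has been dropped.
 */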
739  private void initializeTagsTable() {
740  try (Statement stmt = con.createStatement()) {
741  String sql = "CREATE TABLE IF NOT EXISTS tags " //NON-NLS
742  + "(tag_id INTEGER NOT NULL," //NON-NLS
743  + " tag_name_id INTEGER NOT NULL, " //NON-NLS
744  + " tag_name_display_name TEXT NOT NULL, " //NON-NLS
745  + " event_id INTEGER REFERENCES events(event_id) NOT NULL, " //NON-NLS
746  + " PRIMARY KEY (event_id, tag_name_id))"; //NON-NLS
747  stmt.execute(sql);
748  } catch (SQLException ex) {
749  LOGGER.log(Level.SEVERE, "problem creating tags table", ex); //NON-NLS
750  }
751  }
752 
758  private void createIndex(final String tableName, final List<String> columnList) {
759  String indexColumns = columnList.stream().collect(Collectors.joining(",", "(", ")"));
760  String indexName = tableName + "_" + StringUtils.join(columnList, "_") + "_idx"; //NON-NLS
761  try (Statement stmt = con.createStatement()) {
762 
763  String sql = "CREATE INDEX IF NOT EXISTS " + indexName + " ON " + tableName + indexColumns; // NON-NLS
764  stmt.execute(sql);
765  } catch (SQLException ex) {
766  LOGGER.log(Level.SEVERE, "problem creating index " + indexName, ex); // NON-NLS
767  }
768  }
769 
775  private boolean hasDBColumn(@Nonnull final String dbColumn) {
776  try (Statement stmt = con.createStatement()) {
777 
778  ResultSet executeQuery = stmt.executeQuery("PRAGMA table_info(events)"); //NON-NLS
779  while (executeQuery.next()) {
780  if (dbColumn.equals(executeQuery.getString("name"))) {
781  return true;
782  }
783  }
784  } catch (SQLException ex) {
785  LOGGER.log(Level.SEVERE, "problem executing pragma", ex); // NON-NLS
786  }
787  return false;
788  }
789 
790  private boolean hasDataSourceIDColumn() {
791  return hasDBColumn("datasource_id"); //NON-NLS
792  }
793 
794  private boolean hasTaggedColumn() {
795  return hasDBColumn("tagged"); //NON-NLS
796  }
797 
798  private boolean hasHashHitColumn() {
799  return hasDBColumn("hash_hit"); //NON-NLS
800  }
801 
802  void insertEvent(long time, EventType type, long datasourceID, long objID,
803  Long artifactID, String fullDescription, String medDescription,
804  String shortDescription, TskData.FileKnown known, Set<String> hashSets, List<? extends Tag> tags) {
805 
806  EventTransaction transaction = beginTransaction();
807  insertEvent(time, type, datasourceID, objID, artifactID, fullDescription, medDescription, shortDescription, known, hashSets, tags, transaction);
808  commitTransaction(transaction);
809  }
810 
817  void insertEvent(long time, EventType type, long datasourceID, long objID,
818  Long artifactID, String fullDescription, String medDescription,
819  String shortDescription, TskData.FileKnown known, Set<String> hashSetNames,
820  List<? extends Tag> tags, EventTransaction transaction) {
821 
822  if (transaction.isClosed()) {
823  throw new IllegalArgumentException("can't update database with closed transaction"); // NON-NLS
824  }
825  int typeNum = RootEventType.allTypes.indexOf(type);
826  int superTypeNum = type.getSuperType().ordinal();
827 
828  DBLock.lock();
829  try {
830 
831  //"INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hashHit, tagged) "
832  insertRowStmt.clearParameters();
833  insertRowStmt.setLong(1, datasourceID);
834  insertRowStmt.setLong(2, objID);
835  if (artifactID != null) {
836  insertRowStmt.setLong(3, artifactID);
837  } else {
838  insertRowStmt.setNull(3, Types.NULL);
839  }
840  insertRowStmt.setLong(4, time);
841 
842  if (typeNum != -1) {
843  insertRowStmt.setInt(5, typeNum);
844  } else {
845  insertRowStmt.setNull(5, Types.INTEGER);
846  }
847 
848  insertRowStmt.setInt(6, superTypeNum);
849  insertRowStmt.setString(7, fullDescription);
850  insertRowStmt.setString(8, medDescription);
851  insertRowStmt.setString(9, shortDescription);
852 
853  insertRowStmt.setByte(10, known == null ? TskData.FileKnown.UNKNOWN.getFileKnownValue() : known.getFileKnownValue());
854 
855  insertRowStmt.setInt(11, hashSetNames.isEmpty() ? 0 : 1);
856  insertRowStmt.setInt(12, tags.isEmpty() ? 0 : 1);
857 
858  insertRowStmt.executeUpdate();
859 
860  try (ResultSet generatedKeys = insertRowStmt.getGeneratedKeys()) {
861  while (generatedKeys.next()) {
862  long eventID = generatedKeys.getLong("last_insert_rowid()"); //NON-NLS
863  for (String name : hashSetNames) {
864 
865  // "insert or ignore into hash_sets (hash_set_name) values (?)"
866  insertHashSetStmt.setString(1, name);
867  insertHashSetStmt.executeUpdate();
868 
869  //TODO: use nested select to get hash_set_id rather than separate statement/query ?
870  //"select hash_set_id from hash_sets where hash_set_name = ?"
871  selectHashSetStmt.setString(1, name);
872  try (ResultSet rs = selectHashSetStmt.executeQuery()) {
873  while (rs.next()) {
874  int hashsetID = rs.getInt("hash_set_id"); //NON-NLS
875  //"insert or ignore into hash_set_hits (hash_set_id, obj_id) values (?,?)";
876  insertHashHitStmt.setInt(1, hashsetID);
877  insertHashHitStmt.setLong(2, eventID);
878  insertHashHitStmt.executeUpdate();
879  break;
880  }
881  }
882  }
883  for (Tag tag : tags) {
884  //could this be one insert? is there a performance win?
885  insertTag(tag, eventID);
886  }
887  break;
888  }
889  }
890 
891  } catch (SQLException ex) {
892  LOGGER.log(Level.SEVERE, "failed to insert event", ex); // NON-NLS
893  } finally {
894  DBLock.unlock();
895  }
896  }
897 
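/**
 * Mark the events for the given object/artifact as tagged and record the
 * tag; returns the affected event IDs, or an empty set on error.
 */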
911  Set<Long> addTag(long objectID, @Nullable Long artifactID, Tag tag, EventTransaction transaction) {
912  if (transaction != null && transaction.isClosed()) {
913  throw new IllegalArgumentException("can't update database with closed transaction"); // NON-NLS
914  }
915  DBLock.lock();
916  try {
917  Set<Long> eventIDs = markEventsTagged(objectID, artifactID, true);
918  for (Long eventID : eventIDs) {
919  insertTag(tag, eventID);
920  }
921  return eventIDs;
922  } catch (SQLException ex) {
923  LOGGER.log(Level.SEVERE, "failed to add tag to event", ex); // NON-NLS
924  } finally {
925  DBLock.unlock();
926  }
927  return Collections.emptySet();
928  }
929 
941  private void insertTag(Tag tag, long eventID) throws SQLException {
942 
943  //"INSERT OR IGNORE INTO tags (tag_id, tag_name_id,tag_name_display_name, event_id) values (?,?,?,?)"
944  insertTagStmt.clearParameters();
945  insertTagStmt.setLong(1, tag.getId());
946  insertTagStmt.setLong(2, tag.getName().getId());
947  insertTagStmt.setString(3, tag.getName().getDisplayName());
948  insertTagStmt.setLong(4, eventID);
949  insertTagStmt.executeUpdate();
950  }
951 
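/**
 * Remove the given tag and update the tagged flag of the affected events
 * according to stillTagged; returns the affected event IDs.
 */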
967  Set<Long> deleteTag(long objectID, @Nullable Long artifactID, long tagID, boolean stillTagged) {
968  DBLock.lock();
969  try {
970  //"DELETE FROM tags WHERE tag_id = ?
971  deleteTagStmt.clearParameters();
972  deleteTagStmt.setLong(1, tagID);
973  deleteTagStmt.executeUpdate();
974 
975  return markEventsTagged(objectID, artifactID, stillTagged);
976  } catch (SQLException ex) {
977  LOGGER.log(Level.SEVERE, "failed to delete tag from event", ex); // NON-NLS
978  } finally {
979  DBLock.unlock();
980  }
981  return Collections.emptySet();
982  }
983 
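/**
 * Look up the events for the given object (and optional artifact) and set
 * their tagged flag; returns the IDs of the updated events.
 */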
1004  private Set<Long> markEventsTagged(long objectID, @Nullable Long artifactID, boolean tagged) throws SQLException {
1005 
1006  PreparedStatement selectStmt;
1007  if (Objects.isNull(artifactID)) {
1008  //"SELECT event_id FROM events WHERE file_id == ? AND artifact_id IS NULL"
1009  selectNonArtifactEventIDsByObjectIDStmt.clearParameters();
1010  selectNonArtifactEventIDsByObjectIDStmt.setLong(1, objectID);
1011  selectStmt = selectNonArtifactEventIDsByObjectIDStmt;
1012  } else {
1013  //"SELECT event_id FROM events WHERE file_id == ? AND artifact_id = ?"
1014  selectEventIDsBYObjectAndArtifactIDStmt.clearParameters();
1015  selectEventIDsBYObjectAndArtifactIDStmt.setLong(1, objectID);
1016  selectEventIDsBYObjectAndArtifactIDStmt.setLong(2, artifactID);
1017  selectStmt = selectEventIDsBYObjectAndArtifactIDStmt;
1018  }
1019 
1020  HashSet<Long> eventIDs = new HashSet<>();
1021  try (ResultSet executeQuery = selectStmt.executeQuery();) {
1022  while (executeQuery.next()) {
1023  eventIDs.add(executeQuery.getLong("event_id")); //NON-NLS
1024  }
1025  }
1026 
1027  //update tagged state for all events with selected ids
1028  try (Statement updateStatement = con.createStatement();) {
1029  updateStatement.executeUpdate("UPDATE events SET tagged = " + (tagged ? 1 : 0) //NON-NLS
1030  + " WHERE event_id IN (" + StringUtils.join(eventIDs, ",") + ")"); //NON-NLS
1031  }
1032 
1033  return eventIDs;
1034  }
1035 
1036  void rollBackTransaction(EventTransaction trans) {
1037  trans.rollback();
1038  }
1039 
1040  private void closeStatements() throws SQLException {
1041  for (PreparedStatement pStmt : preparedStatements) {
1042  pStmt.close();
1043  }
1044  }
1045 
1046  private void configureDB() throws SQLException {
1047  DBLock.lock();
1048  //this should match Sleuthkit db setup
1049  try (Statement statement = con.createStatement()) {
1050  //reduce i/o operations, we have no OS crash recovery anyway
1051  statement.execute("PRAGMA synchronous = OFF;"); // NON-NLS
1052  //we don't use this feature, so turn it off for minimal speed up on queries
1053  //this is deprecated and not recommended
1054  statement.execute("PRAGMA count_changes = OFF;"); // NON-NLS
1055  //this made a big difference to query speed
1056  statement.execute("PRAGMA temp_store = MEMORY"); // NON-NLS
1057  //this made a modest improvement in query speeds
1058  statement.execute("PRAGMA cache_size = 50000"); // NON-NLS
1059  //we never delete anything so...
1060  statement.execute("PRAGMA auto_vacuum = 0"); // NON-NLS
1061  //allow queries while in a transaction - no need for read locks
1062  statement.execute("PRAGMA read_uncommitted = True;"); // NON-NLS
1063  } finally {
1064  DBLock.unlock();
1065  }
1066 
1067  try {
1068  LOGGER.log(Level.INFO, String.format("sqlite-jdbc version %s loaded in %s mode", // NON-NLS
1069  SQLiteJDBCLoader.getVersion(), SQLiteJDBCLoader.isNativeMode() ? "native" : "pure-java")); // NON-NLS
1070  } catch (Exception exception) {
1071  LOGGER.log(Level.SEVERE, "Failed to determine if sqlite-jdbc is loaded in native or pure-java mode.", exception); //NON-NLS
1072  }
1073  }
1074 
1075  private SingleEvent constructTimeLineEvent(ResultSet rs) throws SQLException {
1076  return new SingleEvent(rs.getLong("event_id"), //NON-NLS
1077  rs.getLong("datasource_id"), //NON-NLS
1078  rs.getLong("file_id"), //NON-NLS
1079  rs.getLong("artifact_id"), //NON-NLS
1080  rs.getLong("time"), RootEventType.allTypes.get(rs.getInt("sub_type")), //NON-NLS
1081  rs.getString("full_description"), //NON-NLS
1082  rs.getString("med_description"), //NON-NLS
1083  rs.getString("short_description"), //NON-NLS
1084  TskData.FileKnown.valueOf(rs.getByte("known_state")), //NON-NLS
1085  rs.getInt("hash_hit") != 0, //NON-NLS
1086  rs.getInt("tagged") != 0); //NON-NLS
1087  }
1088 
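/**
 * Count events, grouped by base type or sub type depending on the zoom
 * level, for the given time range and filter.
 */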
1105  private Map<EventType, Long> countEventsByType(Long startTime, Long endTime, RootFilter filter, EventTypeZoomLevel zoomLevel) {
1106  if (Objects.equals(startTime, endTime)) {
1107  endTime++;
1108  }
1109 
1110  Map<EventType, Long> typeMap = new HashMap<>();
1111 
1112  //do we want the root or subtype column of the database
1113  final boolean useSubTypes = (zoomLevel == EventTypeZoomLevel.SUB_TYPE);
1114 
1115  //get some info about the range of dates requested
1116  final String queryString = "SELECT count(DISTINCT events.event_id) AS count, " + typeColumnHelper(useSubTypes) //NON-NLS
1117  + " FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time >= " + startTime + " AND time < " + endTime + " AND " + SQLHelper.getSQLWhere(filter) // NON-NLS
1118  + " GROUP BY " + typeColumnHelper(useSubTypes); // NON-NLS
1119 
1120  DBLock.lock();
1121  try (Statement stmt = con.createStatement();
1122  ResultSet rs = stmt.executeQuery(queryString);) {
1123  while (rs.next()) {
1124  EventType type = useSubTypes
1125  ? RootEventType.allTypes.get(rs.getInt("sub_type")) //NON-NLS
1126  : BaseTypes.values()[rs.getInt("base_type")]; //NON-NLS
1127 
1128  typeMap.put(type, rs.getLong("count")); // NON-NLS
1129  }
1130 
1131  } catch (Exception ex) {
1132  LOGGER.log(Level.SEVERE, "Error getting count of events from db.", ex); // NON-NLS
1133  } finally {
1134  DBLock.unlock();
1135  }
1136  return typeMap;
1137  }
1138 
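/**
 * Build the EventStripes for the detail view: group events by time
 * interval, type, and description, then merge adjacent clusters into
 * stripes.
 */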
1150  List<EventStripe> getEventStripes(ZoomParams params) {
1151  //unpack params
1152  Interval timeRange = params.getTimeRange();
1153  RootFilter filter = params.getFilter();
1154  DescriptionLoD descriptionLOD = params.getDescriptionLOD();
1155  EventTypeZoomLevel typeZoomLevel = params.getTypeZoomLevel();
1156 
1157  long start = timeRange.getStartMillis() / 1000;
1158  long end = timeRange.getEndMillis() / 1000;
1159 
1160  //ensure length of queried interval is not 0
1161  end = Math.max(end, start + 1);
1162 
1163  //get some info about the time range requested
1164  RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(timeRange);
1165 
1166  //build dynamic parts of query
1167  String strfTimeFormat = SQLHelper.getStrfTimeFormat(rangeInfo.getPeriodSize());
1168  String descriptionColumn = SQLHelper.getDescriptionColumn(descriptionLOD);
1169  final boolean useSubTypes = typeZoomLevel.equals(EventTypeZoomLevel.SUB_TYPE);
1170  String timeZone = TimeLineController.getTimeZone().get().equals(TimeZone.getDefault()) ? ", 'localtime'" : ""; // NON-NLS
1171  String typeColumn = typeColumnHelper(useSubTypes);
1172 
1173  //compose query string, the new-lines are only for nicer formatting if printing the entire query
1174  String query = "SELECT strftime('" + strfTimeFormat + "',time , 'unixepoch'" + timeZone + ") AS interval," // NON-NLS
1175  + "\n group_concat(events.event_id) as event_ids," //NON-NLS
1176  + "\n group_concat(CASE WHEN hash_hit = 1 THEN events.event_id ELSE NULL END) as hash_hits," //NON-NLS
1177  + "\n group_concat(CASE WHEN tagged = 1 THEN events.event_id ELSE NULL END) as taggeds," //NON-NLS
1178  + "\n min(time), max(time), " + typeColumn + ", " + descriptionColumn // NON-NLS
1179  + "\n FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) // NON-NLS
1180  + "\n WHERE time >= " + start + " AND time < " + end + " AND " + SQLHelper.getSQLWhere(filter) // NON-NLS
1181  + "\n GROUP BY interval, " + typeColumn + " , " + descriptionColumn // NON-NLS
1182  + "\n ORDER BY min(time)"; // NON-NLS
1183 
1184  switch (Version.getBuildType()) {
1185  case DEVELOPMENT:
1186 // LOGGER.log(Level.INFO, "executing timeline query: {0}", query); //NON-NLS
1187  break;
1188  case RELEASE:
1189  default:
1190  }
1191 
1192  // perform query and map results to EventCluster objects
1193  List<EventCluster> events = new ArrayList<>();
1194 
1195  DBLock.lock();
1196  try (Statement createStatement = con.createStatement();
1197  ResultSet rs = createStatement.executeQuery(query)) {
1198  while (rs.next()) {
1199  events.add(eventClusterHelper(rs, useSubTypes, descriptionLOD, filter.getTagsFilter()));
1200  }
1201  } catch (SQLException ex) {
1202  LOGGER.log(Level.SEVERE, "Failed to get events with query: " + query, ex); // NON-NLS
1203  } finally {
1204  DBLock.unlock();
1205  }
1206 
1207  return mergeClustersToStripes(rangeInfo.getPeriodSize().getPeriod(), events);
1208  }
1209 
1224  private EventCluster eventClusterHelper(ResultSet rs, boolean useSubTypes, DescriptionLoD descriptionLOD, TagsFilter filter) throws SQLException {
1225  Interval interval = new Interval(rs.getLong("min(time)") * 1000, rs.getLong("max(time)") * 1000, TimeLineController.getJodaTimeZone());// NON-NLS
1226  String eventIDsString = rs.getString("event_ids");// NON-NLS
1227  List<Long> eventIDs = SQLHelper.unGroupConcat(eventIDsString, Long::valueOf);
1228  String description = rs.getString(SQLHelper.getDescriptionColumn(descriptionLOD));
1229  EventType type = useSubTypes ? RootEventType.allTypes.get(rs.getInt("sub_type")) : BaseTypes.values()[rs.getInt("base_type")];// NON-NLS
1230 
1231  List<Long> hashHits = SQLHelper.unGroupConcat(rs.getString("hash_hits"), Long::valueOf); //NON-NLS
1232  List<Long> tagged = SQLHelper.unGroupConcat(rs.getString("taggeds"), Long::valueOf); //NON-NLS
1233 
1234  return new EventCluster(interval, type, eventIDs, hashHits, tagged, description, descriptionLOD);
1235  }
1236 
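/**
 * Merge adjacent clusters of the same type and description (separated by
 * less than a quarter of the time unit) and collapse the result into
 * EventStripes.
 */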
1250  static private List<EventStripe> mergeClustersToStripes(Period timeUnitLength, List<EventCluster> preMergedEvents) {
1251 
1252  //effectively map from type to (map from description to events)
1253  Map<EventType, SetMultimap< String, EventCluster>> typeMap = new HashMap<>();
1254 
1255  for (EventCluster aggregateEvent : preMergedEvents) {
1256  typeMap.computeIfAbsent(aggregateEvent.getEventType(), eventType -> HashMultimap.create())
1257  .put(aggregateEvent.getDescription(), aggregateEvent);
1258  }
1259  //result list to return
1260  ArrayList<EventCluster> aggEvents = new ArrayList<>();
1261 
1262  //For each (type, description) key, merge agg events
1263  for (SetMultimap<String, EventCluster> descrMap : typeMap.values()) {
1264  //for each description ...
1265  for (String descr : descrMap.keySet()) {
1266  //run through the sorted events, merging together adjacent events
1267  Iterator<EventCluster> iterator = descrMap.get(descr).stream()
1268  .sorted(Comparator.comparing(event -> event.getSpan().getStartMillis()))
1269  .iterator();
1270  EventCluster current = iterator.next();
1271  while (iterator.hasNext()) {
1272  EventCluster next = iterator.next();
1273  Interval gap = current.getSpan().gap(next.getSpan());
1274 
1275  //if they overlap or gap is less than one quarter of timeUnitLength
1276  //TODO: 1/4 factor is arbitrary. review! -jm
1277  if (gap == null || gap.toDuration().getMillis() <= timeUnitLength.toDurationFrom(gap.getStart()).getMillis() / 4) {
1278  //merge them
1279  current = EventCluster.merge(current, next);
1280  } else {
1281  //done merging into current, set next as new current
1282  aggEvents.add(current);
1283  current = next;
1284  }
1285  }
1286  aggEvents.add(current);
1287  }
1288  }
1289 
1290  //merge clusters to stripes
1291  Map<ImmutablePair<EventType, String>, EventStripe> stripeDescMap = new HashMap<>();
1292 
1293  for (EventCluster eventCluster : aggEvents) {
1294  stripeDescMap.merge(ImmutablePair.of(eventCluster.getEventType(), eventCluster.getDescription()),
1295  new EventStripe(eventCluster), EventStripe::merge);
1296  }
1297 
1298  return stripeDescMap.values().stream().sorted(Comparator.comparing(EventStripe::getStartMillis)).collect(Collectors.toList());
1299  }
1300 
1301  private static String typeColumnHelper(final boolean useSubTypes) {
1302  return useSubTypes ? "sub_type" : "base_type"; //NON-NLS
1303  }
1304 
1305  private PreparedStatement prepareStatement(String queryString) throws SQLException {
1306  PreparedStatement prepareStatement = con.prepareStatement(queryString);
1307  preparedStatements.add(prepareStatement);
1308  return prepareStatement;
1309  }
1310 
1314  public class EventTransaction {
1315 
1316  private boolean closed = false;
1317 
1325  private EventTransaction() {
1326 
1327  //get the write lock, released in close()
1328  DBLock.lock();
1329  try {
1330  con.setAutoCommit(false);
1331  } catch (SQLException ex) {
1332  LOGGER.log(Level.SEVERE, "failed to set auto-commit to false", ex); // NON-NLS
1333  }
1334 
1335  }
1336 
1337  private void rollback() {
1338  if (!closed) {
1339  try {
1340  con.rollback();
1341 
1342  } catch (SQLException ex1) {
1343  LOGGER.log(Level.SEVERE, "Exception while attempting to rollback!!", ex1); // NON-NLS
1344  } finally {
1345  close();
1346  }
1347  }
1348  }
1349 
1350  private void commit() {
1351  if (!closed) {
1352  try {
1353  con.commit();
1354  // make sure we close before we update, bc they'll need locks
1355  close();
1356 
1357  } catch (SQLException ex) {
1358  LOGGER.log(Level.SEVERE, "Error committing events.db.", ex); // NON-NLS
1359  rollback();
1360  }
1361  }
1362  }
1363 
1364  private void close() {
1365  if (!closed) {
1366  try {
1367  con.setAutoCommit(true);
1368  } catch (SQLException ex) {
1369  LOGGER.log(Level.SEVERE, "Error setting auto-commit to true.", ex); // NON-NLS
1370  } finally {
1371  closed = true;
1372 
1373  DBLock.unlock();
1374  }
1375  }
1376  }
1377 
1378  public Boolean isClosed() {
1379  return closed;
1380  }
1381  }
1382 }