Sleuth Kit Java Bindings (JNI) 4.11.0
Java bindings for using The Sleuth Kit
ScoringManager.java
/*
 * Sleuth Kit Data Model
 *
 * Copyright 2020 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.datamodel;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import org.sleuthkit.datamodel.Score.Priority;
import org.sleuthkit.datamodel.Score.Significance;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;

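/**
 * Provides an API to set and retrieve the aggregate scores of content objects
 * and artifacts in the case database.
 */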
public class ScoringManager {

    private static final Logger LOGGER = Logger.getLogger(ScoringManager.class.getName());

    private final SleuthkitCase db;

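    /**
     * Constructs a ScoringManager for the given case database.
     *
     * @param skCase The case database.
     */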
    ScoringManager(SleuthkitCase skCase) {
        this.db = skCase;
    }

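    /**
     * Gets the aggregate score for the given object.
     *
     * @param objId Object id.
     *
     * @return Aggregate score for the object, or Score.SCORE_UNKNOWN if no
     *         score has been recorded.
     *
     * @throws TskCoreException
     */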
    public Score getAggregateScore(long objId) throws TskCoreException {
        db.acquireSingleUserCaseReadLock();
        try (CaseDbConnection connection = db.getConnection()) {
            return getAggregateScore(objId, connection);
        } finally {
            db.releaseSingleUserCaseReadLock();
        }
    }

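    /**
     * Gets the aggregate scores for the given list of object ids.
     *
     * @param objIds Object ids.
     *
     * @return Map of object id to aggregate score; objects with no recorded
     *         score map to Score.SCORE_UNKNOWN.
     *
     * @throws TskCoreException
     */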
    public Map<Long, Score> getAggregateScores(List<Long> objIds) throws TskCoreException {

        if (objIds.isEmpty()) {
            return Collections.emptyMap();
        }

        String queryString = "SELECT obj_id, significance, priority FROM tsk_aggregate_score WHERE obj_id in "
                + objIds.stream().map(l -> l.toString()).collect(Collectors.joining(",", "(", ")"));

        Map<Long, Score> results = objIds.stream().collect(Collectors.toMap(key -> key, key -> Score.SCORE_UNKNOWN));
        db.acquireSingleUserCaseReadLock();
        try (CaseDbConnection connection = db.getConnection()) {
            try (Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) {
                while (rs.next()) {
                    Long objId = rs.getLong("obj_id");
                    Score score = new Score(Significance.fromID(rs.getInt("significance")), Priority.fromID(rs.getInt("priority")));
                    results.put(objId, score);
                }
            } catch (SQLException ex) {
                throw new TskCoreException("SQLException thrown while running query: " + queryString, ex);
            }
        } finally {
            db.releaseSingleUserCaseReadLock();
        }
        return results;
    }

    private Score getAggregateScore(long objId, CaseDbTransaction transaction) throws TskCoreException {
        CaseDbConnection connection = transaction.getConnection();
        return getAggregateScore(objId, connection);
    }

    private Score getAggregateScore(long objId, CaseDbConnection connection) throws TskCoreException {
        String queryString = "SELECT significance, priority FROM tsk_aggregate_score WHERE obj_id = " + objId;
        try (Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) {
            if (rs.next()) {
                return new Score(Significance.fromID(rs.getInt("significance")), Priority.fromID(rs.getInt("priority")));
            } else {
                return Score.SCORE_UNKNOWN;
            }
        } catch (SQLException ex) {
            throw new TskCoreException("SQLException thrown while running query: " + queryString, ex);
        }
    }

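    /**
     * Inserts or updates the aggregate score for the given object.
     *
     * @param objId              Object id.
     * @param dataSourceObjectId Object id of the data source, may be null.
     * @param score              Score to set.
     * @param transaction        Transaction to use for the database operation.
     *
     * @throws TskCoreException
     */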
    private void setAggregateScore(long objId, Long dataSourceObjectId, Score score, CaseDbTransaction transaction) throws TskCoreException {

        String insertSQLString = "INSERT INTO tsk_aggregate_score (obj_id, data_source_obj_id, significance, priority) VALUES (?, ?, ?, ?)"
                + " ON CONFLICT (obj_id) DO UPDATE SET significance = ?, priority = ?";

        CaseDbConnection connection = transaction.getConnection();
        try {
            PreparedStatement preparedStatement = connection.getPreparedStatement(insertSQLString, Statement.NO_GENERATED_KEYS);
            preparedStatement.clearParameters();

            preparedStatement.setLong(1, objId);
            if (dataSourceObjectId != null) {
                preparedStatement.setLong(2, dataSourceObjectId);
            } else {
                preparedStatement.setNull(2, java.sql.Types.NULL);
            }
            preparedStatement.setInt(3, score.getSignificance().getId());
            preparedStatement.setInt(4, score.getPriority().getId());

            preparedStatement.setInt(5, score.getSignificance().getId());
            preparedStatement.setInt(6, score.getPriority().getId());

            connection.executeUpdate(preparedStatement);
        } catch (SQLException ex) {
            throw new TskCoreException(String.format("Error updating aggregate score, query: %s for objId = %d", insertSQLString, objId), ex); //NON-NLS
        }

    }

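    /**
     * Updates the aggregate score of an object after a new analysis result
     * has been added. The aggregate score is updated when the new result's
     * score is higher than the current one, or when the current score is
     * unknown and the new score is not.
     *
     * @param objId              Object id.
     * @param dataSourceObjectId Object id of the data source, may be null.
     * @param newResultScore     Score of the newly added analysis result.
     * @param transaction        Transaction to use for the database operation.
     *
     * @return The aggregate score for the object after the update.
     *
     * @throws TskCoreException
     */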
    Score updateAggregateScoreAfterAddition(long objId, Long dataSourceObjectId, Score newResultScore, CaseDbTransaction transaction) throws TskCoreException {

        /* Get an exclusive write lock on the DB before we read anything so that we know we are
         * the only one reading existing scores and updating. The risk is that two computers
         * could update the score and the aggregate score ends up being incorrect.
         *
         * NOTE: The alternative design is to add a 'version' column for opportunistic locking
         * and calculate these outside of a transaction. We opted for table locking for performance
         * reasons so that we can still add the analysis results in a batch. That remains an option
         * if we get into deadlocks with the current design.
         */
        try {
            CaseDbConnection connection = transaction.getConnection();
            connection.getAggregateScoreTableWriteLock();
        } catch (SQLException ex) {
            throw new TskCoreException("Error getting exclusive write lock on aggregate score table", ex); //NON-NLS
        }

        // Get the current score.
        Score currentAggregateScore = ScoringManager.this.getAggregateScore(objId, transaction);

        // Update if the current score is unknown and the new score is not
        // (this allows a None/good score to be recorded), or if the new score
        // is higher than the current score.
        if ((currentAggregateScore.compareTo(Score.SCORE_UNKNOWN) == 0 && newResultScore.compareTo(Score.SCORE_UNKNOWN) != 0)
                || (Score.getScoreComparator().compare(newResultScore, currentAggregateScore) > 0)) {
            setAggregateScore(objId, dataSourceObjectId, newResultScore, transaction);

            // Register the score change in the transaction.
            transaction.registerScoreChange(new ScoreChange(objId, dataSourceObjectId, currentAggregateScore, newResultScore));
            return newResultScore;
        } else {
            // Return the current score unchanged.
            return currentAggregateScore;
        }
    }

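    /**
     * Recalculates the aggregate score of an object after an analysis result
     * has been deleted. The new score is the highest of the object's
     * remaining analysis result scores and the score derived from its
     * content tags.
     *
     * @param objId              Object id.
     * @param dataSourceObjectId Object id of the data source, may be null.
     * @param transaction        Transaction to use for the database operation.
     *
     * @return The recalculated aggregate score for the object.
     *
     * @throws TskCoreException
     */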
    Score updateAggregateScoreAfterDeletion(long objId, Long dataSourceObjectId, CaseDbTransaction transaction) throws TskCoreException {

        CaseDbConnection connection = transaction.getConnection();

        /* Get an exclusive write lock on the DB before we read anything so that we know we are
         * the only one reading existing scores and updating. The risk is that two computers
         * could update the score and the aggregate score ends up being incorrect.
         *
         * NOTE: The alternative design is to add a 'version' column for opportunistic locking
         * and calculate these outside of a transaction. We opted for table locking for performance
         * reasons so that we can still add the analysis results in a batch. That remains an option
         * if we get into deadlocks with the current design.
         */
        try {
            connection.getAggregateScoreTableWriteLock();
        } catch (SQLException ex) {
            throw new TskCoreException("Error getting exclusive write lock on aggregate score table", ex); //NON-NLS
        }

        // Get the current score.
        Score currentScore = ScoringManager.this.getAggregateScore(objId, transaction);

        // Calculate the score from scratch by getting all of the object's
        // analysis results and taking the highest score.
        List<AnalysisResult> analysisResults = db.getBlackboard().getAnalysisResults(objId, connection);
        Score newScore = Score.SCORE_UNKNOWN;
        for (AnalysisResult iter : analysisResults) {
            Score iterScore = iter.getScore();
            if (Score.getScoreComparator().compare(iterScore, newScore) > 0) {
                newScore = iterScore;
            }
        }

        // Take the maximum of the aggregate score calculated from the analysis results
        // and the score derived from the maximum known status of a content tag on this content.
        Optional<Score> tagScore = db.getTaggingManager().getMaxTagKnownStatus(objId, transaction)
                .map(knownStatus -> TaggingManager.getTagScore(knownStatus));

        if (tagScore.isPresent() && Score.getScoreComparator().compare(tagScore.get(), newScore) > 0) {
            newScore = tagScore.get();
        }

        // Only change the DB if we got a new score.
        if (newScore.compareTo(currentScore) != 0) {
            setAggregateScore(objId, dataSourceObjectId, newScore, transaction);

            // Register the score change with the transaction so an event can be fired for it.
            transaction.registerScoreChange(new ScoreChange(objId, dataSourceObjectId, currentScore, newScore));
        }
        return newScore;
    }

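    /**
     * Gets the count of content items within the given data source that have
     * the given aggregate significance.
     *
     * @param dataSourceObjectId Object id of the data source.
     * @param significance       Significance to look for.
     *
     * @return Number of items with the given significance.
     *
     * @throws TskCoreException
     */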
    public long getContentCount(long dataSourceObjectId, Score.Significance significance) throws TskCoreException {
        db.acquireSingleUserCaseReadLock();
        try (CaseDbConnection connection = db.getConnection()) {
            return getContentCount(dataSourceObjectId, significance, connection);
        } finally {
            db.releaseSingleUserCaseReadLock();
        }
    }

    private long getContentCount(long dataSourceObjectId, Score.Significance significance, CaseDbConnection connection) throws TskCoreException {
        String queryString = "SELECT COUNT(obj_id) AS count FROM tsk_aggregate_score"
                + " WHERE data_source_obj_id = " + dataSourceObjectId
                + " AND significance = " + significance.getId();

        try (Statement statement = connection.createStatement();
                ResultSet resultSet = connection.executeQuery(statement, queryString)) {

            long count = 0;
            if (resultSet.next()) {
                count = resultSet.getLong("count");
            }
            return count;
        } catch (SQLException ex) {
            throw new TskCoreException("Error getting count of items with significance = " + significance.toString(), ex);
        }
    }

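    /**
     * Gets the content items within the given data source that have the given
     * aggregate significance.
     *
     * @param dataSourceObjectId Object id of the data source.
     * @param significance       Significance to look for.
     *
     * @return List of content with the given significance.
     *
     * @throws TskCoreException
     */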
    public List<Content> getContent(long dataSourceObjectId, Score.Significance significance) throws TskCoreException {
        db.acquireSingleUserCaseReadLock();
        try (CaseDbConnection connection = db.getConnection()) {
            return getContent(dataSourceObjectId, significance, connection);
        } finally {
            db.releaseSingleUserCaseReadLock();
        }
    }

    private List<Content> getContent(long dataSourceObjectId, Score.Significance significance, CaseDbConnection connection) throws TskCoreException {
        String queryString = "SELECT obj_id FROM tsk_aggregate_score"
                + " WHERE data_source_obj_id = " + dataSourceObjectId
                + " AND significance = " + significance.getId();

        try (Statement statement = connection.createStatement();
                ResultSet resultSet = connection.executeQuery(statement, queryString)) {

            List<Content> items = new ArrayList<>();
            while (resultSet.next()) {
                long objId = resultSet.getLong("obj_id");
                items.add(db.getContentById(objId));
            }
            return items;
        } catch (SQLException ex) {
            throw new TskCoreException("Error getting list of items with significance = " + significance.toString(), ex);
        }
    }
}
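
A minimal usage sketch (not part of this file), assuming a case is already open and that the SleuthkitCase instance exposes this manager through a getScoringManager() accessor; the case path and object ids below are placeholders:

    import java.util.List;
    import org.sleuthkit.datamodel.Content;
    import org.sleuthkit.datamodel.Score;
    import org.sleuthkit.datamodel.ScoringManager;
    import org.sleuthkit.datamodel.SleuthkitCase;
    import org.sleuthkit.datamodel.TskCoreException;

    public class ScoringExample {

        public static void main(String[] args) throws TskCoreException {
            // Open an existing case database (placeholder path).
            SleuthkitCase skCase = SleuthkitCase.openCase("/cases/example/case.db");
            ScoringManager scoringManager = skCase.getScoringManager(); // assumed accessor

            // Read the aggregate score of a single object (placeholder object id).
            long objId = 42;
            Score score = scoringManager.getAggregateScore(objId);
            System.out.println("Significance: " + score.getSignificance());

            // List all content in a data source with a notable aggregate score
            // (placeholder data source object id).
            long dataSourceObjId = 1;
            List<Content> notable = scoringManager.getContent(dataSourceObjId, Score.Significance.NOTABLE);
            System.out.println("Notable items: " + notable.size());
        }
    }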