Autopsy 4.1
Graphical digital forensics platform for The Sleuth Kit and other tools.
RegexQuery.java
Go to the documentation of this file.
1 /*
2  * Autopsy Forensic Browser
3  *
4  * Copyright 2011-2017 Basis Technology Corp.
5  * Contact: carrier <at> sleuthkit <dot> org
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  * http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  */
19 package org.sleuthkit.autopsy.keywordsearch;
20 
import com.google.common.base.CharMatcher;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.validator.routines.DomainValidator;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrQuery.SortClause;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.CursorMarkParams;
import org.openide.util.NbBundle;
import static org.sleuthkit.autopsy.keywordsearch.KeywordSearchSettings.MODULE_NAME;
import static org.sleuthkit.autopsy.keywordsearch.TermsComponentQuery.CREDIT_CARD_NUM_PATTERN;
import static org.sleuthkit.autopsy.keywordsearch.TermsComponentQuery.CREDIT_CARD_TRACK2_PATTERN;
import static org.sleuthkit.autopsy.keywordsearch.TermsComponentQuery.KEYWORD_SEARCH_DOCUMENT_ID;
60 
final class RegexQuery implements KeywordSearchQuery {

    public static final Logger LOGGER = Logger.getLogger(RegexQuery.class.getName());

    // Solr filter queries (e.g. data source / object id scoping) applied to every search.
    private final List<KeywordQueryFilter> filters = new ArrayList<>();

    private final KeywordList keywordList;
    private final Keyword originalKeyword; // The regular expression originalKeyword used to perform the search.
    // Solr field to search; defaults to the string-content field.
    private String field = Server.Schema.CONTENT_STR.toString();
    private final String keywordString;
    // Page size used for cursor-mark paging in performQuery().
    static final private int MAX_RESULTS_PER_CURSOR_MARK = 512;
    // Lazily-computed Lucene-escaped form of the query; guarded by the
    // synchronized escape()/isEscaped()/getEscapedQueryString() methods.
    private boolean escaped;
    private String escapedQuery;

    // Minimum length threshold used to cull implausibly short email-address hits.
    private final int MIN_EMAIL_ADDR_LENGTH = 8;

    // Accumulates hits keyed by the (possibly post-processed) matched term.
    private final ListMultimap<Keyword, KeywordHit> hitsMultiMap = ArrayListMultimap.create();

    // Lucene regular expressions do not support the following Java predefined
    // and POSIX character classes. There are other valid Java character classes
    // that are not supported by Lucene but we do not check for all of them.
    // See https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html
    // for Java regex syntax.
    // See https://lucene.apache.org/core/6_4_0/core/org/apache/lucene/util/automaton/RegExp.html
    // for Lucene syntax.
    // We use \p as a shortcut for all of the character classes of the form \p{XXX}.
    private static final CharSequence[] UNSUPPORTED_CHARS = {"\\d", "\\D", "\\w", "\\W", "\\s", "\\S", "\\n",
        "\\t", "\\r", "\\f", "\\a", "\\e", "\\v", "\\V", "\\h", "\\H", "\\p"}; //NON-NLS

    // True when the user's regex already begins/ends with ".*", so that
    // performQuery() does not add its own ".*" anchors a second time.
    private boolean queryStringContainsWildcardPrefix = false;
    private boolean queryStringContainsWildcardSuffix = false;
112  RegexQuery(KeywordList keywordList, Keyword keyword) {
113  this.keywordList = keywordList;
114  this.originalKeyword = keyword;
115  this.keywordString = keyword.getSearchTerm();
116 
117  if (this.keywordString.startsWith(".*")) {
118  this.queryStringContainsWildcardPrefix = true;
119  }
120 
121  if (this.keywordString.endsWith(".*")) {
122  this.queryStringContainsWildcardSuffix = true;
123  }
124  }
125 
    /**
     * @return The keyword list this query's keyword belongs to.
     */
    @Override
    public KeywordList getKeywordList() {
        return keywordList;
    }
130 
131  @Override
132  public boolean validate() {
133  if (keywordString.isEmpty()) {
134  return false;
135  }
136  try {
137  // First we perform regular Java regex validation to catch errors.
138  Pattern.compile(keywordString, Pattern.UNICODE_CHARACTER_CLASS);
139 
140  // Then we check for the set of Java predefined and POSIX character
141  // classes. While they are valid Lucene regex characters, they will
142  // behave differently than users may expect. E.g. the regex \d\d\d
143  // will not find 3 digits but will instead find a sequence of 3 'd's.
144  for (CharSequence c : UNSUPPORTED_CHARS) {
145  if (keywordString.contains(c)) {
146  return false;
147  }
148  }
149  return true;
150  } catch (IllegalArgumentException ex) {
151  return false;
152  }
153  }
154 
155  @Override
156  public QueryResults performQuery() throws NoOpenCoreException {
157 
158  final Server solrServer = KeywordSearch.getServer();
159  SolrQuery solrQuery = new SolrQuery();
160 
175  // We construct the query by surrounding it with slashes (to indicate it is
176  // a regular expression search) and .* as anchors (if the query doesn't
177  // already have them).
178  solrQuery.setQuery((field == null ? Server.Schema.CONTENT_STR.toString() : field) + ":/"
179  + (queryStringContainsWildcardPrefix ? "" : ".*") + getQueryString()
180  + (queryStringContainsWildcardSuffix ? "" : ".*") + "/");
181 
182  // Set the fields we want to have returned by the query.
183  solrQuery.setFields(Server.Schema.CONTENT_STR.toString(), Server.Schema.ID.toString(), Server.Schema.CHUNK_SIZE.toString());
184 
185  filters.stream()
186  .map(KeywordQueryFilter::toString)
187  .forEach(solrQuery::addFilterQuery);
188 
189  solrQuery.setRows(MAX_RESULTS_PER_CURSOR_MARK);
190  // Setting the sort order is necessary for cursor based paging to work.
191  solrQuery.setSort(SortClause.asc(Server.Schema.ID.toString()));
192 
193  String cursorMark = CursorMarkParams.CURSOR_MARK_START;
194  SolrDocumentList resultList ;
195  boolean allResultsProcessed = false;
196 
197  while (!allResultsProcessed) {
198  try {
199  solrQuery.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
200  QueryResponse response = solrServer.query(solrQuery, SolrRequest.METHOD.POST);
201  resultList = response.getResults();
202 
203  for (SolrDocument resultDoc : resultList) {
204  try {
205  List<KeywordHit> keywordHits = createKeywordHits(resultDoc);
206  for (KeywordHit hit : keywordHits) {
207  hitsMultiMap.put(new Keyword(hit.getHit(), true, true, originalKeyword.getListName(), originalKeyword.getOriginalTerm()), hit);
208  }
209  } catch (TskException ex) {
210  //
211  }
212  }
213 
214  String nextCursorMark = response.getNextCursorMark();
215  if (cursorMark.equals(nextCursorMark)) {
216  allResultsProcessed = true;
217  }
218  cursorMark = nextCursorMark;
219  } catch (KeywordSearchModuleException ex) {
220  LOGGER.log(Level.SEVERE, "Error executing Regex Solr Query: " + keywordString, ex); //NON-NLS
221  MessageNotifyUtil.Notify.error(NbBundle.getMessage(Server.class, "Server.query.exception.msg", keywordString), ex.getCause().getMessage());
222  }
223  }
224  QueryResults results = new QueryResults(this);
225  for (Keyword k : hitsMultiMap.keySet()) {
226  results.addResult(k, hitsMultiMap.get(k));
227  }
228  return results;
229  }
230 
231  private List<KeywordHit> createKeywordHits(SolrDocument solrDoc) throws TskException {
232 
233  List<KeywordHit> hits = new ArrayList<>();
234  final String docId = solrDoc.getFieldValue(Server.Schema.ID.toString()).toString();
235  final Integer chunkSize = (Integer) solrDoc.getFieldValue(Server.Schema.CHUNK_SIZE.toString());
236 
237  final Collection<Object> content_str = solrDoc.getFieldValues(Server.Schema.CONTENT_STR.toString());
238 
239  final Pattern pattern = Pattern.compile(keywordString);
240  for (Object content_obj : content_str) {
241  String content = (String) content_obj;
242  Matcher hitMatcher = pattern.matcher(content);
243  int offset = 0;
244 
245  while (hitMatcher.find(offset)) {
246  StringBuilder snippet = new StringBuilder();
247 
248  // If the location of the hit is beyond this chunk (i.e. it
249  // exists in the overlap region), we skip the hit. It will
250  // show up again as a hit in the chunk following this one.
251  if (chunkSize != null && hitMatcher.start() >= chunkSize) {
252  break;
253  }
254 
255  String hit = hitMatcher.group();
256 
257  offset = hitMatcher.end();
258 
259  // We attempt to reduce false positives for phone numbers and IP address hits
260  // by querying Solr for hits delimited by a set of known boundary characters.
261  // See KeywordSearchList.PHONE_NUMBER_REGEX for an example.
262  // Because of this the hits may contain an extra character at the beginning or end that
263  // needs to be chopped off, unless the user has supplied their own wildcard suffix
264  // as part of the regex.
265  if (!queryStringContainsWildcardSuffix
266  && (originalKeyword.getArtifactAttributeType() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER
267  || originalKeyword.getArtifactAttributeType() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_IP_ADDRESS)) {
268  if (originalKeyword.getArtifactAttributeType() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER) {
269  // For phone numbers replace all non numeric characters (except "(") at the start of the hit.
270  hit = hit.replaceAll("^[^0-9\\(]", "");
271  } else {
272  // Replace all non numeric characters at the start of the hit.
273  hit = hit.replaceAll("^[^0-9]", "");
274  }
275  // Replace all non numeric at the end of the hit.
276  hit = hit.replaceAll("[^0-9]$", "");
277  }
278 
279  if (originalKeyword.getArtifactAttributeType() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL) {
280  // Reduce false positives by eliminating email address hits that are either
281  // too short or are not for valid top level domains.
282  if (hit.length() < MIN_EMAIL_ADDR_LENGTH
283  || !DomainValidator.getInstance(true).isValidTld(hit.substring(hit.lastIndexOf('.')))) {
284  continue;
285  }
286  }
287 
288  /*
289  * If searching for credit card account numbers, do a Luhn check
290  * on the term and discard it if it does not pass.
291  */
292  if (originalKeyword.getArtifactAttributeType() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CARD_NUMBER) {
293  Matcher ccnMatcher = CREDIT_CARD_NUM_PATTERN.matcher(hit);
294  if (ccnMatcher.find()) {
295  final String ccn = CharMatcher.anyOf(" -").removeFrom(ccnMatcher.group("ccn"));
296  if (false == TermsComponentQuery.CREDIT_CARD_NUM_LUHN_CHECK.isValid(ccn)) {
297  continue;
298  }
299  } else {
300  continue;
301  }
302  }
303 
308  int maxIndex = content.length() - 1;
309  snippet.append(content.substring(Integer.max(0, hitMatcher.start() - 20), Integer.max(0, hitMatcher.start())));
310  snippet.appendCodePoint(171);
311  snippet.append(hit);
312  snippet.appendCodePoint(171);
313  snippet.append(content.substring(Integer.min(maxIndex, hitMatcher.end()), Integer.min(maxIndex, hitMatcher.end() + 20)));
314 
315  hits.add(new KeywordHit(docId, snippet.toString(), hit));
316  }
317  }
318  return hits;
319  }
320 
    /**
     * Adds a Solr filter query (e.g. a data source or object id restriction)
     * that will be applied when performQuery() runs.
     *
     * @param filter The filter to add.
     */
    @Override
    public void addFilter(KeywordQueryFilter filter) {
        this.filters.add(filter);
    }
325 
    /**
     * Sets the Solr field to search (defaults to the string-content field).
     *
     * @param field The Solr field name.
     */
    @Override
    public void setField(String field) {
        this.field = field;
    }
330 
    /**
     * Intentionally a no-op: performQuery() already anchors the regex with
     * ".*" on both sides, so substring matching needs no extra configuration.
     */
    @Override
    public void setSubstringQuery() {
    }
334 
335  @Override
336  synchronized public void escape() {
337  if (isEscaped() == false) {
338  escapedQuery = KeywordSearchUtil.escapeLuceneQuery(keywordString);
339  escaped = true;
340  }
341  }
342 
    /**
     * @return true if the Lucene-escaped form of the query has been computed.
     */
    @Override
    synchronized public boolean isEscaped() {
        return escaped;
    }
347 
    /**
     * @return Always false: this query is a regular expression, not a literal
     *         term.
     */
    @Override
    public boolean isLiteral() {
        return false;
    }
352 
    /**
     * @return The original (unescaped) regular expression search term.
     */
    @Override
    public String getQueryString() {
        return originalKeyword.getSearchTerm();
    }
357 
358  @Override
359  synchronized public String getEscapedQueryString() {
360  if (false == isEscaped()) {
361  escape();
362  }
363  return escapedQuery;
364  }
365 
376  private String getDocumentIds(Keyword keyword, KeywordHit hit) {
377  Set<String> documentIds = new HashSet<>();
378 
379  for (KeywordHit h : hitsMultiMap.get(keyword)) {
380  // Add the document id only if it is for the same object as the
381  // given hit and we haven't already seen it.
382  if (h.getSolrObjectId() == hit.getSolrObjectId() && !documentIds.contains(h.getSolrDocumentId())) {
383  documentIds.add(h.getSolrDocumentId());
384  }
385  }
386 
387  return StringUtils.join(documentIds, ",");
388  }
389 
    // TODO: Are we actually making meaningful use of the KeywordCachedArtifact
    // class?
    /**
     * Writes a blackboard artifact for a single keyword hit: either a plain
     * keyword-hit artifact (keyword + regex attributes), or, for credit card
     * number searches, an account artifact with attributes parsed from the
     * hit's snippet.
     *
     * @param foundKeyword The specific term that matched.
     * @param hit          The keyword hit to record.
     * @param snippet      Context snippet for the hit; may be null.
     * @param listName     Name of the keyword list; may be blank.
     *
     * @return The cached artifact, or null on failure (errors are logged, not
     *         thrown).
     */
    @Override
    public KeywordCachedArtifact writeSingleFileHitsToBlackBoard(Keyword foundKeyword, KeywordHit hit, String snippet, String listName) {
        // NOTE(review): this local deliberately shadows the statically
        // imported KeywordSearchSettings.MODULE_NAME used elsewhere in this
        // file — confirm both names are intended to carry the same value.
        final String MODULE_NAME = KeywordSearchModuleFactory.getModuleName();

        /*
         * Create either a "plain vanilla" keyword hit artifact with keyword and
         * regex attributes, or a credit card account artifact with attributes
         * parsed from the snippet for the hit and looked up based on the
         * parsed bank identification number.
         */
        BlackboardArtifact newArtifact;
        Collection<BlackboardAttribute> attributes = new ArrayList<>();
        if (originalKeyword.getArtifactAttributeType() != BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CARD_NUMBER) {
            attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD, MODULE_NAME, foundKeyword.getSearchTerm()));
            attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP, MODULE_NAME, getQueryString()));
            try {
                newArtifact = hit.getContent().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT);

            } catch (TskCoreException ex) {
                LOGGER.log(Level.SEVERE, "Error adding artifact for keyword hit to blackboard", ex); //NON-NLS
                return null;
            }
        } else {
            /*
             * Parse the credit card account attributes from the snippet for the
             * hit. Track 1 is tried first; track 2 fills in anything track 1
             * did not capture (see addAttributeIfNotAlreadyCaptured).
             */
            attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ACCOUNT_TYPE, MODULE_NAME, Account.Type.CREDIT_CARD.name()));
            Map<BlackboardAttribute.Type, BlackboardAttribute> parsedTrackAttributeMap = new HashMap<>();
            Matcher matcher = TermsComponentQuery.CREDIT_CARD_TRACK1_PATTERN.matcher(hit.getSnippet());
            if (matcher.find()) {
                parseTrack1Data(parsedTrackAttributeMap, matcher);
            }
            matcher = CREDIT_CARD_TRACK2_PATTERN.matcher(hit.getSnippet());
            if (matcher.find()) {
                parseTrack2Data(parsedTrackAttributeMap, matcher);
            }
            // Without a card number there is nothing to record; log and bail.
            final BlackboardAttribute ccnAttribute = parsedTrackAttributeMap.get(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CARD_NUMBER));
            if (ccnAttribute == null || StringUtils.isBlank(ccnAttribute.getValueString())) {
                if (hit.isArtifactHit()) {
                    LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for artifact keyword hit: term = %s, snippet = '%s', artifact id = %d", foundKeyword.getSearchTerm(), hit.getSnippet(), hit.getArtifact().getArtifactID())); //NON-NLS
                } else {
                    LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for content keyword hit: term = %s, snippet = '%s', object id = %d", foundKeyword.getSearchTerm(), hit.getSnippet(), hit.getContent().getId())); //NON-NLS
                }
                return null;
            }
            attributes.addAll(parsedTrackAttributeMap.values());

            /*
             * Look up the bank name, scheme, etc. attributes for the bank
             * identification number (BIN).
             *
             * NOTE(review): assumes the parsed card number has at least 8
             * digits and that the first 8 characters are numeric; a shorter or
             * separator-bearing value would throw here — confirm the track
             * patterns guarantee this.
             */
            final int bin = Integer.parseInt(ccnAttribute.getValueString().substring(0, 8));
            CreditCards.BankIdentificationNumber binInfo = CreditCards.getBINInfo(bin);
            if (binInfo != null) {
                binInfo.getScheme().ifPresent(scheme
                        -> attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CARD_SCHEME, MODULE_NAME, scheme)));
                binInfo.getCardType().ifPresent(cardType
                        -> attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CARD_TYPE, MODULE_NAME, cardType)));
                binInfo.getBrand().ifPresent(brand
                        -> attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_BRAND_NAME, MODULE_NAME, brand)));
                binInfo.getBankName().ifPresent(bankName
                        -> attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_BANK_NAME, MODULE_NAME, bankName)));
                binInfo.getBankPhoneNumber().ifPresent(phoneNumber
                        -> attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, MODULE_NAME, phoneNumber)));
                binInfo.getBankURL().ifPresent(url
                        -> attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, MODULE_NAME, url)));
                binInfo.getCountry().ifPresent(country
                        -> attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNTRY, MODULE_NAME, country)));
                binInfo.getBankCity().ifPresent(city
                        -> attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CITY, MODULE_NAME, city)));
            }

            /*
             * If the hit is from unused or unallocated space, record the Solr
             * document id to support showing just the chunk that contained the
             * hit.
             */
            if (hit.getContent() instanceof AbstractFile) {
                AbstractFile file = (AbstractFile) hit.getContent();
                if (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS
                        || file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) {
                    attributes.add(new BlackboardAttribute(KEYWORD_SEARCH_DOCUMENT_ID, MODULE_NAME, hit.getSolrDocumentId()));
                }
            }

            /*
             * Create an account artifact.
             */
            try {
                newArtifact = hit.getContent().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ACCOUNT);
            } catch (TskCoreException ex) {
                LOGGER.log(Level.SEVERE, "Error adding artifact for account to blackboard", ex); //NON-NLS
                return null;
            }
        }

        // Attributes common to both artifact flavors.
        if (StringUtils.isNotBlank(listName)) {
            attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, listName));
        }
        if (snippet != null) {
            attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW, MODULE_NAME, snippet));
        }
        if (hit.isArtifactHit()) {
            attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, hit.getArtifact().getArtifactID()));
        }

        // NOTE(review): persists the enum's ordinal() — safe only while the
        // QueryType declaration order never changes.
        attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_SEARCH_TYPE, MODULE_NAME, KeywordSearch.QueryType.REGEX.ordinal()));

        try {
            newArtifact.addAttributes(attributes);
            KeywordCachedArtifact writeResult = new KeywordCachedArtifact(newArtifact);
            writeResult.add(attributes);
            return writeResult;
        } catch (TskCoreException e) {
            LOGGER.log(Level.SEVERE, "Error adding bb attributes for terms search artifact", e); //NON-NLS
            return null;
        }
    }
525 
    /**
     * Parses credit card track 2 fields (card number, expiration, service
     * code, discretionary data, LRC) from the named capture groups of the
     * given matcher into the attribute map, skipping types already captured.
     *
     * @param attributesMap Map of attributes by type, updated in place.
     * @param matcher       A matcher that has already matched track 2 data.
     */
    static private void parseTrack2Data(Map<BlackboardAttribute.Type, BlackboardAttribute> attributesMap, Matcher matcher) {
        addAttributeIfNotAlreadyCaptured(attributesMap, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CARD_NUMBER, "accountNumber", matcher);
        addAttributeIfNotAlreadyCaptured(attributesMap, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CARD_EXPIRATION, "expiration", matcher);
        addAttributeIfNotAlreadyCaptured(attributesMap, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CARD_SERVICE_CODE, "serviceCode", matcher);
        addAttributeIfNotAlreadyCaptured(attributesMap, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CARD_DISCRETIONARY, "discretionary", matcher);
        addAttributeIfNotAlreadyCaptured(attributesMap, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CARD_LRC, "LRC", matcher);
    }
541 
    /**
     * Parses credit card track 1 fields from the named capture groups of the
     * given matcher into the attribute map: the track-2 fields plus the
     * cardholder name.
     *
     * @param attributeMap Map of attributes by type, updated in place.
     * @param matcher      A matcher that has already matched track 1 data.
     */
    static private void parseTrack1Data(Map<BlackboardAttribute.Type, BlackboardAttribute> attributeMap, Matcher matcher) {
        parseTrack2Data(attributeMap, matcher);
        addAttributeIfNotAlreadyCaptured(attributeMap, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME_PERSON, "name", matcher);
    }
555 
567  static private void addAttributeIfNotAlreadyCaptured(Map<BlackboardAttribute.Type, BlackboardAttribute> attributeMap, BlackboardAttribute.ATTRIBUTE_TYPE attrType, String groupName, Matcher matcher) {
568  BlackboardAttribute.Type type = new BlackboardAttribute.Type(attrType);
569  attributeMap.computeIfAbsent(type, (BlackboardAttribute.Type t) -> {
570  String value = matcher.group(groupName);
571  if (attrType.equals(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CARD_NUMBER)) {
572  attributeMap.put(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD),
573  new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD, MODULE_NAME, value));
574  value = CharMatcher.anyOf(" -").removeFrom(value);
575  }
576  if (StringUtils.isNotBlank(value)) {
577  return new BlackboardAttribute(attrType, MODULE_NAME, value);
578  }
579  return null;
580  });
581  }
582 
583 }

Copyright © 2012-2016 Basis Technology. Generated on: Mon Apr 24 2017
This work is licensed under a Creative Commons Attribution-Share Alike 3.0 United States License.