Server.java
1 /*
2  * Autopsy Forensic Browser
3  *
4  * Copyright 2011-2021 Basis Technology Corp.
5  * Contact: carrier <at> sleuthkit <dot> org
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  * http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  */
19 package org.sleuthkit.autopsy.keywordsearch;
20 
21 import com.google.common.util.concurrent.ThreadFactoryBuilder;
22 import java.awt.event.ActionEvent;
23 import java.beans.PropertyChangeListener;
24 import java.io.BufferedReader;
25 import java.io.BufferedWriter;
26 import java.io.File;
27 import java.io.FileOutputStream;
28 import java.io.IOException;
29 import java.io.InputStream;
30 import java.io.InputStreamReader;
31 import java.io.OutputStream;
32 import java.io.OutputStreamWriter;
33 import java.net.ConnectException;
34 import java.net.DatagramSocket;
35 import java.net.ServerSocket;
36 import java.net.SocketException;
37 import java.nio.charset.Charset;
38 import java.nio.file.Files;
39 import java.nio.file.OpenOption;
40 import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
41 import java.nio.file.Path;
42 import java.nio.file.Paths;
43 import java.util.ArrayList;
44 import java.util.Arrays;
45 import java.util.Collections;
46 import java.util.Iterator;
47 import java.util.List;
48 import java.util.Random;
49 import java.util.concurrent.ScheduledThreadPoolExecutor;
50 import java.util.concurrent.TimeUnit;
51 import java.util.concurrent.locks.ReentrantReadWriteLock;
52 import java.util.logging.Level;
53 import javax.swing.AbstractAction;
54 import org.apache.commons.io.FileUtils;
55 import java.util.concurrent.TimeoutException;
56 import java.util.concurrent.atomic.AtomicBoolean;
57 import java.util.concurrent.atomic.AtomicInteger;
58 import java.util.stream.Collectors;
59 import static java.util.stream.Collectors.toList;
60 import org.apache.solr.client.solrj.SolrQuery;
61 import org.apache.solr.client.solrj.SolrRequest;
62 import org.apache.solr.client.solrj.SolrServerException;
63 import org.apache.solr.client.solrj.SolrClient;
64 import org.apache.solr.client.solrj.impl.HttpSolrClient;
65 import org.apache.solr.client.solrj.impl.CloudSolrClient;
66 import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient;
67 import org.apache.solr.client.solrj.impl.XMLResponseParser;
68 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
69 import org.apache.solr.client.solrj.response.CollectionAdminResponse;
70 import org.apache.solr.client.solrj.request.CoreAdminRequest;
71 import org.apache.solr.client.solrj.response.CoreAdminResponse;
72 import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException;
73 import org.apache.solr.client.solrj.request.QueryRequest;
74 import org.apache.solr.client.solrj.response.QueryResponse;
75 import org.apache.solr.client.solrj.response.TermsResponse;
76 import org.apache.solr.client.solrj.response.TermsResponse.Term;
77 import org.apache.solr.common.SolrDocument;
78 import org.apache.solr.common.SolrDocumentList;
79 import org.apache.solr.common.SolrException;
80 import org.apache.solr.common.SolrInputDocument;
81 import org.apache.solr.common.util.NamedList;
82 import org.openide.modules.InstalledFileLocator;
83 import org.openide.modules.Places;
84 import org.openide.util.NbBundle;
85 import org.openide.windows.WindowManager;
102 import org.sleuthkit.datamodel.Content;
103 
108 public class Server {
109 
113  public static enum Schema {
114 
115  ID {
116  @Override
117  public String toString() {
118  return "id"; //NON-NLS
119  }
120  },
121  IMAGE_ID {
122  @Override
123  public String toString() {
124  return "image_id"; //NON-NLS
125  }
126  },
127  // This is not stored or indexed. It is copied to the text field by the schema
128  CONTENT {
129  @Override
130  public String toString() {
131  return "content"; //NON-NLS
132  }
133  },
134  // String representation for regular expression searching
135  CONTENT_STR {
136  @Override
137  public String toString() {
138  return "content_str"; //NON-NLS
139  }
140  },
141  // default search field. Populated by schema
142  TEXT {
143  @Override
144  public String toString() {
145  return "text"; //NON-NLS
146  }
147  },
148  // no longer populated. Was used for regular expression searching.
149  // Should not be used.
150  CONTENT_WS {
151  @Override
152  public String toString() {
153  return "content_ws"; //NON-NLS
154  }
155  },
156  CONTENT_JA {
157  @Override
158  public String toString() {
159  return "content_ja"; //NON-NLS
160  }
161  },
162  LANGUAGE {
163  @Override
164  public String toString() {
165  return "language"; //NON-NLS
166  }
167  },
168  FILE_NAME {
169  @Override
170  public String toString() {
171  return "file_name"; //NON-NLS
172  }
173  },
174  // note that we no longer store or index this field
175  CTIME {
176  @Override
177  public String toString() {
178  return "ctime"; //NON-NLS
179  }
180  },
181  // note that we no longer store or index this field
182  ATIME {
183  @Override
184  public String toString() {
185  return "atime"; //NON-NLS
186  }
187  },
188  // note that we no longer store or index this field
189  MTIME {
190  @Override
191  public String toString() {
192  return "mtime"; //NON-NLS
193  }
194  },
195  // note that we no longer store or index this field
196  CRTIME {
197  @Override
198  public String toString() {
199  return "crtime"; //NON-NLS
200  }
201  },
202  NUM_CHUNKS {
203  @Override
204  public String toString() {
205  return "num_chunks"; //NON-NLS
206  }
207  },
208  CHUNK_SIZE {
209  @Override
210  public String toString() {
211  return "chunk_size"; //NON-NLS
212  }
213  },
219  TERMFREQ {
220  @Override
221  public String toString() {
222  return "termfreq"; //NON-NLS
223  }
224  }
225  };
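 /*
  * Illustration (not part of the original source): the Schema constants above
  * supply the Solr field names used when queries and documents are built
  * elsewhere in this module. A minimal sketch of that pattern, assuming a
  * hypothetical SolrClient named "client":
  *
  *     SolrQuery q = new SolrQuery();
  *     q.setQuery(Server.Schema.TEXT.toString() + ":keyword");  // search the default "text" field
  *     q.addField(Server.Schema.ID.toString());                 // return the "id" field
  *     QueryResponse resp = client.query(q);
  */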
226 
227  public static final String HL_ANALYZE_CHARS_UNLIMITED = "500000"; //max 1MB per chunk; use -1 for unlimited, but the -1 option may not be supported (undocumented)
228  //max content size we can send to Solr
229  public static final long MAX_CONTENT_SIZE = 1L * 31 * 1024 * 1024;
230  private static final Logger logger = Logger.getLogger(Server.class.getName());
231  public static final String CORE_EVT = "CORE_EVT"; //NON-NLS
232  @Deprecated
233  public static final char ID_CHUNK_SEP = '_';
234  public static final String CHUNK_ID_SEPARATOR = "_";
235  private String javaPath = "java";
236  public static final Charset DEFAULT_INDEXED_TEXT_CHARSET = Charset.forName("UTF-8");
237  private Process curSolrProcess = null;
238  static final String PROPERTIES_FILE = KeywordSearchSettings.MODULE_NAME;
239  static final String PROPERTIES_CURRENT_SERVER_PORT = "IndexingServerPort"; //NON-NLS
240  static final String PROPERTIES_CURRENT_STOP_PORT = "IndexingServerStopPort"; //NON-NLS
241  private static final String KEY = "jjk#09s"; //NON-NLS
242  static final String DEFAULT_SOLR_SERVER_HOST = "localhost"; //NON-NLS
243  static final int DEFAULT_SOLR_SERVER_PORT = 23232;
244  static final int DEFAULT_SOLR_STOP_PORT = 34343;
245  private int localSolrServerPort = 0;
246  private int localSolrStopPort = 0;
247  private File localSolrFolder;
248  private static final String SOLR = "solr";
249  private static final String CORE_PROPERTIES = "core.properties";
250  private static final boolean DEBUG = false;//(Version.getBuildType() == Version.Type.DEVELOPMENT);
251  private static final int NUM_COLLECTION_CREATION_RETRIES = 5;
252  private static final int NUM_EMBEDDED_SERVER_RETRIES = 12; // attempt to connect to embedded Solr server for 1 minute
253  private static final int EMBEDDED_SERVER_RETRY_WAIT_SEC = 5;
254 
255  public enum CORE_EVT_STATES {
256 
257  STOPPED, STARTED
258  };
259 
260  private enum SOLR_VERSION {
261 
262  SOLR8, SOLR4
263  };
264 
265  // A reference to the locally running Solr instance.
266  private HttpSolrClient localSolrServer = null;
267  private SOLR_VERSION localServerVersion = SOLR_VERSION.SOLR8; // start local Solr 8 by default
268 
269  // A reference to the remote/network running Solr instance.
270  private HttpSolrClient remoteSolrServer;
271 
272  private Collection currentCollection;
273  private final ReentrantReadWriteLock currentCoreLock;
274 
275  private final ServerAction serverAction;
276  private InputStreamPrinterThread errorRedirectThread;
277 
282  Server() {
283  initSettings();
284 
285  localSolrServer = getSolrClient("http://localhost:" + localSolrServerPort + "/solr");
286 
287  serverAction = new ServerAction();
288  File solr8Folder = InstalledFileLocator.getDefault().locate("solr", Server.class.getPackage().getName(), false); //NON-NLS
289  File solr4Folder = InstalledFileLocator.getDefault().locate("solr4", Server.class.getPackage().getName(), false); //NON-NLS
290 
291  // Figure out where Java is located. The Java home location
292  // will be passed as the SOLR_JAVA_HOME environment
293  // variable to the Solr script but it can be overridden by the user in
294  // either autopsy-solr.cmd or autopsy-solr-in.cmd.
295  javaPath = PlatformUtil.getJavaPath();
296 
297  Path solr8Home = Paths.get(PlatformUtil.getUserDirectory().getAbsolutePath(), "solr"); //NON-NLS
298  try {
299  // Always copy the config files, as they may have changed. Otherwise a potentially stale Solr configuration would be used.
300  if (!solr8Home.toFile().exists()) {
301  Files.createDirectory(solr8Home);
302  } else {
303  // delete the configsets directory as the Autopsy configset could have changed
304  FileUtil.deleteDir(solr8Home.resolve("configsets").toFile());
305  }
306  Files.copy(Paths.get(solr8Folder.getAbsolutePath(), "server", "solr", "solr.xml"), solr8Home.resolve("solr.xml"), REPLACE_EXISTING); //NON-NLS
307  Files.copy(Paths.get(solr8Folder.getAbsolutePath(), "server", "solr", "zoo.cfg"), solr8Home.resolve("zoo.cfg"), REPLACE_EXISTING); //NON-NLS
308  FileUtils.copyDirectory(Paths.get(solr8Folder.getAbsolutePath(), "server", "solr", "configsets").toFile(), solr8Home.resolve("configsets").toFile()); //NON-NLS
309  } catch (IOException ex) {
310  logger.log(Level.SEVERE, "Failed to create Solr 8 home folder:", ex); //NON-NLS
311  }
312 
313  Path solr4Home = Paths.get(PlatformUtil.getUserDirectory().getAbsolutePath(), "solr4"); //NON-NLS
314  try {
315  // Always copy the config files, as they may have changed. Otherwise a potentially stale Solr configuration would be used.
316  if (!solr4Home.toFile().exists()) {
317  Files.createDirectory(solr4Home);
318  }
319  Files.copy(Paths.get(solr4Folder.getAbsolutePath(), "solr", "solr.xml"), solr4Home.resolve("solr.xml"), REPLACE_EXISTING); //NON-NLS
320  Files.copy(Paths.get(solr4Folder.getAbsolutePath(), "solr", "zoo.cfg"), solr4Home.resolve("zoo.cfg"), REPLACE_EXISTING); //NON-NLS
321  } catch (IOException ex) {
322  logger.log(Level.SEVERE, "Failed to create Solr 4 home folder:", ex); //NON-NLS
323  }
324 
325  currentCoreLock = new ReentrantReadWriteLock(true);
326 
327  logger.log(Level.INFO, "Created Server instance using Java at {0}", javaPath); //NON-NLS
328  }
329 
330  private void initSettings() {
331 
332  if (ModuleSettings.settingExists(PROPERTIES_FILE, PROPERTIES_CURRENT_SERVER_PORT)) {
333  try {
334  localSolrServerPort = Integer.decode(ModuleSettings.getConfigSetting(PROPERTIES_FILE, PROPERTIES_CURRENT_SERVER_PORT));
335  } catch (NumberFormatException nfe) {
336  logger.log(Level.WARNING, "Could not decode indexing server port, value was not a valid port number, using the default. ", nfe); //NON-NLS
337  localSolrServerPort = DEFAULT_SOLR_SERVER_PORT;
338  }
339  } else {
340  localSolrServerPort = DEFAULT_SOLR_SERVER_PORT;
341  ModuleSettings.setConfigSetting(PROPERTIES_FILE, PROPERTIES_CURRENT_SERVER_PORT, String.valueOf(localSolrServerPort));
342  }
343 
344  if (ModuleSettings.settingExists(PROPERTIES_FILE, PROPERTIES_CURRENT_STOP_PORT)) {
345  try {
346  localSolrStopPort = Integer.decode(ModuleSettings.getConfigSetting(PROPERTIES_FILE, PROPERTIES_CURRENT_STOP_PORT));
347  } catch (NumberFormatException nfe) {
348  logger.log(Level.WARNING, "Could not decode indexing server stop port, value was not a valid port number, using default", nfe); //NON-NLS
349  localSolrStopPort = DEFAULT_SOLR_STOP_PORT;
350  }
351  } else {
352  localSolrStopPort = DEFAULT_SOLR_STOP_PORT;
353  ModuleSettings.setConfigSetting(PROPERTIES_FILE, PROPERTIES_CURRENT_STOP_PORT, String.valueOf(localSolrStopPort));
354  }
355  }
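 /*
  * Illustration (not part of the original source): with the defaults above, the
  * keyword search module's settings end up holding entries along the lines of
  * the following sketch:
  *
  *     IndexingServerPort=23232
  *     IndexingServerStopPort=34343
  */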
356 
357  private HttpSolrClient getSolrClient(String solrUrl) {
358  int connectionTimeoutMs = org.sleuthkit.autopsy.keywordsearch.UserPreferences.getConnectionTimeout();
359  return new HttpSolrClient.Builder(solrUrl)
360  .withSocketTimeout(connectionTimeoutMs)
361  .withConnectionTimeout(connectionTimeoutMs)
362  .withResponseParser(new XMLResponseParser())
363  .build();
364  }
365 
366  private ConcurrentUpdateSolrClient getConcurrentClient(String solrUrl) {
367  int numThreads = org.sleuthkit.autopsy.keywordsearch.UserPreferences.getNumThreads();
368  int numDocs = org.sleuthkit.autopsy.keywordsearch.UserPreferences.getDocumentsQueueSize();
369  int connectionTimeoutMs = org.sleuthkit.autopsy.keywordsearch.UserPreferences.getConnectionTimeout();
370  logger.log(Level.INFO, "Creating new ConcurrentUpdateSolrClient: {0}", solrUrl); //NON-NLS
371  logger.log(Level.INFO, "Queue size = {0}, Number of threads = {1}, Connection Timeout (ms) = {2}", new Object[]{numDocs, numThreads, connectionTimeoutMs}); //NON-NLS
372  ConcurrentUpdateSolrClient client = new ConcurrentUpdateSolrClient.Builder(solrUrl)
373  .withQueueSize(numDocs)
374  .withThreadCount(numThreads)
375  .withSocketTimeout(connectionTimeoutMs)
376  .withConnectionTimeout(connectionTimeoutMs)
377  .withResponseParser(new XMLResponseParser())
378  .build();
379 
380  return client;
381  }
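 /*
  * Illustration (not part of the original source): a ConcurrentUpdateSolrClient
  * queues documents and sends them from background threads, so callers only add
  * documents and commit. A hedged sketch, assuming a hypothetical client named
  * "updateClient" and made-up field values:
  *
  *     SolrInputDocument doc = new SolrInputDocument();
  *     doc.addField(Schema.ID.toString(), "1");
  *     doc.addField(Schema.CONTENT.toString(), "example text");
  *     updateClient.add(doc);     // queued and sent asynchronously
  *     updateClient.commit();     // make the document searchable
  */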
382 
383  private CloudSolrClient getCloudSolrClient(String host, String port, String defaultCollectionName) throws KeywordSearchModuleException {
384  List<String> solrServerList = getSolrServerList(host, port);
385  List<String> solrUrls = new ArrayList<>();
386  for (String server : solrServerList) {
387  solrUrls.add("http://" + server + "/solr");
388  logger.log(Level.INFO, "Using Solr server: {0}", server);
389  }
390 
391  logger.log(Level.INFO, "Creating new CloudSolrClient"); //NON-NLS
392  int connectionTimeoutMs = org.sleuthkit.autopsy.keywordsearch.UserPreferences.getConnectionTimeout();
393  CloudSolrClient client = new CloudSolrClient.Builder(solrUrls)
394  .withConnectionTimeout(connectionTimeoutMs)
395  .withSocketTimeout(connectionTimeoutMs)
396  .withResponseParser(new XMLResponseParser())
397  .build();
398  if (!defaultCollectionName.isEmpty()) {
399  client.setDefaultCollection(defaultCollectionName);
400  }
401  client.connect();
402  return client;
403  }
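 /*
  * Illustration (not part of the original source): once a default collection is
  * set, a CloudSolrClient routes requests without naming the collection on each
  * call. A hedged sketch, assuming a hypothetical client named "cloudClient":
  *
  *     QueryResponse resp = cloudClient.query(new SolrQuery("*:*"));
  *     long numDocs = resp.getResults().getNumFound();
  */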
404 
405  @Override
406  public void finalize() throws java.lang.Throwable {
407  stop();
408  super.finalize();
409  }
410 
411  public void addServerActionListener(PropertyChangeListener l) {
412  serverAction.addPropertyChangeListener(l);
413  }
414 
415  int getLocalSolrServerPort() {
416  return localSolrServerPort;
417  }
418 
419  int getLocalSolrStopPort() {
420  return localSolrStopPort;
421  }
422 
426  private static class InputStreamPrinterThread extends Thread {
427 
428  InputStream stream;
429  OutputStream out;
430  volatile boolean doRun = true;
431 
432  InputStreamPrinterThread(InputStream stream, String type) {
433  this.stream = stream;
434  try {
435  final String log = Places.getUserDirectory().getAbsolutePath()
436  + File.separator + "var" + File.separator + "log" //NON-NLS
437  + File.separator + "solr.log." + type; //NON-NLS
438  File outputFile = new File(log.concat(".0"));
439  File first = new File(log.concat(".1"));
440  File second = new File(log.concat(".2"));
441  if (second.exists()) {
442  second.delete();
443  }
444  if (first.exists()) {
445  first.renameTo(second);
446  }
447  if (outputFile.exists()) {
448  outputFile.renameTo(first);
449  } else {
450  outputFile.createNewFile();
451  }
452  out = new FileOutputStream(outputFile);
453 
454  } catch (Exception ex) {
455  logger.log(Level.WARNING, "Failed to create solr log file", ex); //NON-NLS
456  }
457  }
458 
459  void stopRun() {
460  doRun = false;
461  }
462 
463  @Override
464  public void run() {
465 
466  try (InputStreamReader isr = new InputStreamReader(stream);
467  BufferedReader br = new BufferedReader(isr);
468  OutputStreamWriter osw = new OutputStreamWriter(out, PlatformUtil.getDefaultPlatformCharset());
469  BufferedWriter bw = new BufferedWriter(osw);) {
470 
471  String line = null;
472  while (doRun && (line = br.readLine()) != null) {
473  bw.write(line);
474  bw.newLine();
475  if (DEBUG) {
476  //flush buffers if dev version for debugging
477  bw.flush();
478  }
479  }
480  bw.flush();
481  } catch (IOException ex) {
482  logger.log(Level.SEVERE, "Error redirecting Solr output stream", ex); //NON-NLS
483  }
484  }
485  }
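 /*
  * Illustration (not part of the original source): a thread like the one above
  * is typically attached to a child process's output stream so the stream is
  * drained into the rotating solr.log.* files. A hedged sketch, assuming a
  * hypothetical Process named "proc":
  *
  *     InputStreamPrinterThread errThread =
  *             new InputStreamPrinterThread(proc.getErrorStream(), "err");
  *     errThread.start();
  *     // ... later, when shutting the process down:
  *     errThread.stopRun();
  */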
486 
496  private Process runLocalSolr8ControlCommand(List<String> solrArguments) throws IOException {
497  final String MAX_SOLR_MEM_MB_PAR = "-Xmx" + UserPreferences.getMaxSolrVMSize() + "m"; //NON-NLS
498 
499  // This is our customized version of the Solr batch script to start/stop Solr.
500  File solr8Folder = InstalledFileLocator.getDefault().locate("solr", Server.class.getPackage().getName(), false); //NON-NLS
501  Path solr8CmdPath;
502  if (PlatformUtil.isWindowsOS()) {
503  solr8CmdPath = Paths.get(solr8Folder.getAbsolutePath(), "bin", "autopsy-solr.cmd"); //NON-NLS
504  } else {
505  solr8CmdPath = Paths.get(solr8Folder.getAbsolutePath(), "bin", "autopsy-solr"); //NON-NLS
506  }
507  Path solr8Home = Paths.get(PlatformUtil.getUserDirectory().getAbsolutePath(), "solr"); //NON-NLS
508 
509  List<String> commandLine = new ArrayList<>();
510  commandLine.add(solr8CmdPath.toString());
511  commandLine.addAll(solrArguments);
512 
513  ProcessBuilder solrProcessBuilder = new ProcessBuilder(commandLine);
514  solrProcessBuilder.directory(solr8Folder);
515 
516  // Redirect stdout and stderr to files to prevent blocking.
517  Path solrStdoutPath = Paths.get(Places.getUserDirectory().getAbsolutePath(), "var", "log", "solr.log.stdout"); //NON-NLS
518  solrProcessBuilder.redirectOutput(solrStdoutPath.toFile());
519 
520  Path solrStderrPath = Paths.get(Places.getUserDirectory().getAbsolutePath(), "var", "log", "solr.log.stderr"); //NON-NLS
521  solrProcessBuilder.redirectError(solrStderrPath.toFile());
522 
523  // get the path to the JRE folder. That's what Solr needs as SOLR_JAVA_HOME
524  String jreFolderPath = new File(javaPath).getParentFile().getParentFile().getAbsolutePath();
525 
526  solrProcessBuilder.environment().put("SOLR_JAVA_HOME", jreFolderPath); // NON-NLS
527  solrProcessBuilder.environment().put("SOLR_HOME", solr8Home.toString()); // NON-NLS
528  solrProcessBuilder.environment().put("STOP_KEY", KEY); // NON-NLS
529  solrProcessBuilder.environment().put("SOLR_JAVA_MEM", MAX_SOLR_MEM_MB_PAR); // NON-NLS
530  logger.log(Level.INFO, "Setting Solr 8 directory: {0}", solr8Folder.toString()); //NON-NLS
531  logger.log(Level.INFO, "Running Solr 8 command: {0} from {1}", new Object[]{solrProcessBuilder.command(), solr8Folder.toString()}); //NON-NLS
532  Process process = solrProcessBuilder.start();
533  logger.log(Level.INFO, "Finished running Solr 8 command"); //NON-NLS
534  return process;
535  }
536 
546  private Process runLocalSolr4ControlCommand(List<String> solrArguments) throws IOException {
547  final String MAX_SOLR_MEM_MB_PAR = "-Xmx" + UserPreferences.getMaxSolrVMSize() + "m"; //NON-NLS
548  File solr4Folder = InstalledFileLocator.getDefault().locate("solr4", Server.class.getPackage().getName(), false); //NON-NLS
549 
550  List<String> commandLine = new ArrayList<>();
551  commandLine.add(javaPath);
552  commandLine.add(MAX_SOLR_MEM_MB_PAR);
553  commandLine.add("-DSTOP.PORT=" + localSolrStopPort); //NON-NLS
554  commandLine.add("-Djetty.port=" + localSolrServerPort); //NON-NLS
555  commandLine.add("-DSTOP.KEY=" + KEY); //NON-NLS
556  commandLine.add("-jar"); //NON-NLS
557  commandLine.add("start.jar"); //NON-NLS
558 
559  commandLine.addAll(solrArguments);
560 
561  ProcessBuilder solrProcessBuilder = new ProcessBuilder(commandLine);
562  solrProcessBuilder.directory(solr4Folder);
563 
564  // Redirect stdout and stderr to files to prevent blocking.
565  Path solrStdoutPath = Paths.get(Places.getUserDirectory().getAbsolutePath(), "var", "log", "solr.log.stdout"); //NON-NLS
566  solrProcessBuilder.redirectOutput(solrStdoutPath.toFile());
567 
568  Path solrStderrPath = Paths.get(Places.getUserDirectory().getAbsolutePath(), "var", "log", "solr.log.stderr"); //NON-NLS
569  solrProcessBuilder.redirectError(solrStderrPath.toFile());
570 
571  logger.log(Level.INFO, "Running Solr 4 command: {0}", solrProcessBuilder.command()); //NON-NLS
572  Process process = solrProcessBuilder.start();
573  logger.log(Level.INFO, "Finished running Solr 4 command"); //NON-NLS
574  return process;
575  }
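 /*
  * Note (descriptive, not part of the original source): the -DSTOP.PORT and
  * -DSTOP.KEY system properties set above are what let the same command be
  * reused with the "--stop" argument in stopLocalSolr() to shut the embedded
  * Jetty instance down, and getSolrPIDs() searches for the -DSTOP.KEY string to
  * locate stray Solr 4 processes.
  */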
576 
582  List<Long> getSolrPIDs() {
583  List<Long> pids = new ArrayList<>();
584 
585  //NOTE: this needs to be in sync with the process start string in start()
586  final String pidsQuery = "-DSTOP.KEY=" + KEY + "%start.jar"; //NON-NLS
587 
588  long[] pidsArr = PlatformUtil.getJavaPIDs(pidsQuery);
589  if (pidsArr != null) {
590  for (int i = 0; i < pidsArr.length; ++i) {
591  pids.add(pidsArr[i]);
592  }
593  }
594 
595  return pids;
596  }
597 
602  void killSolr() {
603  List<Long> solrPids = getSolrPIDs();
604  for (long pid : solrPids) {
605  logger.log(Level.INFO, "Trying to kill old Solr process, PID: {0}", pid); //NON-NLS
606  PlatformUtil.killProcess(pid);
607  }
608  }
609 
610  void start() throws KeywordSearchModuleException, SolrServerNoPortException, SolrServerException {
611  startLocalSolr(SOLR_VERSION.SOLR8);
612  }
613 
614  private void configureSolrConnection(Case theCase, Index index) throws KeywordSearchModuleException, SolrServerNoPortException {
615 
616  try {
617  if (theCase.getCaseType() == CaseType.SINGLE_USER_CASE) {
618 
619  // makes sure the proper local Solr server is running
620  if (IndexFinder.getCurrentSolrVersion().equals(index.getSolrVersion())) {
621  startLocalSolr(SOLR_VERSION.SOLR8);
622  } else {
623  startLocalSolr(SOLR_VERSION.SOLR4);
624  }
625 
626  // check if the local Solr server is running
627  if (!this.isLocalSolrRunning()) {
628  logger.log(Level.SEVERE, "Local Solr server is not running"); //NON-NLS
629  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.openCore.exception.msg"));
630  }
631  } else {
632  // create SolrJ client to connect to remote Solr server
633  remoteSolrServer = configureMultiUserConnection(theCase, index, "");
634 
635  // test the connection
636  connectToSolrServer(remoteSolrServer);
637  }
638  } catch (SolrServerException | IOException ex) {
639  throw new KeywordSearchModuleException(NbBundle.getMessage(Server.class, "Server.connect.exception.msg", ex.getLocalizedMessage()), ex);
640  }
641  }
642 
657  private HttpSolrClient configureMultiUserConnection(Case theCase, Index index, String name) throws KeywordSearchModuleException {
658 
659  // read Solr connection info from user preferences, unless "solrserver.txt" is present
660  IndexingServerProperties properties = getMultiUserServerProperties(theCase.getCaseDirectory());
661  if (properties.host.isEmpty() || properties.port.isEmpty()) {
662  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.connectionInfoMissing.exception.msg", index.getSolrVersion()));
663  }
664  String solrUrl = "http://" + properties.host + ":" + properties.port + "/solr";
665 
666  if (!name.isEmpty()) {
667  solrUrl = solrUrl + "/" + name;
668  }
669 
670  // create SolrJ client to connect to remote Solr server
671  return getSolrClient(solrUrl);
672  }
673 
679  @NbBundle.Messages({
680  "Server.status.failed.msg=Local Solr server did not respond to status request. This may be because the server failed to start or is taking too long to initialize.",})
681  synchronized void startLocalSolr(SOLR_VERSION version) throws KeywordSearchModuleException, SolrServerNoPortException, SolrServerException {
682 
683  logger.log(Level.INFO, "Starting local Solr " + version + " server"); //NON-NLS
684  if (version == SOLR_VERSION.SOLR8) {
685  localSolrFolder = InstalledFileLocator.getDefault().locate("solr", Server.class.getPackage().getName(), false); //NON-NLS
686  } else {
687  // solr4
688  localSolrFolder = InstalledFileLocator.getDefault().locate("solr4", Server.class.getPackage().getName(), false); //NON-NLS
689  }
690 
691  if (isLocalSolrRunning()) {
692  if (localServerVersion.equals(version)) {
693  // this version of local server is already running
694  logger.log(Level.INFO, "Local Solr " + version + " server is already running"); //NON-NLS
695  return;
696  } else {
697  // wrong version of local server is running, stop it
698  stop();
699  }
700  }
701 
702  // set which version of local server is currently running
703  localServerVersion = version;
704 
705  if (!isPortAvailable(localSolrServerPort)) {
706  // There is something already listening on our port. Let's see if
707  // this is from an earlier run that didn't successfully shut down
708  // and if so kill it.
709  final List<Long> pids = this.getSolrPIDs();
710 
711  // If the culprit listening on the port is not a Solr process
712  // we refuse to start.
713  if (pids.isEmpty()) {
714  throw new SolrServerNoPortException(localSolrServerPort);
715  }
716 
717  // Ok, we've tried to stop it above but there still appears to be
718  // a Solr process listening on our port so we forcefully kill it.
719  killSolr();
720 
721  // If either of the ports are still in use after our attempt to kill
722  // previously running processes we give up and throw an exception.
723  if (!isPortAvailable(localSolrServerPort)) {
724  throw new SolrServerNoPortException(localSolrServerPort);
725  }
726  if (!isPortAvailable(localSolrStopPort)) {
727  throw new SolrServerNoPortException(localSolrStopPort);
728  }
729  }
730 
731  if (isPortAvailable(localSolrServerPort)) {
732  logger.log(Level.INFO, "Port [{0}] available, starting Solr", localSolrServerPort); //NON-NLS
733  try {
734  if (version == SOLR_VERSION.SOLR8) {
735  logger.log(Level.INFO, "Starting Solr 8 server"); //NON-NLS
736  curSolrProcess = runLocalSolr8ControlCommand(new ArrayList<>(Arrays.asList("start", "-p", //NON-NLS
737  Integer.toString(localSolrServerPort)))); //NON-NLS
738  } else {
739  // solr4
740  logger.log(Level.INFO, "Starting Solr 4 server"); //NON-NLS
741  curSolrProcess = runLocalSolr4ControlCommand(new ArrayList<>(
742  Arrays.asList("-Dbootstrap_confdir=../solr/configsets/AutopsyConfig/conf", //NON-NLS
743  "-Dcollection.configName=AutopsyConfig"))); //NON-NLS
744  }
745 
746  // Wait for the Solr server to start and respond to a status request.
747  for (int numRetries = 0; numRetries < NUM_EMBEDDED_SERVER_RETRIES; numRetries++) {
748  if (isLocalSolrRunning()) {
749  final List<Long> pids = this.getSolrPIDs();
750  logger.log(Level.INFO, "New Solr process PID: {0}", pids); //NON-NLS
751  return;
752  }
753 
754  // Local Solr server did not respond so we sleep for
755  // 5 seconds before trying again.
756  try {
757  TimeUnit.SECONDS.sleep(EMBEDDED_SERVER_RETRY_WAIT_SEC);
758  } catch (InterruptedException ex) {
759  logger.log(Level.WARNING, "Timer interrupted"); //NON-NLS
760  }
761  }
762 
763  // If we get here the Solr server has not responded to connection
764  // attempts in a timely fashion.
765  logger.log(Level.WARNING, "Local Solr server failed to respond to status requests.");
766  WindowManager.getDefault().invokeWhenUIReady(new Runnable() {
767  @Override
768  public void run() {
769  MessageNotifyUtil.Notify.error(
770  NbBundle.getMessage(this.getClass(), "Installer.errorInitKsmMsg"),
771  Bundle.Server_status_failed_msg());
772  }
773  });
774  } catch (SecurityException ex) {
775  throw new KeywordSearchModuleException(
776  NbBundle.getMessage(this.getClass(), "Server.start.exception.cantStartSolr.msg"), ex);
777  } catch (IOException ex) {
778  throw new KeywordSearchModuleException(
779  NbBundle.getMessage(this.getClass(), "Server.start.exception.cantStartSolr.msg2"), ex);
780  }
781  }
782  }
783 
790  static boolean isPortAvailable(int port) {
791  // implementation taken from https://stackoverflow.com/a/435579
792  if (port < 1 || port > 65535) {
793  throw new IllegalArgumentException("Invalid start port: " + port);
794  }
795 
796  ServerSocket ss = null;
797  DatagramSocket ds = null;
798  try {
799  ss = new ServerSocket(port);
800  ss.setReuseAddress(true);
801  ds = new DatagramSocket(port);
802  ds.setReuseAddress(true);
803  return true;
804  } catch (IOException e) {
805  } finally {
806  if (ds != null) {
807  ds.close();
808  }
809 
810  if (ss != null) {
811  try {
812  ss.close();
813  } catch (IOException e) {
814  /* should not be thrown */
815  }
816  }
817  }
818 
819  return false;
820  }
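 /*
  * Illustration (not part of the original source): a hedged usage sketch of the
  * port check above, e.g. before trying to start a local server on the default
  * port:
  *
  *     if (isPortAvailable(DEFAULT_SOLR_SERVER_PORT)) {
  *         // nothing is listening; safe to start Solr on this port
  *     } else {
  *         // something already owns the port; pick another or stop it
  *     }
  */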
821 
822 
828  void changeSolrServerPort(int port) {
829  localSolrServerPort = port;
830  ModuleSettings.setConfigSetting(PROPERTIES_FILE, PROPERTIES_CURRENT_SERVER_PORT, String.valueOf(port));
831  }
832 
838  void changeSolrStopPort(int port) {
839  localSolrStopPort = port;
840  ModuleSettings.setConfigSetting(PROPERTIES_FILE, PROPERTIES_CURRENT_STOP_PORT, String.valueOf(port));
841  }
842 
848  synchronized void stop() {
849 
850  try {
851  // Close any open core before stopping server
852  closeCore();
853  } catch (KeywordSearchModuleException e) {
854  logger.log(Level.WARNING, "Failed to close core: ", e); //NON-NLS
855  }
856 
857  stopLocalSolr();
858  }
859 
863  private void stopLocalSolr() {
864  try {
865  //try graceful shutdown
866  Process process;
867  if (localServerVersion == SOLR_VERSION.SOLR8) {
868  logger.log(Level.INFO, "Stopping Solr 8 server"); //NON-NLS
869  process = runLocalSolr8ControlCommand(new ArrayList<>(Arrays.asList("stop", "-k", KEY, "-p", Integer.toString(localSolrServerPort)))); //NON-NLS
870  } else {
871  // solr 4
872  logger.log(Level.INFO, "Stopping Solr 4 server"); //NON-NLS
873  process = runLocalSolr4ControlCommand(new ArrayList<>(Arrays.asList("--stop"))); //NON-NLS
874  }
875 
876  logger.log(Level.INFO, "Waiting for Solr server to stop"); //NON-NLS
877  process.waitFor();
878 
879  //if still running, forcefully stop it
880  if (curSolrProcess != null) {
881  curSolrProcess.destroy();
882  curSolrProcess = null;
883  }
884 
885  } catch (IOException | InterruptedException ex) {
886  logger.log(Level.WARNING, "Error while attempting to stop Solr server", ex);
887  } finally {
888  //stop Solr stream -> log redirect threads
889  try {
890  if (errorRedirectThread != null) {
891  errorRedirectThread.stopRun();
892  errorRedirectThread = null;
893  }
894  } finally {
895  //if still running, kill it
896  killSolr();
897  }
898 
899  logger.log(Level.INFO, "Finished stopping Solr server"); //NON-NLS
900  }
901  }
902 
910  synchronized boolean isLocalSolrRunning() throws KeywordSearchModuleException {
911  try {
912 
913  if (isPortAvailable(localSolrServerPort)) {
914  return false;
915  }
916 
917  // making a status request here instead of just doing solrServer.ping(), because
918  // that doesn't work when there are no cores
919  //TODO handle timeout in cases when some other type of server is listening on that port
921 
922  logger.log(Level.INFO, "Solr server is running"); //NON-NLS
923  } catch (SolrServerException ex) {
924 
925  Throwable cause = ex.getRootCause();
926 
927  // TODO: check if SocketExceptions should actually happen (is
928  // probably caused by starting a connection as the server finishes
929  // shutting down)
930  if (cause instanceof ConnectException || cause instanceof SocketException) { //|| cause instanceof NoHttpResponseException) {
931  logger.log(Level.INFO, "Solr server is not running, cause: {0}", cause.getMessage()); //NON-NLS
932  return false;
933  } else {
934  throw new KeywordSearchModuleException(
935  NbBundle.getMessage(this.getClass(), "Server.isRunning.exception.errCheckSolrRunning.msg"), ex);
936  }
937  } catch (SolrException ex) {
938  // Just log 404 errors for now...
939  logger.log(Level.INFO, "Solr server is not running", ex); //NON-NLS
940  return false;
941  } catch (IOException ex) {
942  throw new KeywordSearchModuleException(
943  NbBundle.getMessage(this.getClass(), "Server.isRunning.exception.errCheckSolrRunning.msg2"), ex);
944  }
945 
946  return true;
947  }
948 
949  /*
950  * ** Convenience methods for use while we only open one case at a time ***
951  */
961  void openCoreForCase(Case theCase, Index index) throws KeywordSearchModuleException {
962  currentCoreLock.writeLock().lock();
963  try {
964  currentCollection = openCore(theCase, index);
965 
966  try {
967  // execute a test query. if it fails, an exception will be thrown
968  queryNumIndexedFiles();
969  } catch (NoOpenCoreException ex) {
970  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.openCore.exception.cantOpen.msg"), ex);
971  }
972 
973  serverAction.putValue(CORE_EVT, CORE_EVT_STATES.STARTED);
974  } finally {
975  currentCoreLock.writeLock().unlock();
976  }
977  }
978 
984  boolean coreIsOpen() {
985  currentCoreLock.readLock().lock();
986  try {
987  return (null != currentCollection);
988  } finally {
989  currentCoreLock.readLock().unlock();
990  }
991  }
992 
993  Index getIndexInfo() throws NoOpenCoreException {
994  currentCoreLock.readLock().lock();
995  try {
996  if (null == currentCollection) {
997  throw new NoOpenCoreException();
998  }
999  return currentCollection.getIndexInfo();
1000  } finally {
1001  currentCoreLock.readLock().unlock();
1002  }
1003  }
1004 
1005  void closeCore() throws KeywordSearchModuleException {
1006  currentCoreLock.writeLock().lock();
1007  try {
1008  if (null != currentCollection) {
1009  currentCollection.close();
1010  serverAction.putValue(CORE_EVT, CORE_EVT_STATES.STOPPED);
1011  }
1012  } finally {
1013  currentCollection = null;
1014  currentCoreLock.writeLock().unlock();
1015  }
1016  }
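 /*
  * Note (descriptive, not part of the original source): openCoreForCase() and
  * closeCore() above take currentCoreLock's write lock because they swap or
  * clear currentCollection, while the query and update helpers below take the
  * read lock so they can run concurrently with each other but never against a
  * collection that is being opened or closed.
  */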
1017 
1018  void addDocument(SolrInputDocument doc) throws KeywordSearchModuleException, NoOpenCoreException {
1019  currentCoreLock.readLock().lock();
1020  try {
1021  if (null == currentCollection) {
1022  throw new NoOpenCoreException();
1023  }
1024  TimingMetric metric = HealthMonitor.getTimingMetric("Solr: Index chunk");
1025  currentCollection.addDocument(doc);
1026  HealthMonitor.submitTimingMetric(metric);
1027  } finally {
1028  currentCoreLock.readLock().unlock();
1029  }
1030  }
1031 
1040  @NbBundle.Messages({
1041  "# {0} - colelction name", "Server.deleteCore.exception.msg=Failed to delete Solr colelction {0}",})
1042  void deleteCollection(String coreName, CaseMetadata metadata) throws KeywordSearchServiceException, KeywordSearchModuleException {
1043  try {
1044  HttpSolrClient solrServer;
1045  if (metadata.getCaseType() == CaseType.SINGLE_USER_CASE) {
1046  solrServer = getSolrClient("http://localhost:" + localSolrServerPort + "/solr"); //NON-NLS
1047  CoreAdminResponse response = CoreAdminRequest.getStatus(coreName, solrServer);
1048  if (null != response.getCoreStatus(coreName).get("instanceDir")) { //NON-NLS
1049  /*
1050  * Send a core unload request to the Solr server, with the
1051  * parameter set that request deleting the index and the
1052  * instance directory (deleteInstanceDir = true). Note that
1053  * this removes everything related to the core on the server
1054  * (the index directory, the configuration files, etc.), but
1055  * does not delete the actual Solr text index because it is
1056  * currently stored in the case directory.
1057  */
1058  org.apache.solr.client.solrj.request.CoreAdminRequest.unloadCore(coreName, true, true, solrServer);
1059  }
1060  } else {
1061  IndexingServerProperties properties = getMultiUserServerProperties(metadata.getCaseDirectory());
1062  solrServer = getSolrClient("http://" + properties.getHost() + ":" + properties.getPort() + "/solr");
1063  connectToSolrServer(solrServer);
1064 
1065  CollectionAdminRequest.Delete deleteCollectionRequest = CollectionAdminRequest.deleteCollection(coreName);
1066  CollectionAdminResponse response = deleteCollectionRequest.process(solrServer);
1067  if (response.isSuccess()) {
1068  logger.log(Level.INFO, "Deleted collection {0}", coreName); //NON-NLS
1069  } else {
1070  logger.log(Level.WARNING, "Unable to delete collection {0}", coreName); //NON-NLS
1071  }
1072  }
1073  } catch (SolrServerException | IOException ex) {
1074  // We will get a RemoteSolrException with cause == null and detailsMessage
1075  // == "Already closed" if the core is not loaded. This is not an error in this scenario.
1076  if (!ex.getMessage().equals("Already closed")) { // NON-NLS
1077  throw new KeywordSearchServiceException(Bundle.Server_deleteCore_exception_msg(coreName), ex);
1078  }
1079  }
1080  }
1081 
1093  @NbBundle.Messages({
1094  "Server.exceptionMessage.unableToCreateCollection=Unable to create Solr collection",
1095  "Server.exceptionMessage.unableToBackupCollection=Unable to backup Solr collection",
1096  "Server.exceptionMessage.unableToRestoreCollection=Unable to restore Solr collection",
1097  })
1098  private Collection openCore(Case theCase, Index index) throws KeywordSearchModuleException {
1099 
1100  int numShardsToUse = 1;
1101  try {
1102  // connect to proper Solr server
1103  configureSolrConnection(theCase, index);
1104 
1105  if (theCase.getCaseType() == CaseType.MULTI_USER_CASE) {
1106  // select number of shards to use
1107  numShardsToUse = getNumShardsToUse();
1108  }
1109  } catch (Exception ex) {
1110  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1111  throw new KeywordSearchModuleException(NbBundle.getMessage(Server.class, "Server.connect.exception.msg", ex.getLocalizedMessage()), ex);
1112  }
1113 
1114  try {
1115  String collectionName = index.getIndexName();
1116 
1117  if (theCase.getCaseType() == CaseType.MULTI_USER_CASE) {
1118  if (!collectionExists(collectionName)) {
1119  /*
1120  * The collection does not exist. Make a request that will cause the collection to be created.
1121  */
1122  boolean doRetry = false;
1123  for (int reTryAttempt = 0; reTryAttempt < NUM_COLLECTION_CREATION_RETRIES; reTryAttempt++) {
1124  try {
1125  doRetry = false;
1126  createMultiUserCollection(collectionName, numShardsToUse);
1127  } catch (Exception ex) {
1128  if (reTryAttempt >= NUM_COLLECTION_CREATION_RETRIES) {
1129  logger.log(Level.SEVERE, "Unable to create Solr collection " + collectionName, ex); //NON-NLS
1130  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.openCore.exception.cantOpen.msg"), ex);
1131  } else {
1132  logger.log(Level.SEVERE, "Unable to create Solr collection " + collectionName + ". Re-trying...", ex); //NON-NLS
1133  Thread.sleep(1000L);
1134  doRetry = true;
1135  }
1136  }
1137  if (!doRetry) {
1138  break;
1139  }
1140  }
1141  }
1142  } else {
1143  if (!coreIsLoaded(collectionName)) {
1144  // In single user mode, the index is stored in case output directory
1145  File dataDir = new File(new File(index.getIndexPath()).getParent()); // "data dir" is the parent of the index directory
1146  if (!dataDir.exists()) {
1147  dataDir.mkdirs();
1148  }
1149 
1150  // In single user mode, if there is a core.properties file already,
1151  // we've hit a solr bug. Compensate by deleting it.
1152  if (theCase.getCaseType() == CaseType.SINGLE_USER_CASE) {
1153  Path corePropertiesFile = Paths.get(localSolrFolder.toString(), SOLR, collectionName, CORE_PROPERTIES);
1154  if (corePropertiesFile.toFile().exists()) {
1155  try {
1156  corePropertiesFile.toFile().delete();
1157  } catch (Exception ex) {
1158  logger.log(Level.INFO, "Could not delete pre-existing core.properties prior to opening the core."); //NON-NLS
1159  }
1160  }
1161  }
1162 
1163  // for single user cases, we unload the core when we close the case. So we have to load the core again.
1164  CoreAdminRequest.Create createCoreRequest = new CoreAdminRequest.Create();
1165  createCoreRequest.setDataDir(dataDir.getAbsolutePath());
1166  createCoreRequest.setCoreName(collectionName);
1167  createCoreRequest.setConfigSet("AutopsyConfig"); //NON-NLS
1168  createCoreRequest.setIsLoadOnStartup(false);
1169  createCoreRequest.setIsTransient(true);
1170  localSolrServer.request(createCoreRequest);
1171 
1172  if (!coreIndexFolderExists(collectionName)) {
1173  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.openCore.exception.noIndexDir.msg"));
1174  }
1175  }
1176  }
1177 
1178  return new Collection(collectionName, theCase, index);
1179 
1180  } catch (Exception ex) {
1181  logger.log(Level.SEVERE, "Exception during Solr collection creation.", ex); //NON-NLS
1182  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.openCore.exception.cantOpen.msg"), ex);
1183  }
1184  }
1185 
1186  private int getNumShardsToUse() throws KeywordSearchModuleException {
1187 
1188  // if we want to use a specific sharding strategy, use that
1189  if (org.sleuthkit.autopsy.keywordsearch.UserPreferences.getMaxNumShards() > 0) {
1190  return org.sleuthkit.autopsy.keywordsearch.UserPreferences.getMaxNumShards();
1191  }
1192 
1193  // otherwise get list of all live Solr servers in the cluster
1194  List<String> solrServerList = getSolrServerList(remoteSolrServer);
1195  // shard across all available servers
1196  return solrServerList.size();
1197  }
1198 
1199  /*
1200  * Poll the remote Solr server for a list of existing collections, and check if
1201  * the collection of interest exists.
1202  *
1203  * @param collectionName The name of the collection.
1204  *
1205  * @return True if the collection exists, false otherwise.
1206  *
1207  * @throws SolrServerException If there is a problem communicating with the
1208  * Solr server.
1209  * @throws IOException If there is a problem communicating with the Solr
1210  * server.
1211  */
1212  private boolean collectionExists(String collectionName) throws SolrServerException, IOException {
1213  CollectionAdminRequest.List req = new CollectionAdminRequest.List();
1214  CollectionAdminResponse response = req.process(remoteSolrServer);
1215  List<?> existingCollections = (List<?>) response.getResponse().get("collections");
1216  if (existingCollections == null) {
1217  existingCollections = new ArrayList<>();
1218  }
1219  return existingCollections.contains(collectionName);
1220  }
1221 
1222  /* NOTE: Keeping this code for reference, since it works.
1223  private boolean collectionExists(String collectionName) throws SolrServerException, IOException {
1224 
1225  // TODO we could potentially use this API. Currently set exception "Solr instance is not running in SolrCloud mode"
1226  // List<String> list = CollectionAdminRequest.listCollections(localSolrServer);
1227 
1228  CollectionAdminRequest.ClusterStatus statusRequest = CollectionAdminRequest.getClusterStatus().setCollectionName(collectionName);
1229  CollectionAdminResponse statusResponse;
1230  try {
1231  statusResponse = statusRequest.process(remoteSolrServer);
1232  } catch (RemoteSolrException ex) {
1233  // collection doesn't exist
1234  return false;
1235  }
1236 
1237  if (statusResponse == null) {
1238  return false;
1239  }
1240 
1241  NamedList error = (NamedList) statusResponse.getResponse().get("error");
1242  if (error != null) {
1243  return false;
1244  }
1245 
1246  // For some reason this returns info about all collections even though it's supposed to only return info about the one we are requesting
1247  NamedList cluster = (NamedList) statusResponse.getResponse().get("cluster");
1248  NamedList collections = (NamedList) cluster.get("collections");
1249  if (collections != null) {
1250  Object collection = collections.get(collectionName);
1251  return (collection != null);
1252  } else {
1253  return false;
1254  }
1255  }*/
1256 
1257  private void createMultiUserCollection(String collectionName, int numShardsToUse) throws KeywordSearchModuleException, SolrServerException, IOException {
1258  /*
1259  * The core either does not exist or it is not loaded. Make a
1260  * request that will cause the core to be created if it does not
1261  * exist or loaded if it already exists.
1262  */
1263 
1264  Integer numShards = numShardsToUse;
1265  logger.log(Level.INFO, "numShardsToUse: {0}", numShardsToUse);
1266  Integer numNrtReplicas = 1;
1267  Integer numTlogReplicas = 0;
1268  Integer numPullReplicas = 0;
1269  CollectionAdminRequest.Create createCollectionRequest = CollectionAdminRequest.createCollection(collectionName, "AutopsyConfig", numShards, numNrtReplicas, numTlogReplicas, numPullReplicas);
1270 
1271  CollectionAdminResponse createResponse = createCollectionRequest.process(remoteSolrServer);
1272  if (createResponse.isSuccess()) {
1273  logger.log(Level.INFO, "Collection {0} successfully created.", collectionName);
1274  } else {
1275  logger.log(Level.SEVERE, "Unable to create Solr collection {0}", collectionName); //NON-NLS
1276  throw new KeywordSearchModuleException(Bundle.Server_exceptionMessage_unableToCreateCollection());
1277  }
1278 
1279  /* If we need core name:
1280  Map<String, NamedList<Integer>> status = createResponse.getCollectionCoresStatus();
1281  existingCoreName = status.keySet().iterator().next();*/
1282  if (!collectionExists(collectionName)) {
1283  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.openCore.exception.noIndexDir.msg"));
1284  }
1285  }
1286 
1287  private void backupCollection(String collectionName, String backupName, String pathToBackupLocation) throws SolrServerException, IOException, KeywordSearchModuleException {
1288  CollectionAdminRequest.Backup backup = CollectionAdminRequest.backupCollection(collectionName, backupName)
1289  .setLocation(pathToBackupLocation);
1290 
1291  CollectionAdminResponse backupResponse = backup.process(remoteSolrServer);
1292  if (backupResponse.isSuccess()) {
1293  logger.log(Level.INFO, "Collection {0} successfully backep up.", collectionName);
1294  } else {
1295  logger.log(Level.SEVERE, "Unable to back up Solr collection {0}", collectionName); //NON-NLS
1296  throw new KeywordSearchModuleException(Bundle.Server_exceptionMessage_unableToBackupCollection());
1297  }
1298  }
1299 
1300  private void restoreCollection(String backupName, String restoreCollectionName, String pathToBackupLocation) throws SolrServerException, IOException, KeywordSearchModuleException {
1301 
1302  CollectionAdminRequest.Restore restore = CollectionAdminRequest.restoreCollection(restoreCollectionName, backupName)
1303  .setLocation(pathToBackupLocation);
1304 
1305  CollectionAdminResponse restoreResponse = restore.process(remoteSolrServer);
1306  if (restoreResponse.isSuccess()) {
1307  logger.log(Level.INFO, "Collection {0} successfully resored.", restoreCollectionName);
1308  } else {
1309  logger.log(Level.SEVERE, "Unable to restore Solr collection {0}", restoreCollectionName); //NON-NLS
1310  throw new KeywordSearchModuleException(Bundle.Server_exceptionMessage_unableToRestoreCollection());
1311  }
1312  }
1313 
1328  private boolean coreIsLoaded(String coreName) throws SolrServerException, IOException {
1329  CoreAdminResponse response = CoreAdminRequest.getStatus(coreName, localSolrServer);
1330  return response.getCoreStatus(coreName).get("instanceDir") != null; //NON-NLS
1331  }
1332 
1345  private boolean coreIndexFolderExists(String coreName) throws SolrServerException, IOException {
1346  CoreAdminResponse response = CoreAdminRequest.getStatus(coreName, localSolrServer);
1347  Object dataDirPath = response.getCoreStatus(coreName).get("dataDir"); //NON-NLS
1348  if (null != dataDirPath) {
1349  File indexDir = Paths.get((String) dataDirPath, "index").toFile(); //NON-NLS
1350  return indexDir.exists();
1351  } else {
1352  return false;
1353  }
1354  }
1355 
1367  public static IndexingServerProperties getMultiUserServerProperties(String caseDirectory) {
1368 
1369  // if "solrserver.txt" is present, use those connection settings
1370  Path serverFilePath = Paths.get(caseDirectory, "solrserver.txt"); //NON-NLS
1371  if (serverFilePath.toFile().exists()) {
1372  try {
1373  List<String> lines = Files.readAllLines(serverFilePath);
1374  if (lines.isEmpty()) {
1375  logger.log(Level.SEVERE, "solrserver.txt file does not contain any data"); //NON-NLS
1376  } else if (!lines.get(0).contains(",")) {
1377  logger.log(Level.SEVERE, "solrserver.txt file is corrupt - could not read host/port from " + lines.get(0)); //NON-NLS
1378  } else {
1379  String[] parts = lines.get(0).split(",");
1380  if (parts.length != 2) {
1381  logger.log(Level.SEVERE, "solrserver.txt file is corrupt - could not read host/port from " + lines.get(0)); //NON-NLS
1382  } else {
1383  return new IndexingServerProperties(parts[0], parts[1]);
1384  }
1385  }
1386  } catch (IOException ex) {
1387  logger.log(Level.SEVERE, "solrserver.txt file could not be read", ex); //NON-NLS
1388  }
1389  }
1390 
1391  // otherwise (or if an error occurred) determine Solr version of the current case
1392  List<Index> indexes = new ArrayList<>();
1393  try {
1394  IndexMetadata indexMetadata = new IndexMetadata(caseDirectory);
1395  indexes = indexMetadata.getIndexes();
1396  } catch (IndexMetadata.TextIndexMetadataException ex) {
1397  logger.log(Level.SEVERE, "Unable to read text index metadata file: " + caseDirectory, ex);
1398 
1399  // default to using the latest Solr version settings
1400  String host = UserPreferences.getIndexingServerHost();
1401  String port = UserPreferences.getIndexingServerPort();
1402  return new IndexingServerProperties(host, port);
1403  }
1404 
1405  // select which index to use. In practice, all cases always have only one
1406  // index but there is support for having multiple indexes.
1407  Index indexToUse = IndexFinder.identifyIndexToUse(indexes);
1408  if (indexToUse == null) {
1409  // unable to find index that can be used
1410  logger.log(Level.SEVERE, "Unable to find index that can be used for case: {0}", caseDirectory);
1411 
1412  // default to using the latest Solr version settings
1413  String host = UserPreferences.getIndexingServerHost();
1414  String port = UserPreferences.getIndexingServerPort();
1415  return new IndexingServerProperties(host, port);
1416  }
1417 
1418  // return connection info for the Solr version of the current index
1419  if (IndexFinder.getCurrentSolrVersion().equals(indexToUse.getSolrVersion())) {
1420  // Solr 8
1421  String host = UserPreferences.getIndexingServerHost();
1422  String port = UserPreferences.getIndexingServerPort();
1423  return new IndexingServerProperties(host, port);
1424  } else {
1425  // Solr 4
1426  String host = UserPreferences.getSolr4ServerHost().trim();
1427  String port = UserPreferences.getSolr4ServerPort().trim();
1428  return new IndexingServerProperties(host, port);
1429  }
1430  }
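 /*
  * Illustration (not part of the original source): "solrserver.txt" is expected
  * to contain a single "host,port" line, for example (made-up values):
  *
  *     solr-host.example.com,8983
  */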
1431 
1443  public static void selectSolrServerForCase(Path rootOutputDirectory, Path caseDirectoryPath) throws KeywordSearchModuleException {
1444  // Look for the solr server list file
1445  String serverListName = "solrServerList.txt"; //NON-NLS
1446  Path serverListPath = Paths.get(rootOutputDirectory.toString(), serverListName);
1447  if (serverListPath.toFile().exists()) {
1448 
1449  // Read the list of solr servers
1450  List<String> lines;
1451  try {
1452  lines = Files.readAllLines(serverListPath);
1453  } catch (IOException ex) {
1454  throw new KeywordSearchModuleException(serverListName + " could not be read", ex); //NON-NLS
1455  }
1456 
1457  // Remove any lines that don't contain a comma (these are likely just whitespace)
1458  for (Iterator<String> iterator = lines.iterator(); iterator.hasNext();) {
1459  String line = iterator.next();
1460  if (!line.contains(",")) {
1461  // Remove the current element from the iterator and the list.
1462  iterator.remove();
1463  }
1464  }
1465  if (lines.isEmpty()) {
1466  throw new KeywordSearchModuleException(serverListName + " had no valid server information"); //NON-NLS
1467  }
1468 
1469  // Choose which server to use
1470  int rnd = new Random().nextInt(lines.size());
1471  String[] parts = lines.get(rnd).split(",");
1472  if (parts.length != 2) {
1473  throw new KeywordSearchModuleException("Invalid server data: " + lines.get(rnd)); //NON-NLS
1474  }
1475 
1476  // Split it up just to do a sanity check on the data
1477  String host = parts[0];
1478  String port = parts[1];
1479  if (host.isEmpty() || port.isEmpty()) {
1480  throw new KeywordSearchModuleException("Invalid server data: " + lines.get(rnd)); //NON-NLS
1481  }
1482 
1483  // Write the server data to a file
1484  Path serverFile = Paths.get(caseDirectoryPath.toString(), "solrserver.txt"); //NON-NLS
1485  try {
1486  caseDirectoryPath.toFile().mkdirs();
1487  if (!caseDirectoryPath.toFile().exists()) {
1488  throw new KeywordSearchModuleException("Case directory " + caseDirectoryPath.toString() + " does not exist"); //NON-NLS
1489  }
1490  Files.write(serverFile, lines.get(rnd).getBytes());
1491  } catch (IOException ex) {
1492  throw new KeywordSearchModuleException(serverFile.toString() + " could not be written", ex); //NON-NLS
1493  }
1494  }
1495  }
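 /*
  * Illustration (not part of the original source): "solrServerList.txt" holds
  * one "host,port" entry per line; lines without a comma are ignored, and one of
  * the remaining entries is picked at random and written to the case's
  * "solrserver.txt". For example (made-up values):
  *
  *     solr1.example.com,8983
  *     solr2.example.com,8983
  */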
1496 
1500  public static class IndexingServerProperties {
1501 
1502  private final String host;
1503  private final String port;
1504 
1505  IndexingServerProperties(String host, String port) {
1506  this.host = host;
1507  this.port = port;
1508  }
1509 
1515  public String getHost() {
1516  return host;
1517  }
1518 
1524  public String getPort() {
1525  return port;
1526  }
1527  }
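 /*
  * Illustration (not part of the original source): a hedged sketch of how the
  * holder above is typically consumed, mirroring the pattern used earlier in
  * this file:
  *
  *     IndexingServerProperties props = Server.getMultiUserServerProperties(caseDirectory);
  *     String solrUrl = "http://" + props.getHost() + ":" + props.getPort() + "/solr";
  */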
1528 
1534  void commit() throws SolrServerException, NoOpenCoreException {
1535  currentCoreLock.readLock().lock();
1536  try {
1537  if (null == currentCollection) {
1538  throw new NoOpenCoreException();
1539  }
1540  currentCollection.commit();
1541  } finally {
1542  currentCoreLock.readLock().unlock();
1543  }
1544  }
1545 
1546  NamedList<Object> request(SolrRequest<?> request) throws SolrServerException, RemoteSolrException, NoOpenCoreException {
1547  currentCoreLock.readLock().lock();
1548  try {
1549  if (null == currentCollection) {
1550  throw new NoOpenCoreException();
1551  }
1552  return currentCollection.request(request);
1553  } finally {
1554  currentCoreLock.readLock().unlock();
1555  }
1556  }
1557 
1568  public int queryNumIndexedFiles() throws KeywordSearchModuleException, NoOpenCoreException {
1569  currentCoreLock.readLock().lock();
1570  try {
1571  if (null == currentCollection) {
1572  throw new NoOpenCoreException();
1573  }
1574  try {
1575  return currentCollection.queryNumIndexedFiles();
1576  } catch (Exception ex) {
1577  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1578  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.queryNumIdxFiles.exception.msg"), ex);
1579  }
1580  } finally {
1581  currentCoreLock.readLock().unlock();
1582  }
1583  }
1584 
1594  public int queryNumIndexedChunks() throws KeywordSearchModuleException, NoOpenCoreException {
1595  currentCoreLock.readLock().lock();
1596  try {
1597  if (null == currentCollection) {
1598  throw new NoOpenCoreException();
1599  }
1600  try {
1601  return currentCollection.queryNumIndexedChunks();
1602  } catch (Exception ex) {
1603  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1604  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.queryNumIdxChunks.exception.msg"), ex);
1605  }
1606  } finally {
1607  currentCoreLock.readLock().unlock();
1608  }
1609  }
1610 
1620  public int queryNumIndexedDocuments() throws KeywordSearchModuleException, NoOpenCoreException {
1621  currentCoreLock.readLock().lock();
1622  try {
1623  if (null == currentCollection) {
1624  throw new NoOpenCoreException();
1625  }
1626  try {
1627  return currentCollection.queryNumIndexedDocuments();
1628  } catch (Exception ex) {
1629  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1630  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.queryNumIdxDocs.exception.msg"), ex);
1631  }
1632  } finally {
1633  currentCoreLock.readLock().unlock();
1634  }
1635  }
1636 
1647  public boolean queryIsFullyIndexed(long contentID) throws KeywordSearchModuleException, NoOpenCoreException {
1648  currentCoreLock.readLock().lock();
1649  try {
1650  if (null == currentCollection) {
1651  throw new NoOpenCoreException();
1652  }
1653  try {
1654  int totalNumChunks = currentCollection.queryTotalNumFileChunks(contentID);
1655  if (totalNumChunks == 0) {
1656  return false;
1657  }
1658 
1659  int numIndexedChunks = currentCollection.queryNumIndexedChunks(contentID);
1660  return numIndexedChunks == totalNumChunks;
1661  } catch (Exception ex) {
1662  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1663  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.queryIsIdxd.exception.msg"), ex);
1664  }
1665 
1666  } finally {
1667  currentCoreLock.readLock().unlock();
1668  }
1669  }
1670 
1682  public int queryNumFileChunks(long fileID) throws KeywordSearchModuleException, NoOpenCoreException {
1683  currentCoreLock.readLock().lock();
1684  try {
1685  if (null == currentCollection) {
1686  throw new NoOpenCoreException();
1687  }
1688  try {
1689  return currentCollection.queryTotalNumFileChunks(fileID);
1690  } catch (Exception ex) {
1691  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1692  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.queryNumFileChunks.exception.msg"), ex);
1693  }
1694  } finally {
1695  currentCoreLock.readLock().unlock();
1696  }
1697  }
1698 
1709  public QueryResponse query(SolrQuery sq) throws KeywordSearchModuleException, NoOpenCoreException, IOException {
1710  currentCoreLock.readLock().lock();
1711  try {
1712  if (null == currentCollection) {
1713  throw new NoOpenCoreException();
1714  }
1715  try {
1716  return currentCollection.query(sq);
1717  } catch (Exception ex) {
1718  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1719  logger.log(Level.SEVERE, "Solr query failed: " + sq.getQuery(), ex); //NON-NLS
1720  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.query.exception.msg", sq.getQuery()), ex);
1721  }
1722  } finally {
1723  currentCoreLock.readLock().unlock();
1724  }
1725  }
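
 /*
  * Editorial sketch (not part of the original source) of how a caller might use
  * this wrapper. The Schema.TEXT field name is taken from its use elsewhere in
  * this class; obtaining the Server instance via KeywordSearch.getServer() is an
  * assumption made for illustration only.
  *
  *   SolrQuery sq = new SolrQuery();
  *   sq.setQuery(Server.Schema.TEXT + ":" + KeywordSearchUtil.escapeLuceneQuery("keyword"));
  *   sq.setRows(10);
  *   QueryResponse response = KeywordSearch.getServer().query(sq);
  *   long numHits = response.getResults().getNumFound();
  */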
1726 
1738  public QueryResponse query(SolrQuery sq, SolrRequest.METHOD method) throws KeywordSearchModuleException, NoOpenCoreException {
1739  currentCoreLock.readLock().lock();
1740  try {
1741  if (null == currentCollection) {
1742  throw new NoOpenCoreException();
1743  }
1744  try {
1745  return currentCollection.query(sq, method);
1746  } catch (Exception ex) {
1747  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1748  logger.log(Level.SEVERE, "Solr query failed: " + sq.getQuery(), ex); //NON-NLS
1749  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.query2.exception.msg", sq.getQuery()), ex);
1750  }
1751  } finally {
1752  currentCoreLock.readLock().unlock();
1753  }
1754  }
1755 
1766  public TermsResponse queryTerms(SolrQuery sq) throws KeywordSearchModuleException, NoOpenCoreException {
1767  currentCoreLock.readLock().lock();
1768  try {
1769  if (null == currentCollection) {
1770  throw new NoOpenCoreException();
1771  }
1772  try {
1773  return currentCollection.queryTerms(sq);
1774  } catch (Exception ex) {
1775  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1776  logger.log(Level.SEVERE, "Solr terms query failed: " + sq.getQuery(), ex); //NON-NLS
1777  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.queryTerms.exception.msg", sq.getQuery()), ex);
1778  }
1779  } finally {
1780  currentCoreLock.readLock().unlock();
1781  }
1782  }
1783 
1791  void deleteDataSource(Long dataSourceId) throws IOException, KeywordSearchModuleException, NoOpenCoreException, SolrServerException {
1792  try {
1793  currentCoreLock.writeLock().lock();
1794  if (null == currentCollection) {
1795  throw new NoOpenCoreException();
1796  }
1797  currentCollection.deleteDataSource(dataSourceId);
1798  currentCollection.commit();
1799  } finally {
1800  currentCoreLock.writeLock().unlock();
1801  }
1802  }
1803 
1812  @NbBundle.Messages({
1813  "Server.getAllTerms.error=Extraction of all unique Solr terms failed:"})
1814  void extractAllTermsForDataSource(Path outputFile, ReportProgressPanel progressPanel) throws KeywordSearchModuleException, NoOpenCoreException {
1815  try {
1816  currentCoreLock.writeLock().lock();
1817  if (null == currentCollection) {
1818  throw new NoOpenCoreException();
1819  }
1820  try {
1821  currentCollection.extractAllTermsForDataSource(outputFile, progressPanel);
1822  } catch (Exception ex) {
1823  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1824  logger.log(Level.SEVERE, "Extraction of all unique Solr terms failed: ", ex); //NON-NLS
1825  throw new KeywordSearchModuleException(Bundle.Server_getAllTerms_error(), ex);
1826  }
1827  } finally {
1828  currentCoreLock.writeLock().unlock();
1829  }
1830  }
1831 
1841  public String getSolrContent(final Content content) throws NoOpenCoreException {
1842  currentCoreLock.readLock().lock();
1843  try {
1844  if (null == currentCollection) {
1845  throw new NoOpenCoreException();
1846  }
1847  return currentCollection.getSolrContent(content.getId(), 0);
1848  } finally {
1849  currentCoreLock.readLock().unlock();
1850  }
1851  }
1852 
1865  public String getSolrContent(final Content content, int chunkID) throws NoOpenCoreException {
1866  currentCoreLock.readLock().lock();
1867  try {
1868  if (null == currentCollection) {
1869  throw new NoOpenCoreException();
1870  }
1871  return currentCollection.getSolrContent(content.getId(), chunkID);
1872  } finally {
1873  currentCoreLock.readLock().unlock();
1874  }
1875  }
1876 
1886  public String getSolrContent(final long objectID) throws NoOpenCoreException {
1887  currentCoreLock.readLock().lock();
1888  try {
1889  if (null == currentCollection) {
1890  throw new NoOpenCoreException();
1891  }
1892  return currentCollection.getSolrContent(objectID, 0);
1893  } finally {
1894  currentCoreLock.readLock().unlock();
1895  }
1896  }
1897 
1908  public String getSolrContent(final long objectID, final int chunkID) throws NoOpenCoreException {
1909  currentCoreLock.readLock().lock();
1910  try {
1911  if (null == currentCollection) {
1912  throw new NoOpenCoreException();
1913  }
1914  return currentCollection.getSolrContent(objectID, chunkID);
1915  } finally {
1916  currentCoreLock.readLock().unlock();
1917  }
1918  }
1919 
1929  public static String getChunkIdString(long parentID, int childID) {
1930  return Long.toString(parentID) + Server.CHUNK_ID_SEPARATOR + Integer.toString(childID);
1931  }
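
 /*
  * Editorial example (not part of the original source), assuming CHUNK_ID_SEPARATOR
  * is the underscore separator used for chunk document IDs in this class:
  *
  *   Server.getChunkIdString(42L, 3);   // -> "42_3", i.e. chunk 3 of the content with object ID 42
  */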
1932 
1939  private void connectToEmbeddedSolrServer() throws SolrServerException, IOException {
1940  TimingMetric metric = HealthMonitor.getTimingMetric("Solr: Connectivity check");
1941  CoreAdminRequest.getStatus(null, localSolrServer);
1942  HealthMonitor.submitTimingMetric(metric);
1943  }
1944 
1956  void connectToSolrServer(String host, String port) throws SolrServerException, IOException {
1957  try (HttpSolrClient solrServer = getSolrClient("http://" + host + ":" + port + "/solr")) {
1958  connectToSolrServer(solrServer);
1959  }
1960  }
1961 
1971  private void connectToSolrServer(HttpSolrClient solrServer) throws SolrServerException, IOException {
1972  TimingMetric metric = HealthMonitor.getTimingMetric("Solr: Connectivity check");
1973  CollectionAdminRequest.ClusterStatus statusRequest = CollectionAdminRequest.getClusterStatus();
1974  CollectionAdminResponse statusResponse = statusRequest.process(solrServer);
1975  int statusCode = Integer.valueOf(((NamedList) statusResponse.getResponse().get("responseHeader")).get("status").toString());
1976  if (statusCode != 0) {
1977  logger.log(Level.WARNING, "Could not connect to Solr server "); //NON-NLS
1978  } else {
1979  logger.log(Level.INFO, "Connected to Solr server "); //NON-NLS
1980  }
1981  HealthMonitor.submitTimingMetric(metric);
1982  }
1983 
1984  private List<String> getSolrServerList(String host, String port) throws KeywordSearchModuleException {
1985  HttpSolrClient solrServer = getSolrClient("http://" + host + ":" + port + "/solr");
1986  return getSolrServerList(solrServer);
1987  }
1988 
1989  private List<String> getSolrServerList(HttpSolrClient solrServer) throws KeywordSearchModuleException {
1990 
1991  try {
1992  CollectionAdminRequest.ClusterStatus statusRequest = CollectionAdminRequest.getClusterStatus();
1993  CollectionAdminResponse statusResponse;
1994  try {
1995  statusResponse = statusRequest.process(solrServer);
1996  } catch (RemoteSolrException ex) {
1997  // collection doesn't exist
1998  return Collections.emptyList();
1999  }
2000 
2001  if (statusResponse == null) {
2002  return Collections.emptyList();
2003  }
2004 
2005  NamedList<?> error = (NamedList) statusResponse.getResponse().get("error");
2006  if (error != null) {
2007  return Collections.emptyList();
2008  }
2009 
2010  NamedList<?> cluster = (NamedList) statusResponse.getResponse().get("cluster");
2011  @SuppressWarnings("unchecked")
2012  List<String> liveNodes = (ArrayList) cluster.get("live_nodes");
2013 
2014  if (liveNodes != null) {
2015  liveNodes = liveNodes.stream()
2016  .map(serverStr -> serverStr.endsWith("_solr")
2017  ? serverStr.substring(0, serverStr.length() - "_solr".length())
2018  : serverStr)
2019  .collect(Collectors.toList());
2020  }
2021  return liveNodes;
2022  } catch (Exception ex) {
2023  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
2024  throw new KeywordSearchModuleException(
2025  NbBundle.getMessage(this.getClass(), "Server.serverList.exception.msg", solrServer.getBaseURL()), ex);
2026  }
2027  }
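
 /*
  * Editorial example (not part of the original source): Solr cluster status
  * typically reports "live_nodes" entries in the form "host:port_solr", so the
  * mapping above turns, e.g.:
  *
  *   ["10.0.2.15:8983_solr", "10.0.2.16:8983_solr"]  ->  ["10.0.2.15:8983", "10.0.2.16:8983"]
  */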
2028 
2029  class Collection {
2030 
2031  // handle to the collection in Solr
2032  private final String name;
2033 
2034  private final CaseType caseType;
2035 
2036  private final Index textIndex;
2037 
2038  // We use different Solr clients for different operations. HttpSolrClient is geared towards query performance.
2039  // ConcurrentUpdateSolrClient is geared towards batching solr documents for better indexing throughput. We
2040  // have implemented our own batching algorithm so we will probably not use ConcurrentUpdateSolrClient.
2041  // CloudSolrClient is geared towards SolrCloud deployments. These are only good for collection-specific operations.
2042  private HttpSolrClient queryClient;
2043  private SolrClient indexingClient;
2044 
2045  private final int maxBufferSize;
2046  private final List<SolrInputDocument> buffer;
2047  private final Object bufferLock;
2048 
2049  /* (JIRA-7521) Sometimes we get into a situation where the Solr server is no longer able to index new data.
2050  * Typically the main reason for this is Solr running out of memory. In this case we stop trying to send new
2051  * data to Solr (for this collection) after a certain number of consecutive batches have failed. */
2052  private static final int MAX_NUM_CONSECUTIVE_FAILURES = 5;
2053  private AtomicInteger numConsecutiveFailures = new AtomicInteger(0);
2054  private AtomicBoolean skipIndexing = new AtomicBoolean(false);
2055 
2056  private final ScheduledThreadPoolExecutor periodicTasksExecutor;
2057  private static final long PERIODIC_BATCH_SEND_INTERVAL_MINUTES = 10;
2058  private static final int NUM_BATCH_UPDATE_RETRIES = 10;
2059  private static final long SLEEP_BETWEEN_RETRIES_MS = 10000; // 10 seconds
2060 
2061  private Collection(String name, Case theCase, Index index) throws TimeoutException, InterruptedException, KeywordSearchModuleException {
2062  this.name = name;
2063  this.caseType = theCase.getCaseType();
2064  this.textIndex = index;
2065  bufferLock = new Object();
2066 
2067  if (caseType == CaseType.SINGLE_USER_CASE) {
2068  // get SolrJ client
2069  queryClient = getSolrClient("http://localhost:" + localSolrServerPort + "/solr/" + name); // HttpClient
2070  indexingClient = getSolrClient("http://localhost:" + localSolrServerPort + "/solr/" + name); // HttpClient
2071  } else {
2072  // read Solr connection info from user preferences, unless "solrserver.txt" is present
2073  queryClient = configureMultiUserConnection(theCase, index, name);
2074 
2075  // for MU cases, use CloudSolrClient for indexing. Indexing is only supported for Solr 8.
2076  if (IndexFinder.getCurrentSolrVersion().equals(index.getSolrVersion())) {
2077  IndexingServerProperties properties = getMultiUserServerProperties(theCase.getCaseDirectory());
2078  indexingClient = getCloudSolrClient(properties.getHost(), properties.getPort(), name); // CloudClient
2079  } else {
2080  indexingClient = configureMultiUserConnection(theCase, index, name); // HttpClient
2081  }
2082  }
2083 
2084  // document batching
2085  maxBufferSize = org.sleuthkit.autopsy.keywordsearch.UserPreferences.getDocumentsQueueSize();
2086  logger.log(Level.INFO, "Using Solr document queue size = {0}", maxBufferSize); //NON-NLS
2087  buffer = new ArrayList<>(maxBufferSize);
2088  periodicTasksExecutor = new ScheduledThreadPoolExecutor(1, new ThreadFactoryBuilder().setNameFormat("periodic-batched-document-task-%d").build()); //NON-NLS
2089  periodicTasksExecutor.scheduleWithFixedDelay(new SendBatchedDocumentsTask(), PERIODIC_BATCH_SEND_INTERVAL_MINUTES, PERIODIC_BATCH_SEND_INTERVAL_MINUTES, TimeUnit.MINUTES);
2090  }
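
 /*
  * Editorial summary (not part of the original source) of the client selection above:
  *   - single-user case:                        HttpSolrClient for both querying and indexing,
  *                                              pointed at the embedded Solr on localSolrServerPort
  *   - multi-user case, current-version index:  HttpSolrClient for querying, CloudSolrClient for indexing
  *   - multi-user case, older index version:    HttpSolrClient for both, since CloudSolrClient indexing
  *                                              is only supported for the current (Solr 8) index version
  */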
2091 
2098  private final class SendBatchedDocumentsTask implements Runnable {
2099 
2100  @Override
2101  public void run() {
2102 
2103  if (skipIndexing.get()) {
2104  return;
2105  }
2106 
2107  List<SolrInputDocument> clone;
2108  synchronized (bufferLock) {
2109 
2110  if (buffer.isEmpty()) {
2111  return;
2112  }
2113 
2114  // Make a copy of the buffer contents and release the lock, so that we
2115  // don't hold up other ingest threads
2116  clone = buffer.stream().collect(toList());
2117  buffer.clear();
2118  }
2119 
2120  try {
2121  // send the cloned list to Solr
2122  sendBufferedDocs(clone);
2123  } catch (KeywordSearchModuleException ex) {
2124  logger.log(Level.SEVERE, "Periodic batched document update failed", ex); //NON-NLS
2125  }
2126  }
2127  }
2128 
2134  String getName() {
2135  return name;
2136  }
2137 
2138  private Index getIndexInfo() {
2139  return this.textIndex;
2140  }
2141 
2142  private QueryResponse query(SolrQuery sq) throws SolrServerException, IOException {
2143  return queryClient.query(sq);
2144  }
2145 
2146  private NamedList<Object> request(SolrRequest<?> request) throws SolrServerException, RemoteSolrException {
2147  try {
2148  return queryClient.request(request);
2149  } catch (Exception e) {
2150  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
2151  logger.log(Level.WARNING, "Could not issue Solr request. ", e); //NON-NLS
2152  throw new SolrServerException(
2153  NbBundle.getMessage(this.getClass(), "Server.request.exception.exception.msg"), e);
2154  }
2155 
2156  }
2157 
2158  private QueryResponse query(SolrQuery sq, SolrRequest.METHOD method) throws SolrServerException, IOException {
2159  return queryClient.query(sq, method);
2160  }
2161 
2162  private TermsResponse queryTerms(SolrQuery sq) throws SolrServerException, IOException {
2163  QueryResponse qres = queryClient.query(sq);
2164  return qres.getTermsResponse();
2165  }
2166 
2167  private void commit() throws SolrServerException {
2168  List<SolrInputDocument> clone;
2169  synchronized (bufferLock) {
2170  // Make a copy of the buffer contents and release the lock, so that we
2171  // don't hold up other ingest threads
2172  clone = buffer.stream().collect(toList());
2173  buffer.clear();
2174  }
2175 
2176  try {
2177  sendBufferedDocs(clone);
2178  } catch (KeywordSearchModuleException ex) {
2179  throw new SolrServerException(NbBundle.getMessage(this.getClass(), "Server.commit.exception.msg"), ex);
2180  }
2181 
2182  try {
2183  //commit and block
2184  indexingClient.commit(true, true);
2185  } catch (Exception e) {
2186  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
2187  logger.log(Level.WARNING, "Could not commit index. ", e); //NON-NLS
2188  throw new SolrServerException(NbBundle.getMessage(this.getClass(), "Server.commit.exception.msg"), e);
2189  }
2190  }
2191 
2192  private void deleteDataSource(Long dsObjId) throws IOException, SolrServerException {
2193  String dataSourceId = Long.toString(dsObjId);
2194  String deleteQuery = "image_id:" + dataSourceId;
2195 
2196  queryClient.deleteByQuery(deleteQuery);
2197  }
2198 
2210  @NbBundle.Messages({
2211  "# {0} - Number of extracted terms",
2212  "ExtractAllTermsReport.numberExtractedTerms=Extracted {0} terms..."
2213  })
2214  private void extractAllTermsForDataSource(Path outputFile, ReportProgressPanel progressPanel) throws IOException, SolrServerException, NoCurrentCaseException, KeywordSearchModuleException {
2215 
2216  Files.deleteIfExists(outputFile);
2217  OpenOption[] options = new OpenOption[] { java.nio.file.StandardOpenOption.CREATE, java.nio.file.StandardOpenOption.APPEND };
2218 
2219  // step through the terms
2220  int termStep = 1000;
2221  long numExtractedTerms = 0;
2222  String firstTerm = "";
2223  while (true) {
2224  SolrQuery query = new SolrQuery();
2225  query.setRequestHandler("/terms");
2226  query.setTerms(true);
2227  query.setTermsLimit(termStep);
2228  query.setTermsLower(firstTerm);
2229  query.setTermsLowerInclusive(false);
2230 
2231  // Returned terms are sorted in "index" order, which is the fastest option. Per Solr documentation:
2232  // "Retrieving terms in index order is very fast since the implementation directly uses Lucene’s TermEnum to iterate over the term dictionary."
2233  // All other sort criteria return very inconsistent and overlapping results.
2234  query.setTermsSortString("index");
2235 
2236  // "text" field is the schema field that we populate with (lowercased) terms
2237  query.addTermsField(Server.Schema.TEXT.toString());
2238  query.setTermsMinCount(0);
2239 
2240  // Unfortunately, Solr "terms queries" do not support any filtering, so we can't filter by data source this way.
2241  // query.addFilterQuery(Server.Schema.IMAGE_ID.toString() + ":" + dataSourceId);
2242 
2243  QueryRequest request = new QueryRequest(query);
2244  TermsResponse response = request.process(queryClient).getTermsResponse();
2245  List<Term> terms = response.getTerms(Server.Schema.TEXT.toString());
2246 
2247  if (terms == null || terms.isEmpty()) {
2248  // No more terms to retrieve (and guard against a null response); report the final count.
2249  progressPanel.updateStatusLabel(Bundle.ExtractAllTermsReport_numberExtractedTerms(numExtractedTerms));
2250  break;
2251  }
2252 
2253  // set the first term for the next query
2254  firstTerm = terms.get(terms.size()-1).getTerm();
2255 
2256  List<String> listTerms = terms.stream().map(Term::getTerm).collect(Collectors.toList());
2257  Files.write(outputFile, listTerms, options);
2258 
2259  numExtractedTerms += terms.size();
2260  progressPanel.updateStatusLabel(Bundle.ExtractAllTermsReport_numberExtractedTerms(numExtractedTerms));
2261  }
2262  }
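
 /*
  * Editorial sketch (not part of the original source) of the paging pattern above:
  * each /terms request returns up to termStep terms in index order, and the last
  * term of one page becomes the exclusive lower bound of the next page. For example:
  *
  *   pass 1: setTermsLower("")        -> terms "aardvark" ... "melon" (termStep terms)
  *   pass 2: setTermsLower("melon")   -> terms strictly after "melon"
  *   ...
  *   pass N: empty page               -> loop exits after reporting the final count
  */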
2263 
2272  void addDocument(SolrInputDocument doc) throws KeywordSearchModuleException {
2273 
2274  if (skipIndexing.get()) {
2275  return;
2276  }
2277 
2278  List<SolrInputDocument> clone;
2279  synchronized (bufferLock) {
2280  buffer.add(doc);
2281  // buffer documents if the buffer is not full
2282  if (buffer.size() < maxBufferSize) {
2283  return;
2284  }
2285 
2286  // Buffer is full. Make a copy and release the lock, so that we don't
2287  // hold up other ingest threads
2288  clone = buffer.stream().collect(toList());
2289  buffer.clear();
2290  }
2291 
2292  // send the cloned list to Solr
2293  sendBufferedDocs(clone);
2294  }
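
 /*
  * Editorial note (not part of the original source): documents are buffered until the
  * buffer reaches maxBufferSize (UserPreferences.getDocumentsQueueSize()) and are also
  * flushed by the periodic SendBatchedDocumentsTask every PERIODIC_BATCH_SEND_INTERVAL_MINUTES
  * and by commit(), so a document may reach Solr noticeably later than the addDocument() call.
  */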
2295 
2303  @NbBundle.Messages({
2304  "Collection.unableToIndexData.error=Unable to add data to text index. All future text indexing for the current case will be skipped.",
2305 
2306  })
2307  private void sendBufferedDocs(List<SolrInputDocument> docBuffer) throws KeywordSearchModuleException {
2308 
2309  if (docBuffer.isEmpty()) {
2310  return;
2311  }
2312 
2313  try {
2314  boolean success = true;
2315  for (int reTryAttempt = 0; reTryAttempt < NUM_BATCH_UPDATE_RETRIES; reTryAttempt++) {
2316  try {
2317  success = true;
2318  indexingClient.add(docBuffer);
2319  } catch (Exception ex) {
2320  success = false;
2321  if (reTryAttempt < NUM_BATCH_UPDATE_RETRIES - 1) {
2322  logger.log(Level.WARNING, "Unable to send document batch to Solr. Re-trying...", ex); //NON-NLS
2323  try {
2324  Thread.sleep(SLEEP_BETWEEN_RETRIES_MS);
2325  } catch (InterruptedException ignore) {
2326  throw new KeywordSearchModuleException(
2327  NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"), ex); //NON-NLS
2328  }
2329  }
2330  }
2331  if (success) {
2332  numConsecutiveFailures.set(0);
2333  if (reTryAttempt > 0) {
2334  logger.log(Level.INFO, "Batch update succeeded after {0} re-try attempt(s)", reTryAttempt); //NON-NLS
2335  }
2336  return;
2337  }
2338  }
2339  // if we are here, it means all re-try attempts failed
2340  logger.log(Level.SEVERE, "Unable to send document batch to Solr. All re-try attempts failed!"); //NON-NLS
2341  throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg")); //NON-NLS
2342  } catch (Exception ex) {
2343  // Solr throws a lot of unexpected exception types
2344  numConsecutiveFailures.incrementAndGet();
2345  logger.log(Level.SEVERE, "Could not add batched documents to index", ex); //NON-NLS
2346 
2347  // display a message to the user that a document batch is missing from the index
2348  MessageNotifyUtil.Notify.error(
2349  NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"),
2350  NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"));
2351  throw new KeywordSearchModuleException(
2352  NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"), ex); //NON-NLS
2353  } finally {
2354  if (numConsecutiveFailures.get() >= MAX_NUM_CONSECUTIVE_FAILURES) {
2355  // skip all future indexing
2356  skipIndexing.set(true);
2357  logger.log(Level.SEVERE, "Unable to add data to text index. All future text indexing for the current case will be skipped!"); //NON-NLS
2358 
2359  // display message to user that no more data will be added to the index
2360  MessageNotifyUtil.Notify.error(
2361  NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"),
2362  Bundle.Collection_unableToIndexData_error());
2363  if (RuntimeProperties.runningWithGUI()) {
2364  MessageNotifyUtil.Message.error(Bundle.Collection_unableToIndexData_error());
2365  }
2366  }
2367  docBuffer.clear();
2368  }
2369  }
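
 /*
  * Editorial note (not part of the original source) on the retry arithmetic above:
  * a batch is attempted up to NUM_BATCH_UPDATE_RETRIES (10) times with
  * SLEEP_BETWEEN_RETRIES_MS (10 s) pauses between failed attempts, i.e. roughly
  * 90 seconds of waiting before the batch is abandoned. Once
  * MAX_NUM_CONSECUTIVE_FAILURES (5) batches in a row have failed, skipIndexing is
  * set and all further indexing for the collection is skipped.
  */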
2370 
2381  private String getSolrContent(long contentID, int chunkID) {
2382  final SolrQuery q = new SolrQuery();
2383  q.setQuery("*:*");
2384  String filterQuery = Schema.ID.toString() + ":" + KeywordSearchUtil.escapeLuceneQuery(Long.toString(contentID));
2385  if (chunkID != 0) {
2386  filterQuery = filterQuery + Server.CHUNK_ID_SEPARATOR + chunkID;
2387  }
2388  q.addFilterQuery(filterQuery);
2389  q.setFields(Schema.TEXT.toString());
2390  try {
2391  // Get the first result.
2392  SolrDocumentList solrDocuments = queryClient.query(q).getResults();
2393 
2394  if (!solrDocuments.isEmpty()) {
2395  SolrDocument solrDocument = solrDocuments.get(0);
2396  if (solrDocument != null) {
2397  java.util.Collection<Object> fieldValues = solrDocument.getFieldValues(Schema.TEXT.toString());
2398  if (fieldValues.size() == 1) {
2399  // The indexed text field for artifacts will only have a single value.
2400  return fieldValues.toArray(new String[0])[0];
2401  } else {
2402  // The indexed text for files has two values: the file name and the file content.
2403  // We return the file content value.
2404  return fieldValues.toArray(new String[0])[1];
2405  }
2406  }
2407  }
2408  } catch (Exception ex) {
2409  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
2410  logger.log(Level.SEVERE, "Error getting content from Solr. Solr document id " + contentID + ", chunk id " + chunkID + ", query: " + filterQuery, ex); //NON-NLS
2411  return null;
2412  }
2413 
2414  return null;
2415  }
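
 /*
  * Editorial example (not part of the original source) of the filter query built above,
  * assuming Schema.ID renders as the "id" field and CHUNK_ID_SEPARATOR is "_":
  *
  *   getSolrContent(42L, 0)  ->  fq = "id:42"     (the parent document for object 42)
  *   getSolrContent(42L, 3)  ->  fq = "id:42_3"   (chunk 3 of that content)
  */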
2416 
2417  synchronized void close() throws KeywordSearchModuleException {
2418  try {
2419 
2420  // stop the periodic batch update task. If the task is already running,
2421  // allow it to finish.
2422  ThreadUtils.shutDownTaskExecutor(periodicTasksExecutor);
2423 
2424  // We only unload cores for "single-user" cases.
2425  if (this.caseType == CaseType.MULTI_USER_CASE) {
2426  return;
2427  }
2428 
2429  CoreAdminRequest.unloadCore(this.name, localSolrServer);
2430  } catch (Exception ex) {
2431  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
2432  throw new KeywordSearchModuleException(
2433  NbBundle.getMessage(this.getClass(), "Server.close.exception.msg"), ex);
2434  } finally {
2435  try {
2436  queryClient.close();
2437  queryClient = null;
2438  indexingClient.close();
2439  indexingClient = null;
2440  } catch (IOException ex) {
2441  throw new KeywordSearchModuleException(
2442  NbBundle.getMessage(this.getClass(), "Server.close.exception.msg2"), ex);
2443  }
2444  }
2445  }
2446 
2456  private int queryNumIndexedFiles() throws SolrServerException, IOException {
2457  return queryNumIndexedDocuments() - queryNumIndexedChunks(); // all indexed documents minus chunk documents
2458  }
2459 
2469  private int queryNumIndexedChunks() throws SolrServerException, IOException {
2470  SolrQuery q = new SolrQuery(Server.Schema.ID + ":*" + Server.CHUNK_ID_SEPARATOR + "*");
2471  q.setRows(0);
2472  int numChunks = (int) query(q).getResults().getNumFound();
2473  return numChunks;
2474  }
2475 
2486  private int queryNumIndexedDocuments() throws SolrServerException, IOException {
2487  SolrQuery q = new SolrQuery("*:*");
2488  q.setRows(0);
2489  return (int) query(q).getResults().getNumFound();
2490  }
2491 
2501  private boolean queryIsIndexed(long contentID) throws SolrServerException, IOException {
2502  String id = KeywordSearchUtil.escapeLuceneQuery(Long.toString(contentID));
2503  SolrQuery q = new SolrQuery("*:*");
2504  q.addFilterQuery(Server.Schema.ID.toString() + ":" + id);
2505  //q.setFields(Server.Schema.ID.toString());
2506  q.setRows(0);
2507  return (int) query(q).getResults().getNumFound() != 0;
2508  }
2509 
2524  private int queryTotalNumFileChunks(long contentID) throws SolrServerException, IOException {
2525  final SolrQuery q = new SolrQuery();
2526  q.setQuery("*:*");
2527  String filterQuery = Schema.ID.toString() + ":" + KeywordSearchUtil.escapeLuceneQuery(Long.toString(contentID));
2528  q.addFilterQuery(filterQuery);
2529  q.setFields(Schema.NUM_CHUNKS.toString());
2530  try {
2531  SolrDocumentList solrDocuments = query(q).getResults();
2532  if (!solrDocuments.isEmpty()) {
2533  SolrDocument solrDocument = solrDocuments.get(0);
2534  if (solrDocument != null && !solrDocument.isEmpty()) {
2535  Object fieldValue = solrDocument.getFieldValue(Schema.NUM_CHUNKS.toString());
2536  return (Integer)fieldValue;
2537  }
2538  }
2539  } catch (Exception ex) {
2540  // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
2541  logger.log(Level.SEVERE, "Error getting content from Solr. Solr document id " + contentID + ", query: " + filterQuery, ex); //NON-NLS
2542  return 0;
2543  }
2544  // File not indexed
2545  return 0;
2546  }
2547 
2559  int queryNumIndexedChunks(long contentID) throws SolrServerException, IOException {
2560  SolrQuery q = new SolrQuery(Server.Schema.ID + ":" + KeywordSearchUtil.escapeLuceneQuery(Long.toString(contentID)) + Server.CHUNK_ID_SEPARATOR + "*");
2561  q.setRows(0);
2562  int numChunks = (int) query(q).getResults().getNumFound();
2563  return numChunks;
2564  }
2565  }
2566 
2567  class ServerAction extends AbstractAction {
2568 
2569  private static final long serialVersionUID = 1L;
2570 
2571  @Override
2572  public void actionPerformed(ActionEvent e) {
2573  logger.log(Level.INFO, e.paramString().trim());
2574  }
2575  }
2576 
2580  class SolrServerNoPortException extends SocketException {
2581 
2582  private static final long serialVersionUID = 1L;
2583 
2587  private final int port;
2588 
2589  SolrServerNoPortException(int port) {
2590  super(NbBundle.getMessage(Server.class, "Server.solrServerNoPortException.msg", port,
2591  Server.PROPERTIES_CURRENT_SERVER_PORT));
2592  this.port = port;
2593  }
2594 
2595  int getPortNumber() {
2596  return port;
2597  }
2598  }
2599 }

Copyright © 2012-2022 Basis Technology. Generated on: Tue Jun 27 2023
This work is licensed under a Creative Commons Attribution-Share Alike 3.0 United States License.