Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
- package net.cfday.rwise.sstservlet;
- import java.io.BufferedReader;
- import net.cfday.rwise.Exporter;
- import net.cfday.rwise.dataconversion.ConversionException;
- import net.cfday.rwise.db.utils.s3.S3Extractor;
- import net.cfday.rwise.s3datascavenger.UserCreator;
- import net.cfday.rwise.sst_servlet_backend.HbaseTablesFacetModifer.Modifier;
- import org.apache.logging.log4j.LogManager;
- import org.apache.logging.log4j.Logger;
- import org.apache.solr.client.solrj.SolrQuery;
- import org.apache.solr.client.solrj.SolrQuery.SortClause;
- import org.apache.solr.client.solrj.SolrServerException;
- import org.apache.solr.client.solrj.impl.CloudSolrClient;
- import org.apache.solr.client.solrj.request.LukeRequest;
- import org.apache.solr.client.solrj.response.FacetField;
- import org.apache.solr.client.solrj.response.FacetField.Count;
- import org.apache.solr.client.solrj.response.LukeResponse;
- import org.apache.solr.client.solrj.response.LukeResponse.FieldInfo;
- import org.apache.solr.client.solrj.response.QueryResponse;
- import org.apache.solr.common.SolrDocument;
- import org.apache.solr.common.SolrDocumentList;
- import org.apache.solr.common.SolrInputDocument;
- import org.apache.solr.common.params.CursorMarkParams;
- import org.codehaus.jettison.json.JSONArray;
- import org.codehaus.jettison.json.JSONException;
- import org.codehaus.jettison.json.JSONObject;
- import org.joda.time.DateTime;
- import org.joda.time.format.DateTimeFormatter;
- import org.joda.time.format.ISODateTimeFormat;
- import org.springframework.context.ApplicationContext;
- import org.springframework.context.support.ClassPathXmlApplicationContext;
- import javax.mail.*;
- import javax.mail.internet.InternetAddress;
- import javax.mail.internet.MimeMessage;
- import javax.servlet.ServletException;
- import javax.servlet.http.HttpServlet;
- import javax.servlet.http.HttpServletRequest;
- import javax.servlet.http.HttpServletResponse;
- import java.io.File;
- import java.io.IOException;
- import java.io.InputStreamReader;
- import java.io.PrintWriter;
- import java.net.URL;
- import java.net.URLDecoder;
- import java.util.*;
- import java.util.Map.Entry;
- import java.util.logging.Level;
- import javax.servlet.ServletConfig;
- import net.cfday.rwise.db.utils.esearch.ElasticSearchConnection;
- import net.cfday.rwise.db.utils.esearch.ElasticSearchConnector;
- import org.apache.http.client.methods.HttpPost;
- import org.json.simple.parser.ParseException;
- import rwise.database.DataService;
- import rwise.database.Datum;
- public class ElasticSearch_Servlet extends HttpServlet {
- ElasticSearchConnection elasticSearchConnection;
- private static final Logger logger = LogManager.getLogger(Servlet.class);
- public static JSONArray parsePostResponse(String responseFromQuery) throws JSONException {
- JSONObject responseJson = new JSONObject(responseFromQuery);
- // System.out.println(responseJson);
- // for (Object key : responseJson.keySet()){
- // Object value = responseJson.get(key);
- // System.out.println(key + " <:> " + value);
- // }
- JSONObject hitsJsonObject = (JSONObject) responseJson.get("hits");
- JSONArray hitsJsonArray = (JSONArray) hitsJsonObject.get("hits");
- JSONArray databaseRows = new JSONArray();
- for (int i = 0; i < hitsJsonArray.length(); i++) {
- JSONObject hitJsonObject = (JSONObject) hitsJsonArray.get(i);
- // System.out.println(hitObject);
- /*
- The information that we actually write ourselves into the database is stored under the _source field.
- I'm bringing that information out and sticking it into the outer level where the _* fields are at.
- Basically i'm getting rid of the extra JSONObject and just lumping it all together.
- */
- JSONObject databaseRow = (JSONObject) hitJsonObject.get("_source");
- hitJsonObject.remove("_source");
- Iterator databaseRowKeysIterator = databaseRow.keys();
- while (databaseRowKeysIterator.hasNext()) {
- String key = (String) databaseRowKeysIterator.next();
- Object value = databaseRow.get(key);
- hitJsonObject.put(key, value);
- }
- databaseRows.put(hitJsonObject);
- // System.out.println(hitJsonObject);
- databaseRowKeysIterator = databaseRow.keys();
- while (databaseRowKeysIterator.hasNext()) {
- String databaseRowKey = (String) databaseRowKeysIterator.next();
- Object databaseRowValue = databaseRow.get(databaseRowKey);
- // System.out.println(databaseRowKeyString + "<:>" + databaseRowValue);
- }
- // System.out.println("\n\n");
- Iterator hitJsonObjectIterator = hitJsonObject.keys();
- while (hitJsonObjectIterator.hasNext()) {
- String hitKey = (String) hitJsonObjectIterator.next();
- Object hitValue = hitJsonObject.get(hitKey);
- // System.out.println(hitKeyString + " <:> " + hitValueObject);
- }
- // System.out.println("\n\n");
- }
- hitsJsonObject = null;
- hitsJsonArray = null;
- return databaseRows;
- }
- public static void printClassTypesForParsedPostResponseFieldsAndValues(JSONArray parsedPostResponse) throws JSONException {
- for (int i = 0; i < parsedPostResponse.length(); i++) {
- JSONObject databaseRowJsonObject = (JSONObject) parsedPostResponse.get(i);
- Iterator databaseRowIterator = databaseRowJsonObject.keys();
- while (databaseRowIterator.hasNext()) {
- String databaseRowKeyObject = (String) databaseRowIterator.next();
- Object databaseRowValueObject = databaseRowJsonObject.get(databaseRowKeyObject);
- System.out.println(databaseRowKeyObject.getClass() + " {" + databaseRowKeyObject + "} <:> " + databaseRowValueObject.getClass() + " {" + databaseRowValueObject + "}");
- }
- System.out.println("\n");
- }
- }
/**
 * Servlet initialization: opens the Elasticsearch connection shared by the
 * request handlers and points it at the "artifact" index, "data" type.
 */
@Override
public void init(ServletConfig config) throws ServletException {
    super.init(config);
    // NOTE(review): connect() presumably reads host/port from configuration
    // elsewhere -- confirm; nothing here passes connection details.
    elasticSearchConnection = (ElasticSearchConnection) new ElasticSearchConnector().connect();
    elasticSearchConnection.setIndex("artifact");
    elasticSearchConnection.setType("data");
}
- private static List<String> getStrs(String key, SolrDocument doc) {
- List<String> out = new ArrayList<>();
- List<String> tmpList = (List) doc.get(key);
- if (tmpList == null) {
- return null;
- }
- Set<String> tmpSet = new HashSet<>();
- tmpSet.addAll(tmpList);
- out.addAll(tmpSet);
- return out;
- }
- private static String getStr(String key, SolrDocument doc) {
- String out;
- Object valueObj = doc.get(key);
- if (valueObj == null) {
- return null;
- }
- if (valueObj.getClass().toString().toLowerCase().contains("array")
- || valueObj.getClass().toString().toLowerCase().contains("list")) {
- out = (String) ((List) valueObj).get(0);
- } else {
- out = (String) doc.get(key);
- }
- return out;
- }
- private static void printOutHttpServletRequest(HttpServletRequest request) {
- Enumeration params = request.getParameterNames();
- while (params.hasMoreElements()) {
- String paramName = (String) params.nextElement();
- String[] parameterValues = request.getParameterValues(paramName);
- System.out.println("Parameter Name - " + paramName);
- System.out.print("\tValue - {");
- for (String parameterValue : parameterValues) {
- System.out.print(parameterValue + " <:> ");
- }
- System.out.print("}\n");
- }
- }
- public String unescapeJsonToStringString(String jsonToString) {
- //These cases I got from json.org
- jsonToString = jsonToString.replace("\\\\", "\\"); //replace escaped backslash with single backslash
- jsonToString = jsonToString.replace("\\/", "/"); //replace escaped forwardslash with a single forwardslash
- jsonToString = jsonToString.replaceAll("\\\\u[0-9]{4}", "");//removes hex coded things
- jsonToString = jsonToString.replace("\\b", ""); //remove break
- jsonToString = jsonToString.replace("\\f", ""); //remove next page
- jsonToString = jsonToString.replace("\\n", ""); //remove new line
- jsonToString = jsonToString.replace("\\r", ""); //remove carriage return
- jsonToString = jsonToString.replace("\\t", ""); //remove tab
- return jsonToString;
- }
- public String findNameAlternatives(JSONObject databaseRow, String art_name_str) throws JSONException {
- String s3FilePath = databaseRow.getString("art_s3FilePath_pth");
- File docAsFile = new File(s3FilePath);
- if (art_name_str == null) {
- if (databaseRow.isNull("art_title_str")) {
- if (databaseRow.isNull("art_subject_str")) {
- if (databaseRow.isNull("ind_author_str")) {
- if (databaseRow.isNull("art_fileName_str")){
- art_name_str = docAsFile.getName();
- } else {
- art_name_str = databaseRow.getString("art_fileName_str");
- }
- } else {
- art_name_str = databaseRow.getString("ind_author_str");
- }
- } else {
- art_name_str = databaseRow.getString("art_subject_str");
- }
- } else {
- art_name_str = databaseRow.getString("art_title_str");
- }
- }
- if (art_name_str == null || art_name_str.equals("null")) {
- art_name_str = databaseRow.getString("id"); //utter last resort
- }
- return art_name_str;
- }
- public void addAdditionalDataForSidePanelToJsonResponseForDatabaseRow(JSONObject jsonResponseForThisDatabaseRow, JSONObject databaseRow) throws JSONException {
- String id = databaseRow.getString("id");
- try {
- String fileType = jsonResponseForThisDatabaseRow.getString("fileType");
- if (fileType.startsWith("rtf") || fileType.startsWith("email")) {
- try {
- if (!databaseRow.isNull("ind_sender_strs")) {
- JSONArray sender = databaseRow.getJSONArray("ind_sender_strs");
- jsonResponseForThisDatabaseRow.put("sender", sender);
- } else {
- }
- if (!databaseRow.isNull("ind_recipient_strs")) {
- JSONArray recepient = databaseRow.getJSONArray("ind_recipient_strs");
- jsonResponseForThisDatabaseRow.put("recipients", recepient);
- } else {
- }
- if (!databaseRow.isNull("art_date_dte")) {
- Object sentDate = (Date) databaseRow.get("art_date_dte");
- jsonResponseForThisDatabaseRow.put("sentDate", sentDate);
- } else {
- }
- } catch (Throwable th) {
- // System.out.println("WARNING:Failed to add additional information fields for this email solr document with id " + id);
- }
- } else if (fileType.startsWith("zip")) {
- try {
- int zippedFileCount = databaseRow.getInt("art_attachmentIndex_int");
- jsonResponseForThisDatabaseRow.put("zippedFilesCount", zippedFileCount);
- if (!databaseRow.isNull("ind_author_str")) {
- String createdBy = databaseRow.getString("ind_author_str");
- jsonResponseForThisDatabaseRow.put("createdBy", createdBy);
- } else {
- }
- if (!databaseRow.isNull("art_creationTime_dte")) {
- Object creationDate = databaseRow.get("art_creationTime_dte");
- jsonResponseForThisDatabaseRow.put("creationDate", creationDate);
- } else {
- }
- } catch (Throwable th) {
- // System.out.println("WARNING:Failed to add additional information fields for this zip solr document with id " + id);
- }
- } else if (fileType.startsWith("html")) {
- if (!databaseRow.isNull("art_artifactType_str") && databaseRow.getString("art_artifactType_str").equals("p3 web")) {
- String art_artifactType_str = databaseRow.getString("art_artifactType_str");
- if (!databaseRow.isNull("art_question_str")) {
- String art_question_str = databaseRow.getString("art_question_str");
- jsonResponseForThisDatabaseRow.put("p3Question", art_question_str);
- }
- if (!databaseRow.isNull("art_url_str")) {
- String art_url_str = databaseRow.getString("art_url_str");
- jsonResponseForThisDatabaseRow.put("URL", art_url_str);
- }
- }
- } else {
- try {
- if (!databaseRow.isNull("art_creationTime_dte")) {
- Object creationDate = databaseRow.get("art_creationTime_dte");
- jsonResponseForThisDatabaseRow.put("creationDate", creationDate);
- } else {
- }
- if (!databaseRow.isNull("ind_author_str")) {
- String createdBy = databaseRow.getString("ind_author_str");
- jsonResponseForThisDatabaseRow.put("createdBy", createdBy);
- } else {
- }
- } catch (Throwable th) {
- // System.out.println("WARNING:Failed to add additional information fields for this solr document with id " + id);
- }
- }
- } catch (Throwable th) {
- // System.out.println("WARNING:Failed to add additional infomration at all for this solr document with id " + id);
- }
- }
- public ArrayList<String> parseFoldersString(String folderPath, String userId) {
- // TODO: Do you really want to modify the folderPath parameter or are you just reusing a variable?
- if (!folderPath.startsWith("/")) {
- folderPath = "/" + folderPath;
- }
- if (folderPath.startsWith("/P3/" + userId + "/P3")) {
- folderPath = folderPath.replaceFirst("/P3/" + userId + "/P3", "/P3");
- } else if (folderPath.startsWith("/" + userId + "/P3")) {
- folderPath = folderPath.replaceFirst("/" + userId + "/P3", "/P3"); //not sure this case ever exists
- } else if (folderPath.startsWith("/" + userId)) {
- folderPath = folderPath.replaceFirst("/" + userId, "");
- }
- ArrayList<String> listOfFolders = new ArrayList<>();
- File folder = new File(folderPath);
- String folderName = folder.getName();
- if (folderName.equals("cfday-demo") || folderName.equals("enronDemo"))//SUPER HACKISH!
- {
- return listOfFolders;
- }
- File parentFolder = folder.getParentFile();
- if (parentFolder != null) {
- String parentFolderName = parentFolder.getName();
- String parentFolderPath = parentFolder.getPath();
- listOfFolders.addAll(parseFoldersString(parentFolderPath, userId));
- }
- if (!folderName.isEmpty()) {
- listOfFolders.add(folderName);
- }
- return listOfFolders;
- }
- public JSONObject editQuery(HttpServletRequest request, String requestType) throws JSONException, SolrServerException, IOException {
- JSONObject response = new JSONObject();
- String visibility = request.getParameter("private");
- String creator_name = request.getParameter("creator_name");
- String action = request.getParameter("action");
- CloudSolrClient queriesServer = new CloudSolrClient("localhost:11093");
- queriesServer.setDefaultCollection("facetted_queries");
- /*
- Nothing crazy here, just need somethign to put stuff into and get queries that arleady exist by id. Delete them b id too.
- */
- if (action.equals("saveTarget")) {
- DateTimeFormatter dtf = ISODateTimeFormat.dateTimeNoMillis();
- String creation_date = dtf.print(DateTime.now());
- SolrInputDocument sdoc = new SolrInputDocument();
- sdoc.setField("art_visibility_str", visibility);
- sdoc.setField("art_creationDate_dte", creation_date);
- sdoc.setField("ind_creatorName_str", creator_name);
- String name = request.getParameter("name");
- String description = request.getParameter("description");
- String data = request.getParameter("facetData");
- String id = creator_name + "|" + name;
- sdoc.setField("id", id);
- sdoc.setField("art_targetName_str", name);
- sdoc.setField("art_targetDescription_str", description);
- sdoc.setField("art_dataBlob_str", data);
- queriesServer.add(sdoc);
- } else if (action.equals("deleteTarget")) {
- String id = request.getParameter("id");
- SolrDocument solrDocument = queriesServer.getById(id);
- String requesterName = request.getParameter("username");
- String creatorFromSolr = getStr("ind_creatorName_str", solrDocument);
- if (creatorFromSolr.equals(requesterName)) {
- queriesServer.deleteById(id);
- } else {
- JSONArray errors = new JSONArray();
- errors.put(new JSONObject().put("statusCode", "409"));
- errors.put(new JSONObject().put("errorMessage", "User does not have permission to delete this row. Creator name in database does not match user name!"));
- response.put("errors", errors);
- }
- }
- queriesServer.commit();
- queriesServer.close(); // TODO: Use SolrConnection or at least use try-with-resources. (Hey, Java 8 is out now, might start using Java 7 features before we are totally out of date.)
- response.put("status", "complete");
- return response;
- }
- public JSONArray parseFolderFacetFieldForFileNavTojson(JSONObject databaseRow, String baseFolderPath, String userId) throws JSONException {
- /*
- I don't seem to do anythin with the solr client dspite passing it in.
- */
- LinkedHashMap<String, Integer> uniqueFolderPaths = new LinkedHashMap<>();
- JSONArray response = new JSONArray();
- JSONArray buckets = databaseRow.getJSONObject("aggregations").getJSONObject("art_s3FileParentFolderPath_pth").getJSONArray("buckets");
- logger.debug("Found " + buckets.length() + " different values of facet information");
- for (int i = 0; i < buckets.length(); i++) {
- JSONObject bucket = buckets.getJSONObject(i);
- int fileCount = bucket.getInt("doc_count");
- if (fileCount < 1)
- continue;
- String filePath = bucket.getString("key");
- if (filePath.startsWith("P3/")) {
- filePath = filePath.replaceFirst("P3/", "");
- }
- if (!filePath.startsWith("/")) {
- filePath = "/" + filePath;
- }
- File folderFile = new File(filePath);
- if (!baseFolderPath.startsWith("/")) {
- // TODO: Do you really want to modify the parameter baseFolderPath or are you just reusing a variable?
- baseFolderPath = "/" + baseFolderPath;
- }
- ArrayList<File> folderHierarchy = new ArrayList<File>(); //break it up into a hiearchy. so take something lke /folder1/folder2/folder and turn it into /folder1, /folder1/folder2, /folder1/folder2/folder3
- while (folderFile.getParentFile()!=null){
- folderHierarchy.add(folderFile);
- folderFile = folderFile.getParentFile();
- }
- File baseFolderFile = new File(baseFolderPath);
- for (File folder : folderHierarchy) {
- if (folder.getParentFile().compareTo(baseFolderFile) == 0) //If the parent of this folder is the same as the base folder, then we know it's only 1 level out
- {
- Integer count = uniqueFolderPaths.get(folder.getAbsolutePath());
- if (count == null)
- uniqueFolderPaths.put(folder.getAbsolutePath(), fileCount);
- else
- uniqueFolderPaths.put(folder.getAbsolutePath(), fileCount + count);
- }
- }
- }
- for (Map.Entry<String, Integer> folderInfo : uniqueFolderPaths.entrySet()) {
- Integer fileCount = folderInfo.getValue();
- String folderPath = folderInfo.getKey();
- File folder = new File(folderPath);
- String filename = folder.getName();
- File parentFolder = folder.getParentFile();
- String parentFolderPath = parentFolder.getPath();
- JSONObject folderJsonObject = new JSONObject();
- folderJsonObject.put("id", folderPath);
- folderJsonObject.put("fileType", "dir");
- folderJsonObject.put("name", filename);
- folderJsonObject.put("fileCount", fileCount);
- folderJsonObject.put("path", folderPath);
- ArrayList<String> parsedFolders = parseFoldersString(parentFolderPath, userId);
- parsedFolders.removeIf(Objects::isNull);
- folderJsonObject.put("parentFolders", parsedFolders);
- folderJsonObject.put("viewable", "false");
- response.put(folderJsonObject);
- }
- return response;
- }
- public JSONObject parseEmailAsFolderForFileNavToJson(JSONObject databaseRow, String userId) throws JSONException {
- String s3FilePath = databaseRow.getString("art_s3FilePath_pth");
- if (!s3FilePath.startsWith("/")) {
- s3FilePath = "/" + s3FilePath; //Adding an initial "/" incase the solr field value doesn't start with one.
- }
- String id = databaseRow.getString("id");
- String name = null;
- if (databaseRow.isNull("art_title_str")) {
- name = findNameAlternatives(databaseRow, name);
- } else {
- name = databaseRow.getString("art_title_str");
- }
- name = name + ".email";
- Integer art_numAttachments_int = 0;
- if (databaseRow.has("art_numAttachments_int") && databaseRow.get("art_numAttachments_int") != null)
- art_numAttachments_int = databaseRow.getInt("art_numAttachments_int");
- String art_s3FileParentFolderPath_pth = databaseRow.getString("art_s3FileParentFolderPath_pth");
- JSONObject response = new JSONObject();
- response.put("id", id);
- response.put("fileType", "email");
- response.put("name", name);
- response.put("fileCount", art_numAttachments_int);
- response.put("path", s3FilePath);
- ArrayList<String> parentFoldersArrayList = parseFoldersString(art_s3FileParentFolderPath_pth, userId);
- parentFoldersArrayList.removeIf(Objects::isNull);
- JSONArray parentFoldersJSONArray = new JSONArray(parentFoldersArrayList);
- response.put("parentFolders", parentFoldersJSONArray);
- response.put("viewable", "false");
- return response;
- }
- public JSONObject parseForFileNavToJson(JSONObject databaseRow, String userId) throws JSONException {
- /*
- Again not using the facettedServer thing, whaaaat was i doin with my life that I made these decisiions.
- */
- String id = databaseRow.getString("id");
- String art_fileType_str = databaseRow.getString("art_fileType_str");
- String art_isViewable_bool = databaseRow.getString("art_isViewable_str");
- String art_contentHash_str = databaseRow.getString("art_contentHash_str");
- String s3FilePath = databaseRow.getString("art_s3FilePath_pth");
- if (!s3FilePath.startsWith("/")) {
- s3FilePath = "/" + s3FilePath; //Adding an initial "/" incase the solr field value doesn't start with one.
- }
- String art_s3FileParentFolderPath_pth = databaseRow.getString("art_s3FileParentFolderPath_pth");
- String art_artifactType_str = databaseRow.getString("art_artifactType_str");
- if (!art_artifactType_str.equals("attachment") && art_fileType_str.equals("pst")) {
- return null;
- }
- String art_name_str = null;
- if (databaseRow.isNull("art_title_str")) {
- art_name_str = findNameAlternatives(databaseRow, art_name_str);
- } else {
- art_name_str = databaseRow.getString("art_title_str");
- }
- int art_fileSize_int;
- if (!databaseRow.isNull("art_fileSize_int")) {
- art_fileSize_int = databaseRow.getInt("art_fileSize_int");
- } else {
- art_fileSize_int = 0;
- }
- String art_fileSize_str = String.valueOf(art_fileSize_int);
- JSONObject jsonResponseForThisDatabaseRow = new JSONObject();
- jsonResponseForThisDatabaseRow.put("id", id);
- jsonResponseForThisDatabaseRow.put("fileType", art_fileType_str);
- jsonResponseForThisDatabaseRow.put("name", art_name_str);
- jsonResponseForThisDatabaseRow.put("path", s3FilePath);
- ArrayList<String> parsedFolders = parseFoldersString(art_s3FileParentFolderPath_pth, userId);
- parsedFolders.removeIf(Objects::isNull);
- jsonResponseForThisDatabaseRow.put("parentFolders", parsedFolders);
- jsonResponseForThisDatabaseRow.put("viewable", art_isViewable_bool);
- jsonResponseForThisDatabaseRow.put("fileSize", art_fileSize_str);
- jsonResponseForThisDatabaseRow.put("contentHash", art_contentHash_str);
- return jsonResponseForThisDatabaseRow;
- }
- public JSONObject parseAttachmentDatabaseRowForFileNavToJson(JSONObject databaseRow, String emailName, String userId) throws JSONException {
- /*
- Not using the connnnnecttttttionnn to solrrrrrrrrrr
- */
- String id = databaseRow.getString("id");
- String art_isViewable_bool = databaseRow.getString("art_isViewable_str");
- String art_fileType_str = "file";
- if (databaseRow.has("art_fileType_str") && databaseRow.get("art_fileType_str") != null)
- art_fileType_str = databaseRow.getString("art_fileType_str");
- String art_contentHash_str = "000";
- if (databaseRow.has("art_contentHash_str") && databaseRow.get("art_contentHash_str") != null)
- art_contentHash_str = databaseRow.getString("art_contentHash_str");
- String s3FilePath = databaseRow.getString("art_s3FilePath_pth");
- String art_s3FileParentFolderPath_pth = databaseRow.getString("art_s3FileParentFolderPath_pth");
- if (!s3FilePath.startsWith("/")) {
- s3FilePath = "/" + s3FilePath; //Adding an initial "/" incase the solr field value doesn't start with one.
- }
- File docAsFile = new File(s3FilePath);
- String docAsFileName = docAsFile.getName();
- String pathToFileAsIfItWereInTheCorrectFolderStructureAndNotLikeItIsWhereItIsPlacedInsideAPstThatIsOnTheRootFolderWhereInRealityThePstCouldBeWithinASeriesOfFolders = art_s3FileParentFolderPath_pth + "/" + docAsFileName;
- docAsFile = new File(pathToFileAsIfItWereInTheCorrectFolderStructureAndNotLikeItIsWhereItIsPlacedInsideAPstThatIsOnTheRootFolderWhereInRealityThePstCouldBeWithinASeriesOfFolders);
- String art_artifactType_str = databaseRow.getString("art_artifactType_str");
- if (!art_artifactType_str.equals("attachment") && art_fileType_str.equals("pst")) {
- return null;
- }
- String art_name_str = null;
- if (databaseRow.isNull("art_title_str")) {
- art_name_str = findNameAlternatives(databaseRow, art_name_str);
- } else {
- art_name_str = databaseRow.getString("art_title_str");
- }
- if (art_artifactType_str.equals("email")) {
- art_name_str = art_name_str + ".email";
- }
- long art_fileSize_int;
- if (!databaseRow.isNull("art_fileSize_int")) {
- art_fileSize_int = databaseRow.getInt("art_fileSize_int");
- } else {
- art_fileSize_int = 0;
- }
- String art_fileSize_str = String.valueOf(art_fileSize_int);
- JSONObject jsonResponseForThisDatabaseRow = new JSONObject();
- jsonResponseForThisDatabaseRow.put("id", id);
- jsonResponseForThisDatabaseRow.put("fileType", art_fileType_str);
- jsonResponseForThisDatabaseRow.put("name", art_name_str);
- jsonResponseForThisDatabaseRow.put("path", s3FilePath);
- ArrayList<String> parentFoldersArrayList = parseFoldersString(art_s3FileParentFolderPath_pth, userId);
- parentFoldersArrayList.removeIf(Objects::isNull);
- parentFoldersArrayList.add(emailName);
- jsonResponseForThisDatabaseRow.put("parentFolders", new JSONArray(parentFoldersArrayList));
- jsonResponseForThisDatabaseRow.put("viewable", art_isViewable_bool);
- jsonResponseForThisDatabaseRow.put("fileSize", art_fileSize_str);
- addAdditionalDataForSidePanelToJsonResponseForDatabaseRow(jsonResponseForThisDatabaseRow, databaseRow);
- jsonResponseForThisDatabaseRow.put("contentHash", art_contentHash_str);
- return jsonResponseForThisDatabaseRow;
- }
- public class FacetEdittingWorkerThread extends Thread {
- String oldFacetValue, newFacetValue, oldFacetField, newFacetField, solrCollectionDbName, username;
- Map<String, ArrayList<String>> facetFieldToSourceFieldsMapping;
- Map<String, String> facetFieldToSuffixMapping;
- ArrayList<String> sourceFieldsList = new ArrayList<>();
- int rows;
/**
 * Captures the parameters of one facet-edit job.
 *
 * @param oldFacetValue                  facet value being changed
 * @param newFacetValue                  replacement value ("" means unchanged)
 * @param oldFacetField                  facet field being changed
 * @param newFacetField                  replacement field ("" means unchanged)
 * @param solrCollectionDbName           Solr collection to run against
 * @param username                       restricts edits to this user's documents
 * @param facetFieldToSourceFieldsMapping copy-field destination -> its source fields
 * @param facetFieldToSuffixMapping       facet field -> destination field for new values
 * @param rows                           page size used when cursoring through results
 */
public FacetEdittingWorkerThread(String oldFacetValue, String newFacetValue, String oldFacetField, String newFacetField, String solrCollectionDbName, String username, Map<String, ArrayList<String>> facetFieldToSourceFieldsMapping, Map<String, String> facetFieldToSuffixMapping, int rows) {
    this.oldFacetValue = oldFacetValue;
    this.newFacetValue = newFacetValue;
    this.oldFacetField = oldFacetField;
    this.newFacetField = newFacetField;
    this.solrCollectionDbName = solrCollectionDbName;
    this.username = username;
    this.facetFieldToSourceFieldsMapping = facetFieldToSourceFieldsMapping;
    this.facetFieldToSuffixMapping = facetFieldToSuffixMapping;
    // Flatten the per-facet source-field lists into one list, used later to
    // select which fields the worker's query fetches.
    for (ArrayList<String> sourceFields : this.facetFieldToSourceFieldsMapping.values()) {
        sourceFieldsList.addAll(sourceFields);
    }
    this.rows = rows;
}
- @Override
- public void run() {
- // TODO: Modularize this code and then use SolrConnection. At least use try-with-resources!
- CloudSolrClient facettedServer = new CloudSolrClient("localhost:11093");
- facettedServer.setDefaultCollection((solrCollectionDbName));
- String cursorMark = CursorMarkParams.CURSOR_MARK_START;
- SolrQuery query = new SolrQuery(oldFacetField + ":\"" + oldFacetValue + "\"");
- query.setRows(rows);
- query.setFields(oldFacetField, facettedServer.getIdField(), "_version_");
- for (String field : sourceFieldsList) {
- query.addField(field);
- }
- query.setSort(SortClause.asc("id"));
- query.addSort(SortClause.asc("_version_"));
- query.addFilterQuery("art_username_str:\"" + username + "\"");
- try {
- boolean done = false;
- ArrayList<SolrInputDocument> sdocs = new ArrayList<>();
- while (!done) {
- query.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
- System.out.println("Querying solr with " + query);
- QueryResponse response = facettedServer.query(query);
- String nextCursorMark = response.getNextCursorMark();
- SolrDocumentList docs = response.getResults();
- System.out.println("Docsize is " + docs.size() + " NumFound is " + docs.getNumFound());
- for (SolrDocument doc : docs) {
- String id = getStr(facettedServer.getIdField(), doc);
- Map<String, Object> fieldModifier;
- SolrInputDocument sdoc = new SolrInputDocument();
- sdoc.setField(facettedServer.getIdField(), id);
- if (!newFacetValue.equals(oldFacetValue) && !newFacetValue.equals("")) {
- //The facet values are different, so we are changing the values. Check to see if we are also changing the facet field.
- if (!newFacetField.equals(oldFacetField) && !newFacetField.equals("")) {
- //The facet fields are also different, so we are changing both the facet values AND the facet fields
- //Go through every possible source field, for the particular copy field destination that is oldFacetField.
- //For every source, see if it has the oldFacetValue we are looking for
- //If it does, remove the old value
- //an additional check has to be made to see if we are dealing with a multi valued list. If we are, check each value for the old facet value
- //If it is a multi valued list, grab the whole list, remove the unwanted value, and then do a set.
- ArrayList<String> oldFacetFieldSourceFields = facetFieldToSourceFieldsMapping.get(oldFacetField);
- for (String oldFacetFieldSourceField : oldFacetFieldSourceFields) {
- if (oldFacetFieldSourceField.endsWith("strs")) {
- ArrayList<String> solrDocOldFacetFieldSourceFieldValue = (ArrayList) (doc.getFieldValue(oldFacetFieldSourceField));
- if (solrDocOldFacetFieldSourceFieldValue != null && solrDocOldFacetFieldSourceFieldValue.contains(oldFacetValue)) {
- while (solrDocOldFacetFieldSourceFieldValue.contains(oldFacetValue)) {
- solrDocOldFacetFieldSourceFieldValue.remove(oldFacetValue);//remove ALL occurances of it
- }
- fieldModifier = new HashMap<>();
- fieldModifier.put("set", solrDocOldFacetFieldSourceFieldValue);
- sdoc.addField(oldFacetFieldSourceField, fieldModifier);
- fieldModifier = new HashMap();
- sdocs.add(sdoc);
- }
- } else {
- String solrDocOldFacetFieldSourceFieldValue = getStr(oldFacetFieldSourceField, doc);
- if (solrDocOldFacetFieldSourceFieldValue != null && solrDocOldFacetFieldSourceFieldValue.equals(oldFacetValue)) {
- fieldModifier = new HashMap<>();
- fieldModifier.put("set", null);
- sdoc.addField(oldFacetFieldSourceField, fieldModifier);
- fieldModifier = new HashMap();
- sdocs.add(sdoc);
- }
- }
- }
- //Add the newFacetValue to some newFacetFieldDestination that will comply with the schema we have, and be picked up by the newFacetField copyField
- //THOUGHT: Potential improvement. If the new facet field is a multi valued, it might be faster if instead of atomically adding the new facet value to this list,
- //we instead got the list of values in the new facet field, wrote the value in, and then set the whole thing with the value in there.
- //THERE was a significant improvement in time when we did this same technique for deleting stuff, so I wonder if here there would be something as well.
- //Though the idea of changing facet fields is rarely used so maybe it's not a huge thing.
- String newFacetFieldDestinationField = facetFieldToSuffixMapping.get(newFacetField);
- fieldModifier = new HashMap<>();
- fieldModifier.put("add", newFacetValue);
- sdoc.addField(newFacetFieldDestinationField, fieldModifier);
- fieldModifier = new HashMap();
- sdocs.add(sdoc);
- } else {
- ArrayList<String> oldFacetFieldSourceFields = facetFieldToSourceFieldsMapping.get(oldFacetField);
- //The facet field is not different, so we are changing the value, but NOT the field.
- //Go through every possible source field, for the particular copy field destination that is oldFacetField.
- //For every source, see if it has the oldFacetValue we are looking for
- //If it does, remove the old value and add the new one
- //an additional check has to be made to see if we are dealing with a multi valued list. If we are, check each value for the old facet value
- //If it is a multi valued list, grab the whole list, remove the unwanted value, and then do a set.
- for (String oldFacetFieldSourceField : oldFacetFieldSourceFields) {
- if (oldFacetFieldSourceField.endsWith("strs")) {
- ArrayList<String> solrDocOldFacetFieldSourceFieldValue = (ArrayList) (doc.getFieldValue(oldFacetFieldSourceField));
- if (solrDocOldFacetFieldSourceFieldValue != null && solrDocOldFacetFieldSourceFieldValue.contains(oldFacetValue)) {
- while (solrDocOldFacetFieldSourceFieldValue.contains(oldFacetValue)) {
- solrDocOldFacetFieldSourceFieldValue.remove(oldFacetValue);//remove ALL occurances of it
- }
- solrDocOldFacetFieldSourceFieldValue.add(newFacetValue);
- fieldModifier = new HashMap<>();
- fieldModifier.put("set", solrDocOldFacetFieldSourceFieldValue);
- sdoc.addField(oldFacetFieldSourceField, fieldModifier);
- fieldModifier = new HashMap();
- sdocs.add(sdoc);
- }
- } else {
- String solrDocOldFacetFieldSourceFieldValue = getStr(oldFacetFieldSourceField, doc);
- if (solrDocOldFacetFieldSourceFieldValue != null && solrDocOldFacetFieldSourceFieldValue.equals(oldFacetValue)) {
- fieldModifier = new HashMap<>();
- fieldModifier.put("set", newFacetValue);
- sdoc.addField(oldFacetFieldSourceField, fieldModifier);
- fieldModifier = new HashMap();
- sdocs.add(sdoc);
- } else {
- }
- }
- }
- }
- } else //The facet values are not different, so we are not changing those. The fields may be different, we must check.
- {
- if (!newFacetField.equals(oldFacetField) && !newFacetField.equals("")) {
- //The facet fields are different, so we are changing them, but NOT the values.
- //Go through every possible source field, for the particular copy field destination that is oldFacetField.
- //For every source, see if it has the oldFacetValue we are looking for
- //If it does, remove it.
- //an additional check has to be made to see if we are dealing with a multi valued list. If we are, check each value for the old facet value
- ArrayList<String> oldFacetFieldSourceFields = facetFieldToSourceFieldsMapping.get(oldFacetField);
- for (String oldFacetFieldSourceField : oldFacetFieldSourceFields) {
- if (oldFacetFieldSourceField.endsWith("strs")) {
- ArrayList<String> solrDocOldFacetFieldSourceFieldValue = (ArrayList) (doc.getFieldValue(oldFacetFieldSourceField));
- if (solrDocOldFacetFieldSourceFieldValue != null && solrDocOldFacetFieldSourceFieldValue.contains(oldFacetValue)) {
- while (solrDocOldFacetFieldSourceFieldValue.contains(oldFacetValue)) {
- solrDocOldFacetFieldSourceFieldValue.remove(oldFacetValue);//remove ALL occurances of it
- }
- fieldModifier = new HashMap<>();
- fieldModifier.put("set", solrDocOldFacetFieldSourceFieldValue);
- sdoc.addField(oldFacetFieldSourceField, fieldModifier);
- fieldModifier = new HashMap();
- sdocs.add(sdoc);
- }
- } else {
- String solrDocOldFacetFieldSourceFieldValue = getStr(oldFacetFieldSourceField, doc);
- if (solrDocOldFacetFieldSourceFieldValue != null && solrDocOldFacetFieldSourceFieldValue.equals(oldFacetValue)) {
- fieldModifier = new HashMap<>();
- fieldModifier.put("set", null);
- sdoc.addField(oldFacetFieldSourceField, fieldModifier);
- fieldModifier = new HashMap();
- sdocs.add(sdoc);
- }
- }
- }
- //Add the oldFacetValue to some newFacetFieldDestination that will comply with the schema we have, and be picked up by the newFacetField copyField
- String newFacetFieldDestinationField = facetFieldToSuffixMapping.get(newFacetField);
- fieldModifier = new HashMap<>();
- fieldModifier.put("add", oldFacetValue);
- sdoc.addField(newFacetFieldDestinationField, fieldModifier);
- fieldModifier = new HashMap();
- sdocs.add(sdoc);
- } else {
- //The facet fields are not different, neither are the values. Do nothing.
- }
- }
- }
- if (sdocs.size() >= rows) {
- System.out.println("Adding batch of " + rows + " documents to solr");
- facettedServer.add(sdocs);
- System.out.println("Committing");
- facettedServer.commit();
- sdocs.clear();
- }
- if (cursorMark.equals(nextCursorMark)) {
- done = true;
- }
- cursorMark = nextCursorMark;
- }
- if (sdocs.size() > 0) {
- System.out.println("Adding final batch of " + sdocs.size() + " documents to solr");
- facettedServer.add(sdocs);
- System.out.println("Committing");
- facettedServer.commit();
- sdocs.clear();
- }
- } catch (Throwable th) {
- System.out.println("Something went wrong.");
- th.printStackTrace();
- try {
- facettedServer.close();
- } catch (IOException ex) {
- logger.error("ERROR", ex);
- }
- }
- try {
- facettedServer.close();
- } catch (IOException ex) {
- logger.error("ERROR", ex);
- }
- try {
- //now modify the back end values as well using Joosep's Modifier class.
- Modifier modifier = new Modifier("CleanedTagsDb", "ParsedDb");
- modifier.modify(oldFacetValue, newFacetValue);
- } catch (Throwable ex) {
- System.out.println("WARNING: FAILED TO MODIFY THE VALUES IN THE BACK END");
- }
- return;
- }
- }
- public class FacetDeletingWorkerThread extends Thread {
- String facetField, facetFieldValue, solrCollectionDbName, username;
- boolean plural;
- Map<String, ArrayList<String>> facetFieldToSourceFieldsMapping;
- Map<String, String> facetFieldToSuffixMapping;
- ArrayList<String> sourceFieldsList = new ArrayList<>();
- int rows;
- public FacetDeletingWorkerThread(String facetField, String facetFieldValue, String solrCollectionDbName, String username, Map<String, ArrayList<String>> facetFieldToSourceFieldsMapping, Map<String, String> facetFieldToSuffixMapping, int rows) {
- this.facetField = facetField;
- this.facetFieldValue = facetFieldValue;
- this.solrCollectionDbName = solrCollectionDbName;
- this.username = username;
- this.facetFieldToSourceFieldsMapping = facetFieldToSourceFieldsMapping;
- this.facetFieldToSuffixMapping = facetFieldToSuffixMapping;
- for (ArrayList<String> sourceFields : this.facetFieldToSourceFieldsMapping.values()) {
- sourceFieldsList.addAll(sourceFields);
- }
- this.rows = rows;
- }
- @Override
- public void run() {
- CloudSolrClient facettedServer = new CloudSolrClient("localhost:11093");
- facettedServer.setDefaultCollection(solrCollectionDbName);
- String cursorMark = CursorMarkParams.CURSOR_MARK_START;
- SolrQuery query = new SolrQuery(facetField + ":\"" + facetFieldValue + "\"");
- query.setRows(rows);
- query.setFields(facetField, facettedServer.getIdField(), "_version_");
- for (String field : sourceFieldsList) {
- query.addField(field);
- }
- query.setSort(SortClause.asc("id"));
- query.addSort(SortClause.asc("_version_"));
- query.addFilterQuery("art_username_str:\"" + username + "\"");
- try {
- boolean done = false;
- ArrayList<SolrInputDocument> sdocs = new ArrayList<>();
- while (!done) {
- query.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
- System.out.println("Querying solr with " + query);
- QueryResponse response = facettedServer.query(query);
- String nextCursorMark = response.getNextCursorMark();
- SolrDocumentList docs = response.getResults();
- System.out.println("Docsize is " + docs.size() + " NumFound is " + docs.getNumFound());
- for (SolrDocument doc : docs) {
- String id = getStr(facettedServer.getIdField(), doc);
- Map<String, Object> fieldModifier;
- SolrInputDocument sdoc = new SolrInputDocument();
- sdoc.setField(facettedServer.getIdField(), id);
- ArrayList<String> facetFieldSourceFields = facetFieldToSourceFieldsMapping.get(facetField);
- //The facet field is not different, so we are changing the value, but NOT the field.
- //Go through every possible source field, for the particular copy field destination that is facetField.
- //For every source, see if it has the facetFieldValue we are looking for
- //If it does, remove the old value
- //an additional check has to be made to see if we are dealing with a multi valued list. If we are, check each value for the old facet value
- //If it is a multi valued list, grab the whole list, remove the unwanted value, and then do a set.
- for (String facetFieldSourceField : facetFieldSourceFields) {
- if (facetFieldSourceField.endsWith("strs")) {
- ArrayList<String> solrDocOldFacetFieldSourceFieldValue = (ArrayList) (doc.getFieldValue(facetFieldSourceField));
- if (solrDocOldFacetFieldSourceFieldValue != null && solrDocOldFacetFieldSourceFieldValue.contains(facetFieldValue)) {
- while (solrDocOldFacetFieldSourceFieldValue.contains(facetFieldValue)) {
- solrDocOldFacetFieldSourceFieldValue.remove(facetFieldValue);//remove ALL occurances of it
- }
- fieldModifier = new HashMap<>();
- fieldModifier.put("set", solrDocOldFacetFieldSourceFieldValue);
- sdoc.addField(facetFieldSourceField, fieldModifier);
- fieldModifier = new HashMap();
- sdocs.add(sdoc);
- }
- } else {
- String solrDocOldFacetFieldSourceFieldValue = getStr(facetFieldSourceField, doc);
- if (solrDocOldFacetFieldSourceFieldValue != null && solrDocOldFacetFieldSourceFieldValue.equals(facetFieldValue)) {
- fieldModifier = new HashMap<>();
- fieldModifier.put("set", null);
- sdoc.addField(facetFieldSourceField, fieldModifier);
- fieldModifier = new HashMap();
- sdocs.add(sdoc);
- }
- }
- }
- }
- if (sdocs.size() >= rows) {
- System.out.println("Adding batch of " + rows + " documents to solr");
- facettedServer.add(sdocs);
- System.out.println("Committing");
- facettedServer.commit();
- sdocs.clear();
- }
- if (cursorMark.equals(nextCursorMark)) {
- done = true;
- }
- cursorMark = nextCursorMark;
- }
- if (sdocs.size() > 0) {
- System.out.println("Adding final batch of " + sdocs.size() + " documents to solr");
- facettedServer.add(sdocs);
- System.out.println("Committing");
- facettedServer.commit();
- sdocs.clear();
- }
- } catch (Throwable th) {
- System.out.println("Something bad happened.");
- th.printStackTrace();
- try {
- facettedServer.close();
- } catch (IOException ex) {
- logger.error("ERROR", ex);
- }
- }
- try {
- facettedServer.close();
- } catch (IOException ex) {
- logger.error("ERROR", ex);
- }
- try {
- //now delete form back end using Joosep's Modifier class.
- Modifier modifier = new Modifier("CleanedTagsDb", "ParsedDb");
- modifier.delete(facetFieldValue);
- } catch (Throwable ex) {
- System.out.println("WARNING: FAILED TO DELETE FROM BACK END");
- }
- return;
- }
- }
- public String[] cleanUpOldFacetValues(String[] oldFacetValuesParam) {
- String[] oldFacetValues = null;
- if (oldFacetValuesParam != null) {
- if (oldFacetValuesParam.length > 1) {
- oldFacetValues = oldFacetValuesParam; //if for some reason we have more than one paramter, it must be because we got the thing working correctly so im assuming it's safe to just call it done here
- } else if (oldFacetValuesParam.length == 1) {
- //The string in question , in the first index of this, should look like : ['value1','value2','value3']
- //OR if it's a single value it'll look like : ['value1'];
- //Either way lets get rid of the braces by doing a substring
- //We gotta parse that out into an array, so see if there is a split that can be made with "','" and if there isn't, then we know it's just one value.
- String valueStringToParse = oldFacetValuesParam[0];
- System.out.println("Old values with brackets and apostrophes -> " + valueStringToParse);
- valueStringToParse = valueStringToParse.substring(1, valueStringToParse.length() - 1); //gets ride of the braces
- //We should now have something like this : 'value1','value2','value3'
- System.out.println("Cleaned up, no brackets, still apostrophes -> " + valueStringToParse);
- String[] possibleValues = valueStringToParse.split("', '");
- if (possibleValues.length == 1) {
- //So we split and it still stayed at size one, meaning we only have one entry in the area which looks like : 'value1'.
- //So we'll clean it up and put it in it's own new shiny array. Do another substring to remove the apostrophes
- String value = possibleValues[0];
- System.out.println("Single value of -> " + value);
- value = value.substring(1, value.length() - 1);
- System.out.println("Single value with no apostrophes of -> " + value);
- oldFacetValues = new String[]{value};
- } else {
- //So we split and we actually got more than one thing, the array should look like this : ["'value1", "value2", "value3'"]
- //So again I think we're good by just cleaning up the apostrophes,which should only exist in the first and last value of the array, but we'll check each one anyways;
- ArrayList<String> cleanedUpValues = new ArrayList<>();
- System.out.println("Multiple values found in!");
- for (String possibleValue : possibleValues) {
- System.out.println("One of the values, no braces still apostrophoes maybe -> " + possibleValue);
- System.out.println(possibleValue);
- if (possibleValue.charAt(0) == '\'') {
- possibleValue = possibleValue.substring(1, possibleValue.length()); //gets rid of the first character if it is an apostrophe (can't do regex to remove all as there might be valid apostrophes in the tag)
- }
- if (possibleValue.charAt(possibleValue.length() - 1) == '\'') {
- possibleValue = possibleValue.substring(0, possibleValue.length() - 1);//gets rid of the last character if it is an apostrophe
- }
- System.out.println("One of the values, no more apostrophes (if there were any) -> " + possibleValue);
- cleanedUpValues.add(possibleValue);
- oldFacetValues = cleanedUpValues.toArray(new String[]{});
- }
- }
- }
- }
- return oldFacetValues;
- }
- public JSONObject modifyFacet(HttpServletRequest request) throws SolrServerException, IOException, JSONException {
- JSONObject response = new JSONObject();
- String nameOfContextFile = "context_" + this.getClass().getSimpleName() + ".xml";
- ApplicationContext applicationContext = new ClassPathXmlApplicationContext(nameOfContextFile);
- String solrCollectionDbName = applicationContext.getBean("solrCollectionDbName", String.class);
- CloudSolrClient facettedServer = new CloudSolrClient("localhost:11093");
- facettedServer.setDefaultCollection((solrCollectionDbName));
- ArrayList<String> specificFacetFieldSourceValues;
- Map<String, ArrayList<String>> facetFieldToSourceFieldsMapping = new HashMap<>();
- facetFieldToSourceFieldsMapping.put("person_display", new ArrayList<String>()); //These 4 are copy fields that have multiple sources.
- facetFieldToSourceFieldsMapping.put("geography_display", new ArrayList<String>());
- facetFieldToSourceFieldsMapping.put("concept_display", new ArrayList<String>());
- facetFieldToSourceFieldsMapping.put("organization_display", new ArrayList<String>());
- specificFacetFieldSourceValues = new ArrayList<String>(1); //The ones below here just have 1 specific source so I'll make the mapping here
- specificFacetFieldSourceValues.add("clusterName");
- facetFieldToSourceFieldsMapping.put("clusterName_display", specificFacetFieldSourceValues);
- specificFacetFieldSourceValues = new ArrayList<String>(1); //The ones below here just have 1 specific source so I'll make the mapping here
- specificFacetFieldSourceValues.add("ind_author_str");
- facetFieldToSourceFieldsMapping.put("ind_author_str", specificFacetFieldSourceValues);
- specificFacetFieldSourceValues = new ArrayList<String>(1);
- specificFacetFieldSourceValues.add("ind_sender_strs");
- facetFieldToSourceFieldsMapping.put("ind_sender_strs", specificFacetFieldSourceValues);
- specificFacetFieldSourceValues = new ArrayList<String>(1);
- specificFacetFieldSourceValues.add("ind_recipient_strs");
- facetFieldToSourceFieldsMapping.put("ind_recipient_strs", specificFacetFieldSourceValues);
- specificFacetFieldSourceValues = new ArrayList<String>(1);
- specificFacetFieldSourceValues.add("art_type_strs");
- facetFieldToSourceFieldsMapping.put("art_type_strs", specificFacetFieldSourceValues);
- specificFacetFieldSourceValues = new ArrayList<String>();
- Map<String, String> facetFieldToSuffixMapping = new HashMap<String, String>();
- facetFieldToSuffixMapping.put("person_display", "ind_facetEdittedInd_strs");
- facetFieldToSuffixMapping.put("geography_display", "geo_facetEdittedGeo_strs");
- facetFieldToSuffixMapping.put("concept_display", "con_facetEdittedCon_strs");
- facetFieldToSuffixMapping.put("organization_display", "org_facetEdittedOrg_strs");
- facetFieldToSuffixMapping.put("clusterName_display", "clusterName");
- facetFieldToSuffixMapping.put("ind_author_str", "ind_author_str");
- facetFieldToSuffixMapping.put("ind_sender_strs", "ind_sender_strs");
- facetFieldToSuffixMapping.put("ind_recipient_strs", "ind_recipient_strs");
- facetFieldToSuffixMapping.put("art_type_strs", "art_type_strs");
- /*
- I know which destionation fields stuff goes into so using the lukeRequest to get back all the fields, I look at the prefix and create a mapping
- that will tell me what destionation field they go to.
- */
- LukeRequest lukeRequest = new LukeRequest();
- lukeRequest.setNumTerms(0);
- LukeResponse lukeResponse = lukeRequest.process(facettedServer);
- Map<String, FieldInfo> fieldInfoMap = lukeResponse.getFieldInfo();
- for (Entry<String, FieldInfo> entry : fieldInfoMap.entrySet()) {
- String fieldName = entry.getKey();
- //Place each source field for the _display facetFields, into the mapping
- if (fieldName.startsWith("ind_")) {
- facetFieldToSourceFieldsMapping.get("person_display").add(fieldName);
- } else if (fieldName.startsWith("org_")) {
- facetFieldToSourceFieldsMapping.get("organization_display").add(fieldName);
- } else if (fieldName.startsWith("con_")) {
- facetFieldToSourceFieldsMapping.get("concept_display").add(fieldName);
- } else if (fieldName.startsWith("geo_")) {
- facetFieldToSourceFieldsMapping.get("geography_display").add(fieldName);
- }
- }
- facettedServer.close();
- for (Entry<String, ArrayList<String>> facetFieldsToSourceFieldsMapping : facetFieldToSourceFieldsMapping.entrySet()) {
- String facetField = facetFieldsToSourceFieldsMapping.getKey();
- ArrayList<String> sourceFields = facetFieldsToSourceFieldsMapping.getValue();
- System.out.println(facetField + " -> " + sourceFields);
- }
- String username = request.getParameter("username");
- String action = request.getParameter("action");
- String rows = request.getParameter("rows");
- int rowsInt;
- if (rows != null) {
- rowsInt = Integer.valueOf(rows);
- } else {
- rowsInt = 1000;
- }
- if (action.equals("modify")) {
- System.out.println("Running modify");
- String[] oldFacetValuesParam = request.getParameterValues("values");
- String[] oldFacetValues = cleanUpOldFacetValues(oldFacetValuesParam);
- if (oldFacetValues == null | oldFacetValues.length < 1) {
- System.out.println("No facet value was detected, error");
- return (new JSONObject().put("ERROR", "NO FACET VALUE DETECTED"));
- }
- String newFacetValue = request.getParameter("editValue");
- String oldFacetField = request.getParameter("field");
- String newFacetField = request.getParameter("editField");
- for (String oldFacetValue : oldFacetValues) {
- FacetEdittingWorkerThread facetEdittingWorkerThread = new FacetEdittingWorkerThread(oldFacetValue, newFacetValue, oldFacetField, newFacetField, solrCollectionDbName, username, facetFieldToSourceFieldsMapping, facetFieldToSuffixMapping, rowsInt);
- facetEdittingWorkerThread.start();
- }
- } else if (action.equals("delete")) {
- System.out.println("Running delete");
- String facetField = request.getParameter("field");
- String[] oldFacetValuesParam = request.getParameterValues("values");
- String[] oldFacetValues = cleanUpOldFacetValues(oldFacetValuesParam);
- if (oldFacetValues == null | oldFacetValues.length < 1) {
- System.out.println("No facet value was detected, error");
- return (new JSONObject().put("ERROR", "NO FACET VALUE DETECTED"));
- }
- for (String oldFacetValue : oldFacetValues) {
- FacetDeletingWorkerThread facetDeletingWorkerThread = new FacetDeletingWorkerThread(facetField, oldFacetValue, solrCollectionDbName, username, facetFieldToSourceFieldsMapping, facetFieldToSuffixMapping, rowsInt);
- facetDeletingWorkerThread.start();
- }
- }
- response.put("success", "Successfully processed and completed the modify facet request");
- return response;
- }
- private JSONObject fileNavigationRequest(HttpServletRequest request) throws JSONException, SolrServerException, IOException, ParseException {
- JSONObject response = new JSONObject();
- String nameOfContextFile = "context_" + this.getClass().getSimpleName() + ".xml";
- ApplicationContext applicationContext = new ClassPathXmlApplicationContext(nameOfContextFile);
- /*
- OK!!! When given a forward slash ("/") as a filePath here's what I have to do.
- Find out what the user's root folder is (for testing right now it's just jjones)
- put that behidn all fildpaths i use to be able to get stuff at S3 (So dont query s3 for /blah.txt but jjones/blah.txt
- DO NOT SHOW jjones to james, when I respond back he's just gonna see "/blah.txt"
- */
- String name = request.getParameter("name");
- String user_id = request.getParameter("user_id");
- String filepath = request.getParameter("filepath");
- if (filepath.equals("/")) {
- filepath = user_id;
- } else {
- filepath = user_id + filepath;
- }
- String artifactId = request.getParameter("id");
- if (artifactId == null) {
- artifactId = filepath;
- }
- //P3 hoops!
- String q = request.getParameter("query");
- JSONObject queryFromJames = new JSONObject(q);
- JSONArray mustJSONArray = queryFromJames.getJSONObject("query").getJSONObject("function_score").getJSONObject("query").getJSONObject("bool").getJSONObject("filter").getJSONObject("bool").getJSONArray("must");
- if (!filepath.endsWith(".email")) { //emails don't need this extra match, they just go straight for id or parent email id
- JSONObject matchForPath = new JSONObject();
- if (filepath.contains(user_id + "/P3")) {
- matchForPath.put("match", new JSONObject().put("art_s3FileParentFolderPath_pth.keyword", "P3/" + filepath));
- } else {
- matchForPath.put("match", new JSONObject().put("art_s3FileParentFolderPath_pth.keyword", filepath));
- }
- mustJSONArray.put(matchForPath);
- }
- JSONArray jsonResponseForOutputArray = new JSONArray();
- JSONArray jsonResponseForFolderOutputArray = new JSONArray();
- if (filepath.endsWith(".email")) {
- //only show files as emails cannot contain folder attachments that aren't zips, which are files.
- JSONObject matchForParentEmailId = new JSONObject().put("match", new JSONObject().put("art_parentEmailId_str", artifactId));
- JSONObject matchForId = new JSONObject().put("match", new JSONObject().put("id", artifactId));
- JSONArray orMatches = new JSONArray();
- orMatches.put(matchForParentEmailId);
- orMatches.put(matchForId);
- JSONObject boolForOrMatches = new JSONObject();
- boolForOrMatches.put("bool", new JSONObject().put("should", orMatches));
- mustJSONArray.put(boolForOrMatches);
- //Query it once to get just the number of things found, then update size in the query to be that number
- HttpPost queryPost = elasticSearchConnection.getHttpPostQueryFromJsonString(queryFromJames.toString());
- JSONObject postResponseJsonObject = new JSONObject(elasticSearchConnection.postToDb(queryPost));
- int hits = postResponseJsonObject.getJSONObject("hits").getInt("total");
- queryFromJames.put("size", hits);
- queryPost = elasticSearchConnection.getHttpPostQueryFromJsonString(queryFromJames.toString());
- String postResponse = elasticSearchConnection.postToDb(queryPost);
- JSONArray parsedResults = parsePostResponse(postResponse);
- logger.debug("@@@@@@@@@@@@@@@@@@@@ Found " + hits + " results for attachments in the email with the filepath of " + filepath + ". \n\n@@@@@@@@@@@@@@@@@@@@ Query Object used was : " + queryFromJames);
- for (int i = 0; i < parsedResults.length(); i++) {
- JSONObject parsedResult = parsedResults.getJSONObject(i);
- JSONObject jsonResponseForThisDatabaseRow = parseAttachmentDatabaseRowForFileNavToJson(parsedResult, name, user_id);
- if (jsonResponseForThisDatabaseRow != null) {
- jsonResponseForOutputArray.put(jsonResponseForThisDatabaseRow);
- logger.trace("EMAIL RESPONSE -> " + jsonResponseForThisDatabaseRow);
- }
- }
- //sort files in the folder based on filename.....
- jsonResponseForOutputArray = this.sortOnFileName(jsonResponseForOutputArray);
- response.put("from", "");
- } else {
- //folders and files that aren't coming from a parent email or parent pst
- //this setup for the query is just to get the list of all folders with that user. Later, the query is changed to what the user gave
- JSONObject facetQuery = new JSONObject(q);
- JSONArray mustJSONArrayForFacet = facetQuery.getJSONObject("query").getJSONObject("function_score").getJSONObject("query").getJSONObject("bool").getJSONObject("filter").getJSONObject("bool").getJSONArray("must");
- JSONObject wildCardForPath = new JSONObject();
- if (filepath.contains(user_id + "/P3")) {
- wildCardForPath.put("wildcard", new JSONObject().put("art_s3FileParentFolderPath_pth.keyword", "P3/" + filepath + "*"));
- } else {
- wildCardForPath.put("wildcard", new JSONObject().put("art_s3FileParentFolderPath_pth.keyword", filepath + "*"));
- }
- mustJSONArrayForFacet.put(wildCardForPath);
- JSONObject countSortDesc = new JSONObject().put("_count", "desc");
- JSONObject terms = new JSONObject();
- terms.put("field", "art_s3FileParentFolderPath_pth.keyword");
- terms.put("order", countSortDesc);
- terms.put("min_doc_count", 1);
- terms.put("size", Integer.MAX_VALUE);//wholllllleeeee lottttaaa facets
- JSONObject facet = new JSONObject();
- facet.put("terms", terms);
- JSONObject aggs = new JSONObject();
- aggs.put("art_s3FileParentFolderPath_pth", facet);
- facetQuery.put("aggs", aggs);
- //SO just getting facet inforation for the sake of folders
- logger.debug("The query used to get the facet information for folders is " + facetQuery);
- HttpPost queryPost = elasticSearchConnection.getHttpPostQueryFromJsonString(facetQuery.toString());
- JSONObject postResponseJsonObject = new JSONObject(elasticSearchConnection.postToDb(queryPost));
- JSONArray jsonArrayResponseForThisFacetField = parseFolderFacetFieldForFileNavTojson(postResponseJsonObject, filepath, user_id);
- jsonResponseForFolderOutputArray = (jsonArrayResponseForThisFacetField);
- logger.trace("INITIAL FOLDER RESPONSE -> " + jsonArrayResponseForThisFacetField);
- //now the actual query with data and whatnot
- int from = queryFromJames.getInt("from");
- int rows = 100;
- queryFromJames.put("size", rows);
- queryPost = elasticSearchConnection.getHttpPostQueryFromJsonString(queryFromJames.toString());
- String postResponse = elasticSearchConnection.postToDb(queryPost);
- JSONArray parsedResults = parsePostResponse(postResponse);
- Integer numFound = parsedResults.length();
- logger.debug("@@@@@@@@@@@@@@@@@@@@ Found " + numFound + " results. \n@@@@@@@@@@@@@@@@@@@@ Query Object used was : " + queryFromJames);
- for (int i = 0; i < parsedResults.length(); i++) {
- JSONObject databaseRow = parsedResults.getJSONObject(i);
- String isEmail;
- if (databaseRow.isNull("art_isEmail_str")) {
- isEmail = null;
- } else {
- isEmail = databaseRow.getString("art_isEmail_str");
- }
- String artifactType = databaseRow.getString("art_artifactType_str");
- if (isEmail != null && isEmail.equals("true")) { //add the emails as additional folders to the folder output array
- JSONObject jsonResponseForThisDatabaseRow = parseEmailAsFolderForFileNavToJson(databaseRow, user_id);
- addAdditionalDataForSidePanelToJsonResponseForDatabaseRow(jsonResponseForThisDatabaseRow, databaseRow);
- if (jsonResponseForThisDatabaseRow != null) {
- jsonResponseForFolderOutputArray.put(jsonResponseForThisDatabaseRow);
- logger.trace("EMAIL AS FOLDER RESPONSE -> " + jsonResponseForThisDatabaseRow);
- }
- } else if (artifactType.equals("attachment")) {
- /*
- Grab the parent id of the attachment
- Query for that document
- parseEmailDocAsFolderForFileNavToJson on that email thing
- add it to the jsonResponseForFolderOutputArray
- */
- String parentEmailId = databaseRow.getString("art_parentEmailId_str");
- JSONObject miniQuery = new JSONObject().put("query", new JSONObject().put("match", new JSONObject().put("id", parentEmailId)));
- logger.trace("The query used to get the email thing is " + miniQuery);
- queryPost = elasticSearchConnection.getHttpPostQueryFromJsonString(miniQuery.toString());
- postResponseJsonObject = new JSONObject(elasticSearchConnection.postToDb(queryPost));
- parsedResults = parsePostResponse(postResponse);
- //Should only be 1 response so!
- JSONObject emailThing = parsedResults.getJSONObject(0);
- JSONObject jsonResponseForThisDatabaseRow = parseEmailAsFolderForFileNavToJson(emailThing, user_id);
- addAdditionalDataForSidePanelToJsonResponseForDatabaseRow(jsonResponseForThisDatabaseRow, emailThing);
- if (jsonResponseForThisDatabaseRow != null) {
- jsonResponseForFolderOutputArray.put(jsonResponseForThisDatabaseRow);
- logger.trace("ATTACHMENT FOLDER RESPONSE -> " + jsonResponseForThisDatabaseRow);
- }
- } else {
- JSONObject jsonResponseForThisDatabaseRow = parseForFileNavToJson(databaseRow, user_id);
- addAdditionalDataForSidePanelToJsonResponseForDatabaseRow(jsonResponseForThisDatabaseRow, databaseRow);
- if (jsonResponseForThisDatabaseRow != null) {
- jsonResponseForOutputArray.put(jsonResponseForThisDatabaseRow);
- logger.trace("JUST A FILE RESPONSE -> " + jsonResponseForThisDatabaseRow);
- }
- }
- }
- //sort files in the folder based on filename.....
- jsonResponseForOutputArray = this.sortOnFileName(jsonResponseForOutputArray);
- if (rows > numFound){
- response.put("from", "");
- } else {
- response.put("from", from + rows);
- }
- }
- response.put("files", jsonResponseForOutputArray);
- logger.trace("FINAL JSON RESPONSE -> " + jsonResponseForOutputArray);
- response.put("folders", jsonResponseForFolderOutputArray);
- logger.trace("FINAL JSON FOLDER RESPONSE -> " + jsonResponseForFolderOutputArray);
- return response;
- }
- private static boolean skipBlankFiles(SolrDocument doc) {
- String art_xParsedBy_str = (String) doc.get("art_xParsedBy_str");
- if (art_xParsedBy_str != null && art_xParsedBy_str.equals("org.apache.tika.parser.EmptyParser")) {
- Long fileSize = (Long) doc.get("art_fileSize_int");
- if (fileSize != null && fileSize == 8) {
- //skip the empty files of size 8 that come from emails
- return true;
- }
- }
- return false;
- }
- Comparator<JSONObject> fileNameComparator = new Comparator<JSONObject>() {
- @Override
- public int compare(JSONObject o1, JSONObject o2) {
- if (o1 instanceof JSONObject && o2 instanceof JSONObject) {
- String s1 = "", s2 = "";
- try {
- s1 = (String) ((JSONObject) o1).get("name");
- s2 = (String) ((JSONObject) o2).get("name");
- return s1.compareTo(s2);
- } catch (JSONException ex) {
- ex.printStackTrace();
- }
- return 0;
- } else {
- return 0;
- }
- }
- };
- private JSONArray sortOnFileName(JSONArray files) {
- JSONArray out = new JSONArray();
- List<JSONObject> tmp = new ArrayList<>();
- for (int i = 0; i < files.length(); i++) {
- try {
- JSONObject file = (JSONObject) files.get(i);
- tmp.add(file);
- } catch (JSONException ex) {
- ex.printStackTrace();
- }
- }
- Collections.sort(tmp, fileNameComparator);
- for (JSONObject file : tmp) {
- out.put(file);
- }
- return out;
- }
- public class TagEdittingWorkerThread extends Thread {
- String fq, q, tagField, tagValue, user_id, action, solrCollectionDbName, dataBlob;
- int rows = 2500;
/**
 * Captures everything the tagging worker needs. The Solr field decorations are
 * stripped from the tag field name ("art_" prefix and "_strs" suffix) so the bare
 * field name is stored.
 */
public TagEdittingWorkerThread(String q, String tagField, String tagValue, String user_id, String action, String solrCollectionDbName, String dataBlob) {
    this.q = q;
    // Keep only the bare tag field name (drop the "art_" prefix and "_strs" suffix).
    this.tagField = tagField.replace("art_", "").replace("_strs", "");
    this.tagValue = tagValue;
    this.user_id = user_id;
    this.action = action;
    this.solrCollectionDbName = solrCollectionDbName;
    this.dataBlob = dataBlob;
}
- @Override
- public void run() {
- elasticSearchConnection.setIndex("queries");
- String queryId = user_id + "_" + tagValue;
- /*
- As a first step, attempt to add or delete the query this tag used in the facetted_queries collection
- */
- try {
- if (action.equals("add")) {
- // DateTimeFormatter dtf = ISODateTimeFormat.dateTimeNoMillis();
- // String creation_date = dtf.print(DateTime.now());
- // JSONObject dataBlobJson = new JSONObject(dataBlob);
- //
- // Datum querySave = new Datum(queryId);
- // querySave.add(DataService.CONTENT_TYPE.artifact, "visibility", DataService.DATA_TYPE.string, false, "private");
- // querySave.add(DataService.CONTENT_TYPE.artifact, "creationDate", DataService.DATA_TYPE.date, false, creation_date);
- // querySave.add(DataService.CONTENT_TYPE.individual, "creatorName", DataService.DATA_TYPE.string, false, user_id);
- // querySave.add(DataService.CONTENT_TYPE.artifact, "targetName", DataService.DATA_TYPE.string, false, "AUTO : {" + tagValue + "} by user {" + user_id + "}");
- // querySave.add(DataService.CONTENT_TYPE.artifact, "targetDescription", DataService.DATA_TYPE.string, false, "This is the query that was used by " + user_id + " to get the subset of documents that were tagged with the tag " + tagValue + ". The tagging was performed and saved on " + creation_date);
- // querySave.add(DataService.CONTENT_TYPE.artifact, "dataBlob", DataService.DATA_TYPE.string, false, unescapeJsonToStringString(dataBlobJson.toString()));
- // elasticSearchConnection.updateOrCreate(queryId, querySave, false);
- } else if (action.equals("remove")) {
- // elasticSearchConnection.deleteById(queryId);
- }
- } catch (Throwable th) {
- System.err.println("An error occured while attempting to save/delete the query that was used to generate this tag");
- th.printStackTrace();
- }
- elasticSearchConnection.setIndex("artifact");
- JSONObject query = new JSONObject();
- try {
- query = new JSONObject(q);
- } catch (JSONException ex) {
- java.util.logging.Logger.getLogger(ElasticSearch_Servlet.class.getName()).log(Level.SEVERE, null, ex);
- }
- int from = 0;
- int rows = 500;
- boolean done = false;
- while (!done) {
- try {
- query.put("from", from);
- query.put("size", rows);
- HttpPost queryPost = elasticSearchConnection.getHttpPostQueryFromJsonString(query.toString());
- String postResponse = elasticSearchConnection.postToDb(queryPost);
- JSONArray parsedResults = parsePostResponse(postResponse);
- int numFound = parsedResults.length();
- System.out.println("Tagging batch of rows starting from number " + from + " out of " + numFound);
- for (int i = 0; i < parsedResults.length(); i++) {
- JSONObject parsedResult = parsedResults.getJSONObject(i);
- String id = parsedResult.getString("id");
- JSONArray tagFieldValues = null;
- if (parsedResult.has(tagField) && parsedResult.get(tagField) != null) {
- tagFieldValues = parsedResult.getJSONArray(tagField);
- } else {
- tagFieldValues = new JSONArray();
- }
- System.out.println("GOT TAG ARRAY OF -> " + parsedResult);
- Datum datum = new Datum(id);
- if (action.equals("add")) {
- datum.add(DataService.CONTENT_TYPE.artifact, tagField, DataService.DATA_TYPE.string, true, tagValue);
- elasticSearchConnection.updateOrCreate(id, datum, false);
- } else if (action.equals("remove")) {
- System.out.println("Removing " + tagValue + " from " + tagFieldValues);
- JSONArray forPrintOut = new JSONArray();
- for (int j = 0; j < tagFieldValues.length(); j++) {
- String tagFieldValue = tagFieldValues.getString(j);
- if (tagFieldValue.equals(tagValue)){
- continue; //Do not add if it's the one we are trying to remove.
- }
- else {
- forPrintOut.put(tagFieldValue);
- datum.add(DataService.CONTENT_TYPE.artifact, tagField, DataService.DATA_TYPE.string, true, tagFieldValue);
- }
- }
- System.out.println("New JSONArray of tags is " + forPrintOut);
- elasticSearchConnection.updateOrCreate(id, datum, true); //Have to overwrite multi-valued field becuase i'm giving it less than what it has.
- }
- }
- if (rows > numFound) {
- done = true;
- } else {
- from = from + rows;
- done = false;
- }
- } catch (JSONException ex) {
- java.util.logging.Logger.getLogger(ElasticSearch_Servlet.class.getName()).log(Level.SEVERE, null, ex);
- } catch (ParseException ex) {
- java.util.logging.Logger.getLogger(ElasticSearch_Servlet.class.getName()).log(Level.SEVERE, null, ex);
- }
- }
- }
- }
- public JSONObject editTag(HttpServletRequest request) throws JSONException {
- JSONObject response = new JSONObject();
- String q = request.getParameter("query");
- String tagField = request.getParameter("tagField");
- String tagValue = request.getParameter("tagValue");
- String user_id = request.getParameter("user_id");
- String action = request.getParameter("action");
- String dataBlob = request.getParameter("facetData");
- String nameOfContextFile = "context_" + this.getClass().getSimpleName() + ".xml";
- ApplicationContext applicationContext = new ClassPathXmlApplicationContext(nameOfContextFile);
- String solrCollectionDbName = applicationContext.getBean("solrCollectionDbName", String.class);
- TagEdittingWorkerThread tagEdittingWorkerThread = new TagEdittingWorkerThread(q, tagField, tagValue, user_id, action, solrCollectionDbName, dataBlob);
- tagEdittingWorkerThread.start();
- response.put("success", "Done with the tag editting requesting.");
- return response;
- }
- public class UserActionWorkerThread extends Thread {
- String action, username, userId, type, userMappingCollectionName, newUserBucketName, sampleFilesBucketName, sampleFilesFolderPath;
- int rows = 2500;
- public UserActionWorkerThread(String action, String username, String userId, String type, String userMappingCollectionName, String newUserBucketName, String sampleFilesBucketName, String sampleFilesFolderPath) {
- this.action = action;
- this.username = username;
- this.userId = userId;
- this.type = type;
- this.userMappingCollectionName = userMappingCollectionName;
- this.newUserBucketName = newUserBucketName;
- this.sampleFilesBucketName = sampleFilesBucketName;
- this.sampleFilesFolderPath = sampleFilesFolderPath;
- }
- @Override
- public void run() {
- String success;
- JSONObject response = new JSONObject();
- System.out.println("Connecting to solr");
- CloudSolrClient facettedServer = new CloudSolrClient("localhost:11093");
- System.out.println("Setting default collection to {" + userMappingCollectionName + "}");
- facettedServer.setDefaultCollection(userMappingCollectionName);
- if (username == null) {
- success = "Failed. No username provided.";
- } else if (action == null) {
- success = "Failed. No action provided.";
- } else {
- boolean isDemo = false;
- if (type != null && type.equals("demo")) {
- isDemo = true;
- }
- switch (action) {
- case "create": {
- DateTime now = DateTime.now();
- System.out.println("User creation started at " + now);
- System.out.println("Creating SolrInputDocument for the new user to be put into " + userMappingCollectionName);
- SolrInputDocument sdoc = new SolrInputDocument();
- sdoc.setField("art_sstUserId_str", username);
- if (userId != null) {
- sdoc.setField("art_s3UserFolder_str", "MBS-" + userId);
- } else {
- sdoc.setField("art_s3UserFolder_str", username);
- }
- sdoc.setField("art_type_str", type);
- String id = String.valueOf(sdoc.toString().hashCode());
- sdoc.setField("id", id);
- System.out.println("Document was created, here it is \n" + sdoc);
- System.out.println("Adding said document");
- try {
- facettedServer.add(sdoc);
- System.out.println("Committing said document");
- facettedServer.commit();
- } catch (SolrServerException | IOException ex) {
- java.util.logging.Logger.getLogger(Servlet.class.getName()).log(Level.SEVERE, null, ex);
- }
- System.out.println("Moving over sample data");
- String query = "art_username_str:enronDemo";
- String bucket = "sst-test.output";
- UserCreator userCreator = new UserCreator();
- userCreator.createNewUserByQueryNoFileCopy(query, username, bucket);
- userCreator.close();
- System.out.println("Finished moving over sample data, and correcting the data.");
- DateTime then = DateTime.now();
- System.out.println("User creation finished at " + then);
- Long timeSpent = then.getMillis() - now.getMillis();
- System.out.println("User creation took " + (timeSpent / 1000) + " seconds OR " + ((timeSpent / 1000) / 60) + " minutes");
- }
- success = "Created " + username + (isDemo ? " as demo account" : "");
- break;
- case "modify": {
- //To be determined what to switch it to.
- SolrInputDocument sdoc = new SolrInputDocument();
- System.out.println("Flipping user type for user " + username);
- sdoc.setField("id", username);
- Map<String, Object> fieldModifier = new HashMap<>();
- if (isDemo) {
- fieldModifier.put("set", "not-demo");
- } else {
- fieldModifier.put("set", "demo");
- }
- sdoc.setField("art_type_str", fieldModifier);
- System.out.println("Adding and committing");
- try {
- facettedServer.add(sdoc);
- facettedServer.commit();
- } catch (SolrServerException | IOException ex) {
- java.util.logging.Logger.getLogger(Servlet.class.getName()).log(Level.SEVERE, null, ex);
- }
- }
- success = "Modified " + username + (isDemo ? " to be a demo account" : " to not be a demo account");
- break;
- case "delete": {
- System.out.println("Deleting user by calling deleteById in solr for user " + username);
- try {
- facettedServer.deleteById(username);
- System.out.println("Committing delete");
- facettedServer.commit();
- } catch (SolrServerException | IOException ex) {
- java.util.logging.Logger.getLogger(Servlet.class.getName()).log(Level.SEVERE, null, ex);
- }
- //Add the bit to delete the stuff from their s3 folder.
- S3Extractor s3Utils = new S3Extractor();
- s3Utils.deleteFileOrFolder(newUserBucketName, username + "/");
- }
- success = "Deleted " + username;
- break;
- default:
- success = "Failed. Unrecognized action '" + action + "'";
- }
- }
- System.out.println("Closing connection to solr");
- try {
- facettedServer.close();
- } catch (IOException ex) {
- java.util.logging.Logger.getLogger(Servlet.class.getName()).log(Level.SEVERE, null, ex);
- }
- }
- }
- /**
- * @param request
- * @return
- */
- public JSONObject userAction(HttpServletRequest request) throws JSONException, SolrServerException, IOException {
- String nameOfContextFile = "context_" + this.getClass().getSimpleName() + ".xml";
- ApplicationContext applicationContext = new ClassPathXmlApplicationContext(nameOfContextFile);
- String userMappingCollectionName = applicationContext.getBean("solrUserMappingCollectionDbName", String.class);
- String newUserBucketName = applicationContext.getBean("newUserBucketName", String.class);
- String sampleFilesBucketName = applicationContext.getBean("sampleFilesBucketName", String.class);
- String sampleFilesFolderPath = applicationContext.getBean("sampleFilesFolderPath", String.class);
- String action = request.getParameter("command");
- String username = request.getParameter("username");
- String type = request.getParameter("type");
- String userId = request.getParameter("userId");
- UserActionWorkerThread userActionWorkerThread = new UserActionWorkerThread(action, username, userId, type, userMappingCollectionName, newUserBucketName, sampleFilesBucketName, sampleFilesFolderPath);
- userActionWorkerThread.start();
- JSONObject response = new JSONObject();
- response.put("success", "Successfully took in the request for user action");
- logger.debug("RESPONSE: " + response);
- return response;
- }
- public JSONObject exportFiles(HttpServletRequest request) throws JSONException, ConversionException {
- String tagField = request.getParameter("tagField");
- String tag = request.getParameter("tagValue");
- String userName = request.getParameter("user_id");
- String folderName = request.getParameter("folderName");
- String email = request.getParameter("email");
- logger.info("Exporting tag '" + tag + "'");
- String nameOfContextFile = "context_" + this.getClass().getSimpleName() + ".xml";
- ApplicationContext applicationContext = new ClassPathXmlApplicationContext(nameOfContextFile);
- Exporter exporter = applicationContext.getBean("exporter", Exporter.class);
- exporter.setTag(tag);
- URL presignedURL = null;
- try {
- presignedURL = exporter.export(userName, folderName);
- if (presignedURL == null) {
- logger.error("Could not convert & upload documents for '" + tag + "'");
- JSONObject failure = new JSONObject();
- failure.put("status", "failed to convert and upload documents for tag {" + tag + "}");
- return failure;
- } else {
- System.out.println(tag + "\t" + presignedURL);
- }
- } catch (ConversionException e) {
- logger.error("Error setting up pdf conversion.", e);
- }
- logger.info("Sending email");
- String messageSubject = "Your tag has been exported. Here is the link to where you can download the files.";
- String messageBody = "Click on the link below to be able to download the files that have been exported using the tag of {" + tag + "}"
- + "\n" + presignedURL;
- sendEmail(email, messageSubject, messageBody);
- JSONObject success = new JSONObject();
- success.put("status", "success");
- return success;
- }
- public void sendEmail(String toEmail, String messageSubject, String messageBody) {
- String username = "donotreply@cfday.net";
- final String password = "1966GTO!";
- // Get system properties
- Properties props = new Properties();
- props.put("mail.smtp.auth", "true");
- props.put("mail.smtp.starttls.enable", "true");
- props.put("mail.smtp.host", "smtp.office365.com");//email-smtp.us-east-1.amazonaws.com");
- props.put("mail.smtp.port", "587");
- Session session = Session.getInstance(props,
- new javax.mail.Authenticator() {
- protected PasswordAuthentication getPasswordAuthentication() {
- return new PasswordAuthentication(username, password);
- }
- });
- try {
- Message message = new MimeMessage(session);
- message.setFrom(new InternetAddress("DoNotReply@cfday.net"));
- message.setRecipients(Message.RecipientType.TO,
- InternetAddress.parse(toEmail));
- message.setSubject(messageSubject);
- message.setText(messageBody);
- Transport.send(message);
- System.out.println("Done");
- } catch (MessagingException e) {
- throw new RuntimeException(e);
- }
- }
- public JSONObject showEmailChain(HttpServletRequest request) throws SolrServerException, IOException {
- String emailId = request.getParameter("emailId");
- String username = request.getParameter("usernName");
- String whatToGiveBack = request.getParameter("whatToGiveBack");
- String nameOfContextFile = "context_" + this.getClass().getSimpleName() + ".xml";
- ApplicationContext applicationContext = new ClassPathXmlApplicationContext(nameOfContextFile);
- String solrCollectionDbName = applicationContext.getBean("solrCollectionDbName", String.class);
- JSONObject response = new JSONObject();
- System.out.println("Connecting to solr");
- CloudSolrClient facettedServer = new CloudSolrClient("localhost:11093");
- System.out.println("Setting default collection to {" + solrCollectionDbName + "}");
- facettedServer.setDefaultCollection(solrCollectionDbName);
- String queryString;
- if (whatToGiveBack.equals("OnlyEmails")) {
- queryString = "+id:\"" + emailId + "\" +art_messageClass_str:\"IPM.Note\"";
- } else {
- queryString = "+id:\"" + emailId + "\"";
- }
- SolrQuery solrQuery = new SolrQuery(queryString);
- solrQuery.setFields("id", "_version_", "art_threadIndex_str", "art_threadTopic_str", "art_messageClass_str");
- QueryResponse qp = facettedServer.query(solrQuery);
- SolrDocumentList solrDocumentList = qp.getResults(); //should only have one thing in it, the one email
- SolrDocument solrDocumentOfThatEmail = solrDocumentList.get(0);
- String threadIndex = getStr("art_threadIndex_str", solrDocumentOfThatEmail);
- String threadTopic = getStr("art_threadTopic_str", solrDocumentOfThatEmail);
- /*
- Now that we have the threadIndex and topic, use Joosep's code to get back a list of SolrDocuments that represents that email chain.
- */
- ArrayList<SolrDocument> emailChain = null; //Call Joosep's code.
- return response;
- }
- public JSONObject caseAndCustodian(HttpServletRequest request) throws JSONException {
- JSONObject response = new JSONObject();
- String email = request.getParameter("email");
- String userName = request.getParameter("user_id");
- String caseString = request.getParameter("case");
- String custodianString = request.getParameter("custodian");
- String fileName = request.getParameter("fileName");
- if (fileName == null) {
- fileName = "TestFile";
- }
- String pathForUpload = userName + "/" + caseString + "/" + custodianString + "/" + fileName;
- logger.info("Generating upload url for path '" + pathForUpload + "'");
- String nameOfContextFile = "context_" + this.getClass().getSimpleName() + ".xml";
- ApplicationContext applicationContext = new ClassPathXmlApplicationContext(nameOfContextFile);
- Exporter exporter = applicationContext.getBean("exporter", Exporter.class);
- URL presignedURL = null;
- try {
- presignedURL = exporter.generateUploadUrl(pathForUpload);
- } catch (Throwable e) {
- logger.error("Error generating url or uploading to path " + pathForUpload, e);
- }
- // logger.info("Sending email");
- // String messageSubject = "URL for uploading to S3";
- // String messageBody = "Click on the link below to upload a file to the path {" + pathForUpload + "}"
- // + "\n" + presignedURL;
- // sendEmail(email, messageSubject, messageBody);
- response.put("URL", presignedURL);
- return response;
- }
- public String processPutRequest(HttpServletRequest request, HttpServletResponse response) throws JSONException, IOException {
- /*
- You can't get stuff out of Put's like you can with POSTs so here's a bit of code to take the data and put it into a map.
- */
- BufferedReader br = new BufferedReader(new InputStreamReader(request.getInputStream()));
- String data = br.readLine();
- String[] keyValuesArray = data.split("&");
- Map<String, String> keyValuesMap = new HashMap<>();
- for (String keyValue : keyValuesArray) {
- String[] keyValueArray = keyValue.split("=");
- String key = URLDecoder.decode(keyValueArray[0], "UTF-8");
- String value = URLDecoder.decode(keyValueArray[1], "UTF-8");
- keyValuesMap.put(key, value);
- }
- System.out.println(keyValuesMap);
- String requestType = keyValuesMap.get("requestType");
- JSONObject responseObject = new JSONObject();
- String status = "no response";
- if (requestType == null) {
- responseObject.put("response", "ERROR : Response type is not set.");
- } else {
- responseObject.put("requestType", requestType);
- try {
- switch (requestType) {
- default:
- responseObject.put("response", "PUTs are not supported as of yet.");
- }
- } catch (Throwable th) {
- logger.error("Error while processing " + requestType + " request.", th);
- responseObject.put("error", th.getMessage());
- }
- }
- String encodedResponseString = responseObject.toString();
- encodedResponseString = unescapeJsonToStringString(encodedResponseString);
- logger.trace("RESPONSE: " + encodedResponseString);
- System.gc();
- return encodedResponseString;
- }
- public String processRequest(HttpServletRequest request, HttpServletResponse response) throws SolrServerException, IOException, JSONException {
- String requestType = request.getParameter("requestType");
- JSONObject responseObject = new JSONObject();
- String status = "no response";
- printOutHttpServletRequest(request);
- if (requestType == null) {
- responseObject.put("response", "ERROR : Response type is not set.");
- } else {
- responseObject.put("requestType", requestType);
- try {
- switch (requestType) {
- case "fileNav":
- responseObject.put("response", fileNavigationRequest(request));
- break;
- case "editQuery":
- responseObject.put("response", editQuery(request, requestType));
- break;
- case "modifyFacet":
- responseObject.put("response", modifyFacet(request));
- break;
- case "editTag":
- responseObject.put("response", editTag(request));
- break;
- case "userAction":
- responseObject.put("response", userAction(request));
- break;
- case "exportFiles":
- responseObject.put("response", exportFiles(request));
- break;
- case "showEmailChain":
- responseObject.put("response", showEmailChain(request));
- break;
- case "uploadLegalFiles":
- responseObject.put("response", caseAndCustodian(request));
- break;
- default:
- responseObject.put("response", "ERROR : Unrecognized request type '" + requestType + "'");
- }
- } catch (Throwable th) {
- logger.error("Error while processing " + requestType + " request.", th);
- responseObject.put("error", th.getMessage());
- }
- }
- String encodedResponseString = responseObject.toString();
- encodedResponseString = unescapeJsonToStringString(encodedResponseString);
- logger.trace("RESPONSE: " + encodedResponseString);
- System.gc();
- return encodedResponseString;
- }
- // <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code.">
- /**
- * Handles the HTTP <code>GET</code> method.
- *
- * @param request servlet request
- * @param response servlet response
- * @throws ServletException if a servlet-specific error occurs
- * @throws IOException if an I/O error occurs
- */
- @Override
- protected void doGet(HttpServletRequest request, HttpServletResponse response)
- throws ServletException, IOException {
- request.setCharacterEncoding("UTF-8");
- response.setContentType("text/html;charset=UTF-8");
- try {
- String encodedString = processRequest(request, response);
- try (PrintWriter out = response.getWriter()) {
- out.println(encodedString);
- }
- } catch (Throwable ex) {
- logger.error("Error while handling Get request.", ex);
- response.sendError(409, "Failed to process your get request. Check the back end logs for the stack trace");
- // TODO: { "message":"OK, why not use an actual JSON parser????", "background":"delirious screaming", "category":"mindless reinventing of stone age wheels." }
- String errorMessage = "{\n"
- + " \"errors\":\n"
- + " [{\"statusCode\":\"409\",\n"
- + " \"errorMessage\":\"Failed to process your get request. Check the back end logs for the stack trace.\"}]\n"
- + " }";
- try (PrintWriter out = response.getWriter()) {
- out.println(errorMessage);
- }
- }
- System.gc();
- }
- @Override
- protected void doPost(HttpServletRequest request, HttpServletResponse response)
- throws ServletException, IOException {
- request.setCharacterEncoding(("UTF-8"));
- response.setContentType("text/html;charset=UTF-8");
- try {
- String encodedString = processRequest(request, response);
- try (PrintWriter out = response.getWriter()) {
- out.println(encodedString);
- }
- } catch (Throwable ex) {
- logger.error("Error while handling Post request.", ex);
- response.sendError(409, "Failed to process your Post request. Check the back end logs for the stack trace");
- String errorMessage = "{\n"
- + " \"errors\":\n"
- + " [{\"statusCode\":\"409\",\n"
- + " \"errorMessage\":\"Failed to process your Post request. Check the back end logs for the stack trace.\"}]\n"
- + " }";
- try (PrintWriter out = response.getWriter()) {
- out.println(errorMessage);
- }
- }
- System.gc();
- }
- @Override
- protected void doPut(HttpServletRequest request, HttpServletResponse response)
- throws ServletException, IOException {
- request.setCharacterEncoding(("UTF-8"));
- response.setContentType("text/html;charset=UTF-8");
- try {
- String encodedString = processPutRequest(request, response);
- try (PrintWriter out = response.getWriter()) {
- out.println(encodedString);
- }
- } catch (Throwable ex) {
- logger.error("Error while handling Put request.", ex);
- response.sendError(409, "Failed to process your Put request. Check the back end logs for the stack trace");
- String errorMessage = "{\n"
- + " \"errors\":\n"
- + " [{\"statusCode\":\"409\",\n"
- + " \"errorMessage\":\"Failed to process your Put request. Check the back end logs for the stack trace.\"}]\n"
- + " }";
- try (PrintWriter out = response.getWriter()) {
- out.println(errorMessage);
- }
- }
- System.gc();
- }
- /**
- * Returns a short description of the Servlet.
- *
- * @return a String containing Servlet description
- */
- @Override
- public String getServletInfo() {
- return "Main SST Servlet that will handle facet-value modifications as well as saving targets, queries, and tags. Will likely do other stuff in the future as well.";
- }// </editor-fold>
- }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement