List of usage examples for java.io.UnsupportedEncodingException.getLocalizedMessage()
public String getLocalizedMessage()
From source file: com.linkedpipes.plugin.loader.dcatAp11ToDkanBatch.DcatAp11ToDkanBatch.java
@Override public void execute() throws LpException { apiURI = configuration.getApiUri();//ww w .jav a 2s. c o m //for HTTP request failing on "failed to respond" boolean responded = false; if (apiURI == null || apiURI.isEmpty() || configuration.getUsername() == null || configuration.getUsername().isEmpty() || configuration.getPassword() == null || configuration.getPassword().isEmpty()) { throw exceptionFactory.failure("Missing required settings."); } Map<String, String> groups = getGroups(); LOG.debug("Querying metadata for datasets"); LinkedList<String> datasets = new LinkedList<>(); for (Map<String, Value> map : executeSelectQuery( "SELECT ?d WHERE {?d a <" + DcatAp11ToDkanBatchVocabulary.DCAT_DATASET_CLASS + ">}")) { datasets.add(map.get("d").stringValue()); } int current = 0; int total = datasets.size(); LOG.info("Found " + total + " datasets"); progressReport.start(total); token = getToken(configuration.getUsername(), configuration.getPassword()); for (String datasetURI : datasets) { current++; CloseableHttpResponse queryResponse = null; LOG.info("Processing dataset " + current + "/" + total + ": " + datasetURI); String publisher_uri = executeSimpleSelectQuery("SELECT ?publisher_uri WHERE {<" + datasetURI + "> <" + DCTERMS.PUBLISHER + "> ?publisher_uri }", "publisher_uri"); String publisher_name = executeSimpleSelectQuery( "SELECT ?publisher_name WHERE {<" + datasetURI + "> <" + DCTERMS.PUBLISHER + ">/<" + FOAF.NAME + "> ?publisher_name FILTER(LANGMATCHES(LANG(?publisher_name), \"" + configuration.getLoadLanguage() + "\"))}", "publisher_name"); if (!groups.containsKey(publisher_uri)) { LOG.debug("Creating group " + publisher_uri); if (publisher_name == null || publisher_name.isEmpty()) { throw exceptionFactory.failure("Publisher has no name: " + publisher_uri); } HttpPost httpPost = new HttpPost(apiURI + "/node"); httpPost.addHeader(new BasicHeader("Accept", "application/json")); httpPost.addHeader(new BasicHeader("X-CSRF-Token", token)); 
ArrayList<NameValuePair> postParameters = new ArrayList<>(); postParameters.add(new BasicNameValuePair("type", "group")); postParameters.add(new BasicNameValuePair("title", publisher_name)); postParameters.add(new BasicNameValuePair("body[und][0][value]", publisher_uri)); try { UrlEncodedFormEntity form = new UrlEncodedFormEntity(postParameters, "UTF-8"); httpPost.setEntity(form); } catch (UnsupportedEncodingException e) { LOG.error("Unexpected encoding issue"); } CloseableHttpResponse response = null; responded = false; do { try { response = postClient.execute(httpPost); if (response.getStatusLine().getStatusCode() == 200) { LOG.debug("Group created OK"); String orgID = new JSONObject(EntityUtils.toString(response.getEntity())) .getString("nid"); groups.put(publisher_uri, orgID); } else { String ent = EntityUtils.toString(response.getEntity()); LOG.error("Group:" + ent); //throw exceptionFactory.failed("Error creating group: " + ent); } responded = true; } catch (Exception e) { LOG.error(e.getLocalizedMessage(), e); } finally { if (response != null) { try { response.close(); } catch (IOException e) { LOG.error(e.getLocalizedMessage(), e); throw exceptionFactory.failure("Error creating group"); } } } } while (!responded); } ArrayList<NameValuePair> datasetFields = new ArrayList<>(); datasetFields.add(new BasicNameValuePair("type", "dataset")); LinkedList<String> keywords = new LinkedList<>(); for (Map<String, Value> map : executeSelectQuery( "SELECT ?keyword WHERE {<" + datasetURI + "> <" + DcatAp11ToDkanBatchVocabulary.DCAT_KEYWORD + "> ?keyword FILTER(LANGMATCHES(LANG(?keyword), \"" + configuration.getLoadLanguage() + "\"))}")) { keywords.add(map.get("keyword").stringValue()); } String concatTags = ""; for (String keyword : keywords) { String safekeyword = fixKeyword(keyword); if (safekeyword.length() >= 2) { concatTags += "\"\"" + safekeyword + "\"\" "; } } if (!concatTags.isEmpty()) { datasetFields.add(new BasicNameValuePair("field_tags[und][value_field]", 
concatTags)); } String title = executeSimpleSelectQuery("SELECT ?title WHERE {<" + datasetURI + "> <" + DCTERMS.TITLE + "> ?title FILTER(LANGMATCHES(LANG(?title), \"" + configuration.getLoadLanguage() + "\"))}", "title"); if (!title.isEmpty()) { datasetFields.add(new BasicNameValuePair("title", title)); } String description = executeSimpleSelectQuery("SELECT ?description WHERE {<" + datasetURI + "> <" + DCTERMS.DESCRIPTION + "> ?description FILTER(LANGMATCHES(LANG(?description), \"" + configuration.getLoadLanguage() + "\"))}", "description"); if (!description.isEmpty()) { datasetFields.add(new BasicNameValuePair("body[und][0][value]", description)); } else if (configuration.getProfile() .equals(DcatAp11ToDkanBatchVocabulary.PROFILES_NKOD.stringValue())) { //Description is mandatory in NKOD. If missing, add at least title. datasetFields.add(new BasicNameValuePair("body[und][0][value]", title)); } String issued = executeSimpleSelectQuery( "SELECT ?issued WHERE {<" + datasetURI + "> <" + DCTERMS.ISSUED + "> ?issued }", "issued"); if (!issued.isEmpty()) { //long unixTime = System.currentTimeMillis() / 1000L; datasetFields.add(new BasicNameValuePair("created", issued)); } String modified = executeSimpleSelectQuery( "SELECT ?modified WHERE {<" + datasetURI + "> <" + DCTERMS.MODIFIED + "> ?modified }", "modified"); if (!modified.isEmpty()) { datasetFields.add(new BasicNameValuePair("changed", modified)); } if (!publisher_uri.isEmpty()) { datasetFields .add(new BasicNameValuePair("og_group_ref[und][target_id]", groups.get(publisher_uri))); } if (configuration.getProfile().equals(DcatAp11ToDkanBatchVocabulary.PROFILES_NKOD.stringValue())) { String contactPoint = executeSimpleSelectQuery("SELECT ?contact WHERE {<" + datasetURI + "> <" + DcatAp11ToDkanBatchVocabulary.DCAT_CONTACT_POINT + ">/<" + DcatAp11ToDkanBatchVocabulary.VCARD_HAS_EMAIL + "> ?contact }", "contact"); if (!contactPoint.isEmpty()) { datasetFields .add(new 
BasicNameValuePair("field_maintainer_email[und][0][value]", contactPoint)); } String curatorName = executeSimpleSelectQuery("SELECT ?name WHERE {<" + datasetURI + "> <" + DcatAp11ToDkanBatchVocabulary.DCAT_CONTACT_POINT + ">/<" + DcatAp11ToDkanBatchVocabulary.VCARD_FN + "> ?name }", "name"); if (!curatorName.isEmpty()) { datasetFields.add(new BasicNameValuePair("field_maintainer[und][0][value]", curatorName)); } if (!publisher_uri.isEmpty()) { datasetFields.add(new BasicNameValuePair("field_publisher_uri[und][0][value]", publisher_uri)); } if (!publisher_name.isEmpty()) { datasetFields .add(new BasicNameValuePair("field_publisher_name[und][0][value]", publisher_name)); } String periodicity = executeSimpleSelectQuery("SELECT ?periodicity WHERE {<" + datasetURI + "> <" + DCTERMS.ACCRUAL_PERIODICITY + "> ?periodicity }", "periodicity"); if (!periodicity.isEmpty()) { datasetFields.add(new BasicNameValuePair("field_frequency_ods[und][0][value]", periodicity)); } else { //Mandatory in NKOD datasetFields.add(new BasicNameValuePair("field_frequency_ods[und][0][value]", "http://publications.europa.eu/resource/authority/frequency/UNKNOWN")); } String temporalStart = executeSimpleSelectQuery( "SELECT ?temporalStart WHERE {<" + datasetURI + "> <" + DCTERMS.TEMPORAL + ">/<" + DcatAp11ToDkanBatchVocabulary.SCHEMA_STARTDATE + "> ?temporalStart }", "temporalStart"); if (!temporalStart.isEmpty()) { datasetFields.add(new BasicNameValuePair("field_temporal_start[und][0][value]", temporalStart)); } String temporalEnd = executeSimpleSelectQuery( "SELECT ?temporalEnd WHERE {<" + datasetURI + "> <" + DCTERMS.TEMPORAL + ">/<" + DcatAp11ToDkanBatchVocabulary.SCHEMA_ENDDATE + "> ?temporalEnd }", "temporalEnd"); if (!temporalEnd.isEmpty()) { datasetFields.add(new BasicNameValuePair("field_temporal_end[und][0][value]", temporalEnd)); } String schemaURL = executeSimpleSelectQuery( "SELECT ?schema WHERE {<" + datasetURI + "> <" + FOAF.PAGE + "> ?schema }", "schema"); if (!schemaURL.isEmpty()) { 
datasetFields.add(new BasicNameValuePair("field_schema[und][0][value]", schemaURL)); } String spatial = executeSimpleSelectQuery( "SELECT ?spatial WHERE {<" + datasetURI + "> <" + DCTERMS.SPATIAL + "> ?spatial }", "spatial"); if (!spatial.isEmpty()) { datasetFields.add(new BasicNameValuePair("field_spatial[und][0][value]", spatial)); if (spatial.matches("http:\\/\\/ruian.linked.opendata.cz\\/resource\\/.*")) { String type = spatial.replaceAll( "http:\\/\\/ruian.linked.opendata.cz\\/resource\\/([^\\/]+)\\/(.*)", "$1"); String code = spatial.replaceAll( "http:\\/\\/ruian.linked.opendata.cz\\/resource\\/([^\\/]+)\\/(.*)", "$2"); String typ; //We should not parse IRIs, however, here we have no choice. switch (type) { case "vusc": typ = "VC"; break; case "obce": typ = "OB"; break; case "kraje": typ = "KR"; break; case "orp": typ = "OP"; break; case "momc": typ = "MC"; break; case "pou": typ = "PU"; break; default: typ = "ST"; } datasetFields.add(new BasicNameValuePair("field_ruian_type[und][0][value]", typ)); datasetFields.add(new BasicNameValuePair("field_ruian_code[und][0][value]", code)); } else { //RIAN type and code are mandatory in NKOD datasetFields.add(new BasicNameValuePair("field_ruian_type[und][0][value]", "ST")); datasetFields.add(new BasicNameValuePair("field_ruian_code[und][0][value]", "1")); } } else { //RIAN type and code are mandatory in NKOD datasetFields.add(new BasicNameValuePair("field_ruian_type[und][0][value]", "ST")); datasetFields.add(new BasicNameValuePair("field_ruian_code[und][0][value]", "1")); } //DCAT-AP v1.1: has to be an IRI from http://publications.europa.eu/mdr/authority/file-type/index.html LinkedList<String> themes = new LinkedList<>(); for (Map<String, Value> map : executeSelectQuery("SELECT ?theme WHERE {<" + datasetURI + "> <" + DcatAp11ToDkanBatchVocabulary.DCAT_THEME + "> ?theme }")) { themes.add(map.get("theme").stringValue()); } String concatThemes = ""; for (String theme : themes) { concatThemes += theme + " "; } if 
(!concatThemes.isEmpty()) datasetFields.add(new BasicNameValuePair("field_theme[und][0][value]", concatThemes)); } //Distributions LinkedList<String> distributions = new LinkedList<>(); for (Map<String, Value> map : executeSelectQuery("SELECT ?distribution WHERE {<" + datasetURI + "> <" + DcatAp11ToDkanBatchVocabulary.DCAT_DISTRIBUTION + "> ?distribution }")) { distributions.add(map.get("distribution").stringValue()); } for (int d = 0; d < distributions.size(); d++) { String distribution = distributions.get(d); ArrayList<NameValuePair> distroFields = new ArrayList<>(); distroFields.add(new BasicNameValuePair("type", "resource")); String dtitle = executeSimpleSelectQuery("SELECT ?title WHERE {<" + distribution + "> <" + DCTERMS.TITLE + "> ?title FILTER(LANGMATCHES(LANG(?title), \"" + configuration.getLoadLanguage() + "\"))}", "title"); if (dtitle.isEmpty()) { //Distribution title is mandatory in DKAN dtitle = title.isEmpty() ? "Resource" : title; } distroFields.add(new BasicNameValuePair("title", dtitle)); String ddescription = executeSimpleSelectQuery("SELECT ?description WHERE {<" + distribution + "> <" + DCTERMS.DESCRIPTION + "> ?description FILTER(LANGMATCHES(LANG(?description), \"" + configuration.getLoadLanguage() + "\"))}", "description"); if (!ddescription.isEmpty()) { distroFields.add(new BasicNameValuePair("body[und][0][value]", ddescription)); } /*String dformat = executeSimpleSelectQuery("SELECT ?format WHERE {<" + distribution + "> <"+ DCTERMS.FORMAT + "> ?format }", "format"); if (!dformat.isEmpty() && codelists != null) { String formatlabel = executeSimpleCodelistSelectQuery("SELECT ?formatlabel WHERE {<" + dformat + "> <"+ SKOS.PREF_LABEL + "> ?formatlabel FILTER(LANGMATCHES(LANG(?formatlabel), \"en\"))}", "formatlabel"); if (!formatlabel.isEmpty()) { distroFields.add(new BasicNameValuePair("field_format[und][0][value]", formatlabel)); } }*/ String dmimetype = executeSimpleSelectQuery("SELECT ?format WHERE {<" + distribution + "> <" + 
DcatAp11ToDkanBatchVocabulary.DCAT_MEDIATYPE + "> ?format }", "format"); if (!dmimetype.isEmpty()) { distroFields.add(new BasicNameValuePair("field_link_remote_file[und][0][filemime]", dmimetype.replaceAll(".*\\/([^\\/]+\\/[^\\/]+)", "$1"))); } String dwnld = executeSimpleSelectQuery("SELECT ?dwnld WHERE {<" + distribution + "> <" + DcatAp11ToDkanBatchVocabulary.DCAT_DOWNLOADURL + "> ?dwnld }", "dwnld"); String access = executeSimpleSelectQuery("SELECT ?acc WHERE {<" + distribution + "> <" + DcatAp11ToDkanBatchVocabulary.DCAT_ACCESSURL + "> ?acc }", "acc"); //we prefer downloadURL, but only accessURL is mandatory if (dwnld == null || dwnld.isEmpty()) { dwnld = access; if (dwnld == null || dwnld.isEmpty()) { LOG.warn("Empty download and access URLs: " + datasetURI); continue; } } if (!dwnld.isEmpty()) { distroFields.add(new BasicNameValuePair( "field_link_remote_file[und][0][filefield_remotefile][url]", dwnld)); } /*if (!distribution.isEmpty()) { distro.put("distro_url", distribution); }*/ String dissued = executeSimpleSelectQuery( "SELECT ?issued WHERE {<" + distribution + "> <" + DCTERMS.ISSUED + "> ?issued }", "issued"); if (!dissued.isEmpty()) { distroFields.add(new BasicNameValuePair("created", dissued)); } String dmodified = executeSimpleSelectQuery( "SELECT ?modified WHERE {<" + distribution + "> <" + DCTERMS.MODIFIED + "> ?modified }", "modified"); if (!dmodified.isEmpty()) { distroFields.add(new BasicNameValuePair("changed", dmodified)); } if (configuration.getProfile().equals(DcatAp11ToDkanBatchVocabulary.PROFILES_NKOD.stringValue())) { String dtemporalStart = executeSimpleSelectQuery( "SELECT ?temporalStart WHERE {<" + distribution + "> <" + DCTERMS.TEMPORAL + ">/<" + DcatAp11ToDkanBatchVocabulary.SCHEMA_STARTDATE + "> ?temporalStart }", "temporalStart"); if (!dtemporalStart.isEmpty()) { distroFields .add(new BasicNameValuePair("field_temporal_start[und][0][value]", dtemporalStart)); } String dtemporalEnd = executeSimpleSelectQuery( "SELECT ?temporalEnd 
WHERE {<" + distribution + "> <" + DCTERMS.TEMPORAL + ">/<" + DcatAp11ToDkanBatchVocabulary.SCHEMA_ENDDATE + "> ?temporalEnd }", "temporalEnd"); if (!dtemporalEnd.isEmpty()) { distroFields.add(new BasicNameValuePair("field_temporal_end[und][0][value]", dtemporalEnd)); } String dschemaURL = executeSimpleSelectQuery( "SELECT ?schema WHERE {<" + distribution + "> <" + DCTERMS.CONFORMS_TO + "> ?schema }", "schema"); if (!dschemaURL.isEmpty()) { distroFields.add(new BasicNameValuePair("field_described_by[und][0][value]", dschemaURL)); } String dlicense = executeSimpleSelectQuery( "SELECT ?license WHERE {<" + distribution + "> <" + DCTERMS.LICENSE + "> ?license }", "license"); if (dlicense.isEmpty()) { //This is mandatory in NKOD and DKAN extension dlicense = "http://joinup.ec.europa.eu/category/licence/unknown-licence"; } distroFields.add(new BasicNameValuePair("field_licence[und][0][value]", dlicense)); if (dmimetype.isEmpty()) { //! field_format => mimetype //This is mandatory in NKOD and DKAN extension dmimetype = "http://www.iana.org/assignments/media-types/application/octet-stream"; } distroFields.add(new BasicNameValuePair("field_mimetype[und][0][value]", dmimetype.replaceAll(".*\\/([^\\/]+\\/[^\\/]+)", "$1"))); } //POST DISTRIBUTION LOG.debug("Creating resource " + distribution); HttpPost httpPost = new HttpPost(apiURI + "/node"); httpPost.addHeader(new BasicHeader("Accept", "application/json")); httpPost.addHeader(new BasicHeader("X-CSRF-Token", token)); try { UrlEncodedFormEntity form = new UrlEncodedFormEntity(distroFields, "UTF-8"); httpPost.setEntity(form); } catch (UnsupportedEncodingException e) { LOG.error("Unexpected encoding issue"); } CloseableHttpResponse response = null; String resID = null; responded = false; do { try { LOG.debug("POSTing resource " + distribution); response = postClient.execute(httpPost); if (response.getStatusLine().getStatusCode() == 200) { String resp = EntityUtils.toString(response.getEntity()); LOG.debug("Resource created OK: 
" + resp); try { resID = new JSONObject(resp).getString("nid"); datasetFields.add(new BasicNameValuePair( "field_resources[und][" + d + "][target_id]", dtitle + " (" + resID + ")")); } catch (JSONException e) { LOG.error(e.getLocalizedMessage(), e); LOG.error("Request: " + distroFields.toString()); LOG.error("Response: " + resp); } } else { String ent = EntityUtils.toString(response.getEntity()); LOG.error("Resource:" + ent); //throw exceptionFactory.failed("Error creating resource: " + ent); } responded = true; } catch (NoHttpResponseException e) { LOG.error(e.getLocalizedMessage(), e); } catch (IOException e) { LOG.error(e.getLocalizedMessage(), e); } finally { if (response != null) { try { response.close(); } catch (IOException e) { LOG.error(e.getLocalizedMessage(), e); //throw exceptionFactory.failed("Error creating resource"); } } } } while (!responded); } LOG.debug("Creating dataset " + datasetURI); HttpPost httpPost = new HttpPost(apiURI + "/node"); httpPost.addHeader(new BasicHeader("Accept", "application/json")); httpPost.addHeader(new BasicHeader("X-CSRF-Token", token)); try { UrlEncodedFormEntity form = new UrlEncodedFormEntity(datasetFields, "UTF-8"); httpPost.setEntity(form); } catch (UnsupportedEncodingException e) { LOG.error("Unexpected encoding issue"); } CloseableHttpResponse response = null; responded = false; do { try { LOG.debug("POSTing dataset " + datasetURI); response = postClient.execute(httpPost); if (response.getStatusLine().getStatusCode() == 200) { LOG.debug("Dataset created OK"); } else { String ent = EntityUtils.toString(response.getEntity()); LOG.error("Dataset:" + ent); //throw exceptionFactory.failed("Error creating dataset: " + ent); } responded = true; } catch (NoHttpResponseException e) { LOG.error(e.getLocalizedMessage(), e); } catch (IOException e) { LOG.error(e.getLocalizedMessage(), e); } finally { if (response != null) { try { response.close(); } catch (IOException e) { LOG.error(e.getLocalizedMessage(), e); throw 
exceptionFactory.failure("Error creating dataset"); } } } } while (!responded); progressReport.entryProcessed(); } try { queryClient.close(); createClient.close(); postClient.close(); } catch (IOException e) { LOG.error(e.getLocalizedMessage(), e); } progressReport.done(); }
From source file: com.evolveum.polygon.scim.StandardScimHandlingStrategy.java
@Override public Uid update(Uid uid, String resourceEndPoint, ObjectTranslator objectTranslator, Set<Attribute> attributes, ScimConnectorConfiguration conf) { ServiceAccessManager accessManager = new ServiceAccessManager(conf); Header authHeader = accessManager.getAuthHeader(); String scimBaseUri = accessManager.getBaseUri(); if (authHeader == null || scimBaseUri.isEmpty()) { throw new ConnectorException( "The data needed for authorization of request to the provider was not found."); }// ww w . j av a 2 s . c om HttpClient httpClient = initHttpClient(conf); String uri = new StringBuilder(scimBaseUri).append(SLASH).append(resourceEndPoint).append(SLASH) .append(uid.getUidValue()).toString(); LOGGER.info("The uri for the update request: {0}", uri); String responseString = null; try { LOGGER.info("Query url: {0}", uri); JSONObject jsonObject = objectTranslator.translateSetToJson(attributes, null, resourceEndPoint); // LOGGER.info("The update json object: {0}", jsonObject); HttpPatch httpPatch = buildHttpPatch(uri, authHeader, jsonObject); try (CloseableHttpResponse response = (CloseableHttpResponse) httpClient.execute(httpPatch)) { int statusCode = response.getStatusLine().getStatusCode(); HttpEntity entity = response.getEntity(); if (entity != null) { responseString = EntityUtils.toString(entity); } else { responseString = ""; } if (statusCode == 200 || statusCode == 201) { LOGGER.info("Update of resource was succesfull"); if (!responseString.isEmpty()) { JSONObject json = new JSONObject(responseString); LOGGER.ok("Json response: {0}", json.toString()); Uid id = new Uid(json.getString(ID)); return id; } else { LOGGER.warn("Service provider response is empty, no response after the update procedure"); } } else if (statusCode == 204) { LOGGER.warn("Status code {0}. 
Response body left intentionally empty", statusCode); return uid; } else if (statusCode == 404) { ErrorHandler.onNoSuccess(responseString, statusCode, uri); StringBuilder errorBuilder = new StringBuilder("The resource with the uid: ").append(uid) .append(" was not found."); throw new UnknownUidException(errorBuilder.toString()); } else if (statusCode == 500 && GROUPS.equals(resourceEndPoint)) { Uid id = groupUpdateProcedure(statusCode, jsonObject, uri, authHeader, conf); if (id != null) { return id; } else { ErrorHandler.onNoSuccess(responseString, statusCode, "updating object"); } } else { handleInvalidStatus("while updating resource. ", responseString, "updating object", statusCode); } } } catch (UnsupportedEncodingException e) { LOGGER.error("Unsupported encoding: {0}. Occurrence in the process of updating a resource object ", e.getMessage()); LOGGER.info("Unsupported encoding: {0}. Occurrence in the process of updating a resource object ", e); throw new ConnectorException( "Unsupported encoding, Occurrence in the process of updating a resource object ", e); } catch (JSONException e) { LOGGER.error( "An exception has occurred while processing a json object. Occurrence in the process of updating a resource object: {0}", e.getLocalizedMessage()); LOGGER.info( "An exception has occurred while processing a json object. Occurrence in the process of updating a resource object: {0}", e); throw new ConnectorException( "An exception has occurred while processing a json object,Occurrence in the process of updating a resource object", e); } catch (ClientProtocolException e) { LOGGER.error( "An protocol exception has occurred while in the process of updating a resource object. Possible mismatch in the interpretation of the HTTP specification: {0}", e.getLocalizedMessage()); LOGGER.info( "An protocol exception has occurred while in the process of updating a resource object. 
Possible mismatch in the interpretation of the HTTP specification: {0}", e); throw new ConnectionFailedException( "An protocol exception has occurred while in the process of updating a resource object, Possible mismatch in the interpretation of the HTTP specification.", e); } catch (IOException e) { StringBuilder errorBuilder = new StringBuilder( "An error has occurred while processing the http response. Occurrence in the process of updating a resource object wit the Uid: "); errorBuilder.append(uid.toString()); if ((e instanceof SocketTimeoutException || e instanceof NoRouteToHostException)) { errorBuilder.insert(0, "The connection timed out. "); throw new OperationTimeoutException(errorBuilder.toString(), e); } else { LOGGER.error( "An error has occurred while processing the http response. Occurrence in the process of updating a resource object: {0}", e.getLocalizedMessage()); LOGGER.info( "An error has occurred while processing the http response. Occurrence in the process of updating a resource object: {0}", e); throw new ConnectorIOException(errorBuilder.toString(), e); } } return null; }
From source file: swp.bibjsf.persistence.Data.java
/** * Returns an MD5 hash of reader's password. * * @param reader//from w w w. java 2s. c om * reader whose password is to be hashed * @return MD5 hash of reader's password * @throws NoSuchAlgorithmException * thrown if there is no MD5 algorithm */ private String hashPassword(Reader reader) throws NoSuchAlgorithmException { MessageDigest md = MessageDigest.getInstance("MD5"); String readerPassword = reader.getPassword(); if (readerPassword == null) { readerPassword = ""; } byte[] bpassword; try { bpassword = md.digest(readerPassword.getBytes("UTF-8")); } catch (UnsupportedEncodingException e) { throw new NoSuchAlgorithmException("no UTF-8 encoding possible: " + e.getLocalizedMessage()); } StringBuffer password = new StringBuffer(); for (int i = 0; i < bpassword.length; i++) { password.append(Integer.toString((bpassword[i] & 0xff) + 0x100, 16).substring(1)); } return password.toString(); }
From source file: org.collectionspace.chain.csp.persistence.services.GenericStorage.java
/** * get data needed for list of objects related to a termUsed * @param storage//from w w w. j a va 2 s . c om * @param creds * @param cache * @param path * @return * @throws ExistException * @throws UnderlyingStorageException * @throws JSONException * @throws UnimplementedException */ public JSONObject refObjViewRetrieveJSON(ContextualisedStorage storage, CSPRequestCredentials creds, CSPRequestCache cache, String path, JSONObject restrictions, Record vr) throws ExistException, UnderlyingStorageException, JSONException, UnimplementedException { JSONObject out = new JSONObject(); /* * Usually, processing of list results utilizes "glean" maps stored as instance variables (view_good, view_map, * xxx_view_deurn, view_search_optional, view_merge, and view_useCsid). These instance variables can be * considered defaults that work for the most common kinds of list results. The format returned from the refobj * services call is non-standard with respect to most list results, so we have to set up a special context to * interpret the results the way we need. Swapping out the instance variables is not thread-safe (CSPACE-5988). * Instead, the required maps are defined locally to this method, and passed as parameters into the methods * that need them, which overrides the use of the corresponding instance variables. */ try { Map<String, String> refObj_view_good = new HashMap<String, String>();// map of servicenames of fields to descriptors Map<String, String> refObj_view_map = new HashMap<String, String>(); // map of csid to service name of field if (vr.hasRefObjUsed()) { path = getRestrictedPath(path, restrictions, "kw", "", false, ""); //XXX need a way to append the data needed from the field, // which we don't know until after we have got the information... 
refObj_view_map.put("docType", "docType"); refObj_view_map.put("docId", "docId"); refObj_view_map.put("docName", "docName"); refObj_view_map.put("docNumber", "docNumber"); refObj_view_map.put("sourceField", "sourceField"); refObj_view_map.put("uri", "uri"); refObj_view_map.put("refName", "refName"); refObj_view_good.put("terms_docType", "docType"); refObj_view_good.put("terms_docId", "docId"); refObj_view_good.put("terms_docName", "docName"); refObj_view_good.put("terms_docNumber", "docNumber"); refObj_view_good.put("terms_sourceField", "sourceField"); refObj_view_good.put("terms_refName", "refName"); JSONObject data = getRepeatableListView(storage, creds, cache, path, "authority-ref-doc-list/authority-ref-doc-item", "uri", true, vr, refObj_view_map);//XXX this might be the wrong record to pass to checkf or hard/soft delet listing JSONArray recs = data.getJSONArray("listItems"); if (data.has("pagination")) { out.put("pagination", data.getJSONObject("pagination")); } JSONArray items = new JSONArray(); //String[] filepaths = (String[]) data.get("listItems"); for (int i = 0; i < recs.length(); ++i) { String uri = recs.getJSONObject(i).getString("csid"); String filePath = uri; // recs.getJSONObject(i).getString("csid"); if (filePath != null && filePath.startsWith("/")) filePath = filePath.substring(1); String[] parts = filePath.split("/"); String recordurl = parts[0]; Record thisr = vr.getSpec().getRecordByServicesUrl(recordurl); // Set up the glean maps required for this record. We need to reset these each time // through the loop, because every record could be a different type. 
Map<String, String> thisr_view_good = new HashMap<String, String>(refObj_view_good); Map<String, String> thisr_view_map = new HashMap<String, String>(refObj_view_map); Set<String> thisr_xxx_view_deurn = new HashSet<String>(); Set<String> thisr_view_search_optional = new HashSet<String>(); Map<String, List<String>> thisr_view_merge = new HashMap<String, List<String>>(); Map<String, List<String>> thisr_view_useCsid = new HashMap<String, List<String>>(); initializeGlean(thisr, thisr_view_good, thisr_view_map, thisr_xxx_view_deurn, thisr_view_search_optional, thisr_view_merge, thisr_view_useCsid); String csid = parts[parts.length - 1]; JSONObject dataitem = miniViewRetrieveJSON(cache, creds, csid, "terms", uri, thisr, thisr_view_good, thisr_xxx_view_deurn, thisr_view_search_optional, thisr_view_merge, thisr_view_useCsid); dataitem.getJSONObject("summarylist").put("uri", filePath); String key = recs.getJSONObject(i).getString("sourceField"); dataitem.getJSONObject("summarylist").put("sourceField", key); String fieldName = "unknown"; String fieldSelector = "unknown"; if (key.contains(":")) { fieldName = key.split(":")[1]; //XXX fixCSPACE-2909 would be nice if they gave us the actual field rather than the parent //XXX CSPACE-2586 // FIXME: We might remove the following if CSPACE-2909's fix makes this moot - ADR 2012-07-19 while (thisr.getFieldFullList(fieldName) instanceof Repeat || thisr.getFieldFullList(fieldName) instanceof Group) { fieldName = ((Repeat) thisr.getFieldFullList(fieldName)).getChildren("GET")[0].getID(); } Field fieldinstance = (Field) thisr.getFieldFullList(fieldName); fieldSelector = fieldinstance.getSelector(); } dataitem.put("csid", csid); dataitem.put("sourceFieldselector", fieldSelector); dataitem.put("sourceFieldName", fieldName); dataitem.put("sourceFieldType", dataitem.getJSONObject("summarylist").getString("docType")); dataitem.put("sourceFieldType", dataitem.getJSONObject("summarylist").getString("docType")); 
//items.put(csid+":"+key,dataitem); items.put(dataitem); } out.put("items", items); } return out; } catch (ConnectionException e) { log.error("failed to retrieve refObjs for " + path); JSONObject dataitem = new JSONObject(); dataitem.put("csid", ""); dataitem.put("sourceFieldselector", "Functionality Failed"); dataitem.put("sourceFieldName", "Functionality Failed"); dataitem.put("sourceFieldType", "Functionality Failed"); dataitem.put("message", e.getMessage()); out.put("Functionality Failed", dataitem); //return out; throw new UnderlyingStorageException("Connection problem" + e.getLocalizedMessage(), e.getStatus(), e.getUrl(), e); } catch (UnsupportedEncodingException uae) { log.error("failed to retrieve refObjs for " + path); JSONObject dataitem = new JSONObject(); dataitem.put("message", uae.getMessage()); out.put("Functionality Failed", dataitem); throw new UnderlyingStorageException("Problem building query" + uae.getLocalizedMessage(), uae); } }
From source file: es.uvigo.darwin.jmodeltest.io.HtmlReporter.java
public static void buildReport(ApplicationOptions options, Model models[], File mOutputFile, TreeSummary summary) {/* w w w. j a va2 s. com*/ File outputFile; if (mOutputFile != null) { if (!(mOutputFile.getName().endsWith(".htm") || mOutputFile.getName().endsWith(".html"))) { outputFile = new File(mOutputFile.getAbsolutePath() + ".html"); } else { outputFile = mOutputFile; } } else { outputFile = new File(LOG_DIR.getPath() + File.separator + options.getInputFile().getName() + ".jmodeltest." + options.getExecutionName() + ".html"); } // Add the values in the datamodel datamodel = new HashMap<String, Object>(); java.util.Date current_time = new java.util.Date(); datamodel.put("date", current_time.toString()); datamodel.put("system", System.getProperty("os.name") + " " + System.getProperty("os.version") + ", arch: " + System.getProperty("os.arch") + ", bits: " + System.getProperty("sun.arch.data.model") + ", numcores: " + Runtime.getRuntime().availableProcessors()); fillInWithOptions(options); fillInWithSortedModels(models); datamodel.put("isTopologiesSummary", summary != null ? 
new Integer(1) : new Integer(0)); if (summary != null) { fillInWithTopologies(summary, options); } if (options.doAIC) { Collection<Map<String, String>> aicModels = new ArrayList<Map<String, String>>(); Map<String, String> bestAicModel = new HashMap<String, String>(); fillInWIthInformationCriterion(ModelTest.getMyAIC(), aicModels, bestAicModel); datamodel.put("aicModels", aicModels); datamodel.put("bestAicModel", bestAicModel); datamodel.put("aicConfidenceCount", ModelTest.getMyAIC().getConfidenceModels().size()); StringBuffer aicConfModels = new StringBuffer(); for (Model model : ModelTest.getMyAIC().getConfidenceModels()) aicConfModels.append(model.getName() + " "); datamodel.put("aicConfidenceList", aicConfModels.toString()); if (options.writePAUPblock) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); PrintStream ps = new PrintStream(baos); TextOutputStream strOutput = new TextOutputStream(ps); ModelTest.WritePaupBlock(strOutput, "AIC", ModelTest.getMyAIC().getMinModel()); try { String pblock = baos.toString("UTF8"); pblock = pblock.replaceAll("\n", "<br/>"); datamodel.put("aicPaup", pblock); } catch (UnsupportedEncodingException e) { } } buildChart(outputFile, ModelTest.getMyAIC()); datamodel.put("aicEuImagePath", IMAGES_DIR.getName() + File.separator + outputFile.getName() + "_eu_AIC.png"); datamodel.put("aicRfImagePath", IMAGES_DIR.getName() + File.separator + outputFile.getName() + "_rf_AIC.png"); } if (options.doAICc) { Collection<Map<String, String>> aiccModels = new ArrayList<Map<String, String>>(); Map<String, String> bestAiccModel = new HashMap<String, String>(); fillInWIthInformationCriterion(ModelTest.getMyAICc(), aiccModels, bestAiccModel); datamodel.put("aiccModels", aiccModels); datamodel.put("bestAiccModel", bestAiccModel); datamodel.put("aiccConfidenceCount", ModelTest.getMyAICc().getConfidenceModels().size()); StringBuffer aiccConfModels = new StringBuffer(); for (Model model : ModelTest.getMyAICc().getConfidenceModels()) 
aiccConfModels.append(model.getName() + " "); datamodel.put("aiccConfidenceList", aiccConfModels.toString()); if (options.writePAUPblock) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); PrintStream ps = new PrintStream(baos); TextOutputStream strOutput = new TextOutputStream(ps); ModelTest.WritePaupBlock(strOutput, "AICc", ModelTest.getMyAICc().getMinModel()); try { String pblock = baos.toString("UTF8"); pblock = pblock.replaceAll("\n", "<br/>"); datamodel.put("aiccPaup", pblock); } catch (UnsupportedEncodingException e) { } } buildChart(outputFile, ModelTest.getMyAICc()); datamodel.put("aiccEuImagePath", IMAGES_DIR.getName() + File.separator + outputFile.getName() + "_eu_AICc.png"); datamodel.put("aiccRfImagePath", IMAGES_DIR.getName() + File.separator + outputFile.getName() + "_rf_AICc.png"); } if (options.doBIC) { Collection<Map<String, String>> bicModels = new ArrayList<Map<String, String>>(); Map<String, String> bestBicModel = new HashMap<String, String>(); fillInWIthInformationCriterion(ModelTest.getMyBIC(), bicModels, bestBicModel); datamodel.put("bicModels", bicModels); datamodel.put("bestBicModel", bestBicModel); datamodel.put("bicConfidenceCount", ModelTest.getMyBIC().getConfidenceModels().size()); StringBuffer bicConfModels = new StringBuffer(); for (Model model : ModelTest.getMyBIC().getConfidenceModels()) bicConfModels.append(model.getName() + " "); datamodel.put("bicConfidenceList", bicConfModels.toString()); if (options.writePAUPblock) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); PrintStream ps = new PrintStream(baos); TextOutputStream strOutput = new TextOutputStream(ps); ModelTest.WritePaupBlock(strOutput, "BIC", ModelTest.getMyBIC().getMinModel()); try { String pblock = baos.toString("UTF8"); pblock = pblock.replaceAll("\n", "<br/>"); datamodel.put("bicPaup", pblock); } catch (UnsupportedEncodingException e) { } } buildChart(outputFile, ModelTest.getMyBIC()); datamodel.put("bicEuImagePath", IMAGES_DIR.getName() + 
File.separator + outputFile.getName() + "_eu_BIC.png"); datamodel.put("bicRfImagePath", IMAGES_DIR.getName() + File.separator + outputFile.getName() + "_rf_BIC.png"); } if (options.doDT) { Collection<Map<String, String>> dtModels = new ArrayList<Map<String, String>>(); Map<String, String> bestDtModel = new HashMap<String, String>(); fillInWIthInformationCriterion(ModelTest.getMyDT(), dtModels, bestDtModel); datamodel.put("dtModels", dtModels); datamodel.put("bestDtModel", bestDtModel); datamodel.put("dtConfidenceCount", ModelTest.getMyDT().getConfidenceModels().size()); StringBuffer dtConfModels = new StringBuffer(); for (Model model : ModelTest.getMyDT().getConfidenceModels()) dtConfModels.append(model.getName() + " "); datamodel.put("dtConfidenceList", dtConfModels.toString()); if (options.writePAUPblock) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); PrintStream ps = new PrintStream(baos); TextOutputStream strOutput = new TextOutputStream(ps); ModelTest.WritePaupBlock(strOutput, "DT", ModelTest.getMyDT().getMinModel()); try { String pblock = baos.toString("UTF8"); pblock = pblock.replaceAll("\n", "<br/>"); datamodel.put("dtPaup", pblock); } catch (UnsupportedEncodingException e) { } } buildChart(outputFile, ModelTest.getMyDT()); datamodel.put("dtEuImagePath", IMAGES_DIR.getName() + File.separator + outputFile.getName() + "_eu_DT.png"); datamodel.put("dtRfImagePath", IMAGES_DIR.getName() + File.separator + outputFile.getName() + "_rf_DT.png"); } datamodel.put("doAICAveragedPhylogeny", ModelTest.getConsensusAIC() != null ? new Integer(1) : new Integer(0)); if (ModelTest.getConsensusAIC() != null) { datamodel.put("aicConsensusTree", TreeUtilities.toNewick(ModelTest.getConsensusAIC().getConsensus(), true, true, true)); datamodel.put("consensusType", ModelTest.getConsensusAIC().getConsensusType()); } datamodel.put("doAICcAveragedPhylogeny", ModelTest.getConsensusAICc() != null ? 
new Integer(1) : new Integer(0)); if (ModelTest.getConsensusAICc() != null) { datamodel.put("aiccConsensusTree", TreeUtilities.toNewick(ModelTest.getConsensusAICc().getConsensus(), true, true, true)); datamodel.put("consensusType", ModelTest.getConsensusAICc().getConsensusType()); } datamodel.put("doBICAveragedPhylogeny", ModelTest.getConsensusBIC() != null ? new Integer(1) : new Integer(0)); if (ModelTest.getConsensusBIC() != null) { datamodel.put("bicConsensusTree", TreeUtilities.toNewick(ModelTest.getConsensusBIC().getConsensus(), true, true, true)); datamodel.put("consensusType", ModelTest.getConsensusBIC().getConsensusType()); } datamodel.put("doDTAveragedPhylogeny", ModelTest.getConsensusDT() != null ? new Integer(1) : new Integer(0)); if (ModelTest.getConsensusDT() != null) { datamodel.put("dtConsensusTree", TreeUtilities.toNewick(ModelTest.getConsensusDT().getConsensus(), true, true, true)); datamodel.put("consensusType", ModelTest.getConsensusDT().getConsensusType()); } // Process the template using FreeMarker try { freemarkerDo(datamodel, "index.html", outputFile); } catch (Exception e) { System.out.println("There was a problem building the html log files: " + e.getLocalizedMessage()); } }