List of usage examples for java.util LinkedHashMap get
public V get(Object key)
From source file:egovframework.rte.fdl.security.securedobject.impl.SecuredObjectDAO.java
/**
 * Loads the role-to-resource mappings of the given resource type from the database and
 * returns them as an ordered map of resource key -> ConfigAttributeDefinition (the set of
 * roles/authorities required for that resource).
 *
 * <p>Rows are expected to arrive sorted so that all rows for the same resource are adjacent
 * (the SQL presumably orders by resource and sort_order — TODO confirm against the query);
 * consecutive rows for the same resource are merged into one ConfigAttributeDefinition.
 *
 * @param resourceType "method", "pointcut", or anything else for URL resources
 * @return insertion-ordered map keyed by RequestKey (for URLs) or the resource string itself
 * @throws Exception if the underlying query fails
 */
public LinkedHashMap getRolesAndResources(String resourceType) throws Exception {
    LinkedHashMap resourcesMap = new LinkedHashMap();
    String sqlRolesAndResources;
    // URL resources are wrapped in RequestKey; method/pointcut resources stay plain strings.
    boolean isResourcesUrl = true;
    if ("method".equals(resourceType)) {
        sqlRolesAndResources = getSqlRolesAndMethod();
        isResourcesUrl = false;
    } else if ("pointcut".equals(resourceType)) {
        sqlRolesAndResources = getSqlRolesAndPointcut();
        isResourcesUrl = false;
    } else {
        sqlRolesAndResources = getSqlRolesAndUrl();
    }
    List resultList = this.namedParameterJdbcTemplate.queryForList(sqlRolesAndResources, new HashMap());
    Iterator itr = resultList.iterator();
    Map tempMap;
    String preResource = null; // resource string of the previous row, for adjacency-based merging
    String presentResourceStr;
    Object presentResource;
    while (itr.hasNext()) {
        tempMap = (Map) itr.next();
        presentResourceStr = (String) tempMap.get(resourceType);
        // For URL resources the map key is a RequestKey; otherwise the raw string.
        // NOTE(review): the merge below looks up resourcesMap with a freshly constructed
        // RequestKey — this relies on RequestKey implementing equals/hashCode; confirm.
        presentResource = isResourcesUrl ? new RequestKey(presentResourceStr) : (Object) presentResourceStr;
        List configList = new LinkedList();
        // If this row belongs to the same resource as the previous one, carry over the
        // roles already accumulated for it so the replacement entry keeps them.
        if (preResource != null && presentResourceStr.equals(preResource)) {
            List preAuthList = (List) ((ConfigAttributeDefinition) resourcesMap.get(presentResource))
                    .getConfigAttributes();
            Iterator preAuthItr = preAuthList.iterator();
            while (preAuthItr.hasNext()) {
                SecurityConfig tempConfig = (SecurityConfig) preAuthItr.next();
                configList.add(tempConfig);
            }
        }
        // Add this row's authority and (re)store the combined definition for the resource.
        configList.add(new SecurityConfig((String) tempMap.get("authority")));
        ConfigAttributeDefinition cad = new ConfigAttributeDefinition(configList);
        resourcesMap.put(presentResource, cad);
        preResource = presentResourceStr;
    }
    return resourcesMap;
}
From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.TestElasticsearchIndexUtils.java
/**
 * Unit test for ElasticsearchIndexUtils default-mapping parsing: exercises match-pair
 * building, properties/templates extraction from JSON fixtures, fixed-field discovery,
 * and the combined parseDefaultMapping entry point.
 */
@Test
public void test_parseDefaultMapping() throws JsonProcessingException, IOException {
    // Check the different components

    // Build/"unbuild" match pair
    assertEquals(Tuples._2T("*", "*"), ElasticsearchIndexUtils.buildMatchPair(_mapper.readTree("{}")));
    assertEquals(Tuples._2T("field*", "*"),
            ElasticsearchIndexUtils.buildMatchPair(_mapper.readTree("{\"match\":\"field*\"}")));
    assertEquals(Tuples._2T("field*field", "type*"), ElasticsearchIndexUtils.buildMatchPair(
            _mapper.readTree("{\"match\":\"field*field\", \"match_mapping_type\": \"type*\"}")));
    assertEquals("testBARSTAR_string",
            ElasticsearchIndexUtils.getFieldNameFromMatchPair(Tuples._2T("test_*", "string")));

    // More complex objects: load the three JSON fixtures from test resources.
    final String properties = Resources.toString(
            Resources.getResource("com/ikanow/aleph2/search_service/elasticsearch/utils/properties_test.json"),
            Charsets.UTF_8);
    final String templates = Resources.toString(
            Resources.getResource("com/ikanow/aleph2/search_service/elasticsearch/utils/templates_test.json"),
            Charsets.UTF_8);
    final String both = Resources.toString(
            Resources
                    .getResource("com/ikanow/aleph2/search_service/elasticsearch/utils/full_mapping_test.json"),
            Charsets.UTF_8);
    final JsonNode properties_json = _mapper.readTree(properties);
    final JsonNode templates_json = _mapper.readTree(templates);
    final JsonNode both_json = _mapper.readTree(both);

    // Properties, empty + non-empty
    final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> props_test1 = ElasticsearchIndexUtils
            .getProperties(templates_json);
    assertTrue("Empty map if not present", props_test1.isEmpty());
    final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> props_test2 = ElasticsearchIndexUtils
            .getProperties(properties_json);
    assertEquals(4, props_test2.size());
    // Property keys are Either.left (plain field names), in insertion order.
    assertEquals(Arrays.asList("@version", "@timestamp", "sourceKey", "geoip"),
            props_test2.keySet().stream().map(e -> e.left().value()).collect(Collectors.toList()));
    assertEquals("{\"type\":\"string\",\"index\":\"not_analyzed\"}",
            props_test2.get(Either.left("sourceKey")).toString());

    // Templates, empty + non-empty
    final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> templates_test1 = ElasticsearchIndexUtils
            .getTemplates(properties_json, _mapper.readTree("{}"), Collections.emptySet());
    assertTrue("Empty map if not present", templates_test1.isEmpty());
    final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> templates_test2 = ElasticsearchIndexUtils
            .getTemplates(templates_json, _mapper.readTree("{}"), Collections.emptySet());
    assertEquals("getTemplates: " + templates_test2, 2, templates_test2.size());
    // Template keys are Either.right pairs of (match, match_mapping_type).
    assertEquals(Arrays.asList(Tuples._2T("*", "string"), Tuples._2T("*", "number")),
            templates_test2.keySet().stream().map(e -> e.right().value()).collect(Collectors.toList()));

    // Some more properties test: nested fields are flattened with dotted paths.
    final List<String> nested_properties = ElasticsearchIndexUtils.getAllFixedFields_internal(properties_json)
            .collect(Collectors.toList());
    assertEquals(Arrays.asList("@version", "@timestamp", "sourceKey", "geoip", "geoip.location"),
            nested_properties);
    final Set<String> nested_properties_2 = ElasticsearchIndexUtils.getAllFixedFields(both_json);
    assertEquals(Arrays.asList("sourceKey", "@timestamp", "geoip", "geoip.location", "@version"),
            new ArrayList<String>(nested_properties_2));

    // Putting it all together...
    final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> total_result1 = ElasticsearchIndexUtils
            .parseDefaultMapping(both_json, Optional.of("type_test"), Optional.empty(), Optional.empty(),
                    _config.search_technology_override(), _mapper);
    assertEquals(4, total_result1.size());
    assertEquals(
            "{\"mapping\":{\"type\":\"number\",\"index\":\"analyzed\"},\"path_match\":\"test*\",\"match_mapping_type\":\"number\"}",
            total_result1.get(Either.right(Tuples._2T("test*", "number"))).toString());
    assertEquals("{\"type\":\"date\"}", total_result1.get(Either.left("@timestamp1")).toString());
    // Without a type filter, more entries survive.
    final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> total_result2 = ElasticsearchIndexUtils
            .parseDefaultMapping(both_json, Optional.empty(), Optional.empty(), Optional.empty(),
                    _config.search_technology_override(), _mapper);
    assertEquals(7, total_result2.size());
    assertEquals(true, total_result2.get(Either.right(Tuples._2T("*", "string"))).get("mapping")
            .get("omit_norms").asBoolean());
    assertEquals("{\"type\":\"date\",\"fielddata\":{}}",
            total_result2.get(Either.left("@timestamp")).toString());

    // A couple of error checks:
    // - Missing mapping
    // - Mapping not an object
}
From source file:com.cwctravel.hudson.plugins.extended_choice_parameter.ExtendedChoiceParameterDefinition.java
LinkedHashMap<String, LinkedHashSet<String>> calculateChoicesByDropdownId() throws Exception { File file = new File(propertyFile); List<String[]> fileLines = Collections.emptyList(); if (file.isFile()) { CSVReader csvReader = null;//from ww w . ja v a 2s .c om try { csvReader = new CSVReader(new FileReader(file), '\t'); fileLines = csvReader.readAll(); } finally { csvReader.close(); } } else { URL propertyFileUrl = new URL(propertyFile); CSVReader csvReader = null; try { csvReader = new CSVReader(new InputStreamReader(propertyFileUrl.openStream()), '\t'); fileLines = csvReader.readAll(); } finally { csvReader.close(); } } if (fileLines.size() < 2) { throw new Exception("Multi level tab delimited file must have at least 2 " + "lines (one for the header, and one or more for the data)"); } ArrayList<Integer> columnIndicesForDropDowns = columnIndicesForDropDowns(fileLines.get(0)); List<String[]> dataLines = fileLines.subList(1, fileLines.size()); LinkedHashMap<String, LinkedHashSet<String>> choicesByDropdownId = new LinkedHashMap<String, LinkedHashSet<String>>(); String prefix = getName() + " dropdown MultiLevelMultiSelect 0"; choicesByDropdownId.put(prefix, new LinkedHashSet<String>()); for (int i = 0; i < columnIndicesForDropDowns.size(); ++i) { String prettyCurrentColumnName = value.split(",")[i]; prettyCurrentColumnName = prettyCurrentColumnName.toLowerCase(); prettyCurrentColumnName = prettyCurrentColumnName.replace("_", " "); for (String[] dataLine : dataLines) { String priorLevelDropdownId = prefix; String currentLevelDropdownId = prefix; int column = 0; for (int j = 0; j <= i; ++j) { column = columnIndicesForDropDowns.get(j); if (j < i) { priorLevelDropdownId += " " + dataLine[column]; } currentLevelDropdownId += " " + dataLine[column]; } if (i != columnIndicesForDropDowns.size() - 1) { choicesByDropdownId.put(currentLevelDropdownId, new LinkedHashSet<String>()); } LinkedHashSet<String> choicesForPriorDropdown = choicesByDropdownId.get(priorLevelDropdownId); 
choicesForPriorDropdown.add("Select a " + prettyCurrentColumnName + "..."); choicesForPriorDropdown.add(dataLine[column]); } } return choicesByDropdownId; }
From source file:eionet.cr.dao.virtuoso.VirtuosoEndpointHarvestQueryDAO.java
/**
 * Moves the harvest queries with the given ids one step up (direction &lt; 0) or down
 * (direction &gt; 0) within the endpoint's ordered query list, then rewrites all positions
 * as a contiguous 1..n sequence in a single transaction.
 *
 * <p>If any selected query is already at the boundary in the requested direction, no
 * reordering happens (positions are still renumbered).
 *
 * @param endpointUrl endpoint whose queries are reordered; blank input is a no-op
 * @param ids         ids of the queries to move; null/empty is a no-op
 * @param direction   negative = up, positive = down; zero is rejected
 * @throws DAOException on any SQL failure (transaction is rolled back)
 */
@Override
public void move(String endpointUrl, Set<Integer> ids, int direction) throws DAOException {
    if (StringUtils.isBlank(endpointUrl) || ids == null || ids.isEmpty()) {
        return;
    }
    if (direction == 0) {
        throw new IllegalArgumentException("Direction must not be 0!");
    }
    // Prepare map where we can get queries by position, also find the max and min positions.
    LinkedHashMap<Integer, EndpointHarvestQueryDTO> queriesByPos = getQueriesByPosition(endpointUrl);
    if (queriesByPos.isEmpty()) {
        return;
    }
    Set<Integer> positions = queriesByPos.keySet();
    int maxPos = Collections.max(positions);
    int minPos = Collections.min(positions);
    Connection conn = null;
    try {
        conn = getSQLConnection();
        conn.setAutoCommit(false);
        // If even one query is already at position 1 then moving up is not considered possible.
        // And conversely, if even one query is already at the last position, then moving down
        // is not considered possible either.
        boolean isMovingPossible = true;
        List<Integer> selectedPositions = new ArrayList<Integer>();
        List<EndpointHarvestQueryDTO> queries = new ArrayList<EndpointHarvestQueryDTO>(queriesByPos.values());
        for (EndpointHarvestQueryDTO query : queries) {
            if (ids.contains(query.getId())) {
                int pos = query.getPosition();
                if ((direction < 0 && pos == minPos) || (direction > 0 && pos == maxPos)) {
                    isMovingPossible = false;
                } else {
                    selectedPositions.add(pos);
                }
            }
        }
        if (isMovingPossible) {
            if (direction < 0) {
                // Moving up: swap each selected query with its predecessor, in ascending order
                // so earlier swaps don't displace later selections.
                for (Integer selectedPosition : selectedPositions) {
                    EndpointHarvestQueryDTO queryToMove = queriesByPos.get(selectedPosition);
                    int i = queries.indexOf(queryToMove);
                    queries.set(i, queries.get(i - 1));
                    queries.set(i - 1, queryToMove);
                }
            } else {
                // Moving down: iterate selections in reverse for the same reason.
                for (int j = selectedPositions.size() - 1; j >= 0; j--) {
                    EndpointHarvestQueryDTO queryToMove = queriesByPos.get(selectedPositions.get(j));
                    int i = queries.indexOf(queryToMove);
                    queries.set(i, queries.get(i + 1));
                    queries.set(i + 1, queryToMove);
                }
            }
        }
        // Shift existing positions out of the way, then renumber 1..n in the new order.
        SQLUtil.executeUpdate(INCREASE_POSITIONS_SQL, Arrays.asList(maxPos, endpointUrl), conn);
        for (int i = 0; i < queries.size(); i++) {
            SQLUtil.executeUpdate(UPDATE_POSITION_SQL, Arrays.asList(i + 1, queries.get(i).getId()), conn);
        }
        conn.commit();
    } catch (Exception e) {
        SQLUtil.rollback(conn);
        throw new DAOException(e.getMessage(), e);
    } finally {
        SQLUtil.close(conn);
    }
}
From source file:amulet.resourceprofiler.ResourceProfiler.java
/** * Helper function for getting the AVG. ENERGY COST for a particular resource. * @param resource/*from w ww .j a v a 2 s. c om*/ * @param qmapp * @param deviceInfo * @param steadyStateInfo * @param api_energy_lookup * @return */ private double getCostHelper(Resource resource, QMApp qmapp, DeviceInfo deviceInfo, SteadyStateInfo steadyStateInfo, LinkedHashMap<String, EnergyParam> api_energy_lookup, double[][] fill_rect_lookup, double[][] clear_rect_lookup) { // Compute the avg. energy cost to execute a line of code (LoC). double ENERGY_COST_PER_LOC = deviceInfo.avgNumInstructionsPerLoC * (deviceInfo.avgBasicInstructionPower * deviceInfo.avgBasicInstructionTime); // Define an arbitrary cost to give to un-recognized function calls. double UNKNOWN_COST = UNKNOWN_QUANTITY_SCALAR * ENERGY_COST_PER_LOC; // A cost variable for keeping track of the cost value to be returned to the caller. double cost = resource.cost; switch (resource.type) { case AMULET_API_FUNCTION_CALL: if (api_energy_lookup.containsKey(resource.name)) { // If the Amulet API call is recognized, then we just assign the real measurement value. EnergyParam energyparam = api_energy_lookup.get(resource.name); // TODO: is there a better/more correct way to do this?! // If cost is zero, calculate actual cost with known energy values; otherwise, leave it alone. 
double scalar = cost; if (cost == 0.0) { scalar = 1.0; } cost = scalar * energyparam.avgPower * energyparam.avgTime; if (resource.name.contains("ClearRect")) { int w = Math.min(resource.getIntExtra("width"), 127); int h = Math.min(resource.getIntExtra("height"), 114); double time_cost = clear_rect_lookup[w][h]; cost = scalar * energyparam.avgPower * time_cost; System.out.println("In clear " + cost); } if (resource.name.contains("FillRect")) { int w = Math.min(resource.getIntExtra("width"), 127); int h = Math.min(resource.getIntExtra("height"), 114); double time_cost = fill_rect_lookup[w][h]; cost = scalar * energyparam.avgPower * time_cost; } } else { // If the Amulet API call is *not* recognized, then we just assign a fixed cost. m_resourceProfilerWarnings.add(" + (!) LOOK-UP WARNING:: Cost for Amulet API function '" + resource.name + "' not found in api_energy_lookup table; assigning UNKNOWN_COST=" + UNKNOWN_COST + "."); cost = UNKNOWN_COST; } // If this call is nested within a loop, the cost of this function call needs to // be multiplied by the number of times this call is actually made. if (resource.isContainedInLoop()) { // System.out.println("**** RESOURCE " + resource.name + " cost was = " + cost); cost *= resource.getNumLoopIterations(); // System.out.println("**** RESOURCE " + resource.name + " is now = " + cost + " (orignal-cost x " + resource.getNumLoopIterations() + ")"); } break; case NON_AMULET_API_FUNCTION_CALL: if (qmapp.operationCostMap.containsKey(resource.name)) { // This is a function defined within the QM application (i.e., an "operation" by QM's definition). cost = qmapp.operationCostMap.get(resource.name) * qmapp.operationTimeMap.get(resource.name); } else { // If the Non-Amulet API call is *not* recognized, then we just assign a fixed cost. m_resourceProfilerWarnings.add(" + (!) 
LOOK-UP WARNING:: Cost for Non-Amulet API function '" + resource.name + "' not found; assigning UNKNOWN_COST=" + UNKNOWN_COST + "."); cost = UNKNOWN_COST; } // TODO: scalar handling?! // If this call is nested within a loop, the cost of this function call needs to // be multiplied by the number of times this call is actually made. if (resource.isContainedInLoop()) { // System.out.println("**** RESOURCE " + resource.name + " cost was = " + cost); cost *= resource.getNumLoopIterations(); // System.out.println("**** RESOURCE " + resource.name + " is now = " + cost + " (orignal-cost x " + resource.getNumLoopIterations() + ")"); } break; case COMPUTATION: if (resource.name.equals(ComputationType.BASIC_BLOCKS.text())) { // Get the number of lines of code. double nLinesOfCode = resource.getIntExtra(Resource.EXTRA_NUM_LINES_OF_CODE); // Calculate cost. cost = nLinesOfCode * ENERGY_COST_PER_LOC; } else if (resource.name.equals(ComputationType.FOR_LOOP.text())) { // Get the number of lines of code. // If double for-loop, then skip try { double nLinesOfCode = resource.getIntExtra(Resource.EXTRA_LOOP_NUM_STATEMENTS); // Get number of iterations in this loop. double nIterations = resource.getNumLoopIterations(); // Calculate cost. cost = (ENERGY_COST_PER_LOC * nLinesOfCode) * nIterations; } catch (Exception e) { System.err.println("**FAILED RESOURCE PARSING: skipping this resource"); System.err.println(" RESOURCE: " + resource + ""); } } break; case SENSOR_SUBSCRIPTION: if (resource.name.equalsIgnoreCase("ACCELEROMETER")) { cost = steadyStateInfo.sensorAccelerometer; } else if (resource.name.equalsIgnoreCase("HEARTRATE")) { cost = steadyStateInfo.sensorHeartRate; } break; case MEMORY: case GLOBAL_MEMORY: case UNKNOWN: default: } return cost; }
From source file:com.alibaba.wasp.client.WaspAdmin.java
public String describeIndex(String tableName, String indexName) throws IOException { FTable table = getTableDescriptor(Bytes.toBytes(tableName)); LinkedHashMap<String, Index> indexMap = table.getIndex(); Index index = indexMap.get(indexName); if (index == null) { return ""; }//w w w.ja v a2s . c o m StringBuilder builder = new StringBuilder(); builder.append("+----------------------+----------+-------+\n"); builder.append("| INDEX_KEYS |\n"); builder.append("+----------------------+----------+-------+\n"); builder.append("| Field | Type | ORDER |\n"); builder.append("+----------------------+----------+-------+\n"); String line = "| {0} | {1} | {2} |"; LinkedHashMap<String, Field> indexKeys = index.getIndexKeys(); Map<String, Field> storings = index.getStoring(); Set<String> desc = index.getDesc(); for (Field field : indexKeys.values()) { String fieldname = field.getName(); String s0 = fieldname + (fieldname.length() < 20 ? getGivenBlanks(20 - fieldname.length()) : ""); String type = field.getType().toString(); String s1 = type + (type.length() < 8 ? getGivenBlanks(8 - type.length()) : ""); String s2 = desc.contains(fieldname) ? "desc " : "asc "; builder.append(MessageFormat.format(line, s0, s1, s2)); builder.append("\n"); } builder.append("+----------------------+----------+-------+\n"); builder.append("| STORINGS |\n"); builder.append("+----------------------+----------+-------+\n"); builder.append("| Field | Type | ORDER |\n"); builder.append("+----------------------+----------+-------+\n"); for (Field field : storings.values()) { String fieldname = field.getName(); String s0 = fieldname + (fieldname.length() < 15 ? getGivenBlanks(15 - fieldname.length()) : ""); String type = field.getType().toString(); String s1 = type + (type.length() < 8 ? getGivenBlanks(8 - type.length()) : ""); String s2 = desc.contains(fieldname) ? 
"desc " : "asc "; builder.append(MessageFormat.format(line, s0, s1, s2)); builder.append("\n"); } builder.append("+----------------------+----------+-------+\n"); return builder.toString(); }
From source file:edu.umd.ks.cm.util.siscm.dao.impl.SisCmDaoImpl.java
/**
 * Translates a boolean requirement expression (e.g. "(A + B + (C * D))") into an English
 * sentence by substituting each component's natural-language translation and rendering
 * "+" as "or" and "*" as "and". Parenthesized sub-expressions are collapsed innermost-first
 * into synthetic symbols ("V" + position) so nesting becomes sentence punctuation.
 *
 * @param booleanExpression boolean formula over component ids
 * @param reqComponentList  components referenced by the formula
 * @return the assembled English sentence, first letter capitalized
 * @throws Exception propagated from the component translator
 */
private String getNaturalLanguageForStatement(String booleanExpression,
        List<ReqComponentReference> reqComponentList) throws Exception {
    // Maps component id (and later synthetic "V<pos>" ids) -> English translation.
    HashMap reqComponentMap = new HashMap();
    // Maps index of "(" -> index of its matching ")".
    LinkedHashMap<Integer, Integer> parPositionMap = new LinkedHashMap<Integer, Integer>();
    ArrayList<Integer> parLeftList = new ArrayList<Integer>();
    for (ReqComponentReference reqComponent : reqComponentList) {
        String translation = this.reqComponentTranslator.translate(reqComponent.getReqComponent(),
                "KUALI.RULE.CATALOG", "en");
        // Strip a trailing period so translations can be joined into one sentence.
        if (translation != null && translation.length() > 0
                && translation.substring(translation.length() - 1).equals("."))
            translation = translation.substring(0, translation.length() - 1);
        reqComponentMap.put(reqComponent.getBooleanId(), translation);
    }
    BooleanFunction booleanFunction = new BooleanFunction(booleanExpression);
    List<String> funcSymbs = booleanFunction.getSymbols();
    // Pair up parentheses by index using a stack of open positions.
    for (int i = 0; i < funcSymbs.size(); i++) {
        if (funcSymbs.get(i).equals("(")) {
            parLeftList.add(i);
        }
        int parLeftLast = parLeftList.size() - 1;
        if (funcSymbs.get(i).equals(")")) {
            parPositionMap.put(parLeftList.get(parLeftLast), i);
            parLeftList.remove(parLeftLast);
        }
    }
    // For the expression (A + B + (C * D)) want to remove outer ()
    if (parPositionMap.containsKey(0) && parPositionMap.get(0) == funcSymbs.size() - 1) {
        parPositionMap.remove(0);
        funcSymbs.set(0, "null");
        funcSymbs.set(funcSymbs.size() - 1, "null");
    }
    if (!parPositionMap.isEmpty()) {
        // Collapse each parenthesized group into a synthetic symbol "V<openIndex>" whose
        // translation is the group's joined text; consumed symbols become "null".
        // NOTE(review): relies on LinkedHashMap iteration visiting inner groups before
        // the outer groups that contain them — confirm ordering from getSymbols().
        for (Integer key : parPositionMap.keySet()) {
            StringBuffer funcSymb = new StringBuffer("");
            int pos = 0; // 0 until the first operand of this group has been emitted
            String expr = "";
            for (int i = key + 1; i < parPositionMap.get(key); i++) {
                String funcSymbAdd = funcSymbs.get(i);
                if (!funcSymbAdd.equals("+") && !funcSymbAdd.equals("*") && !funcSymbAdd.equals("null")) {
                    expr = (String) reqComponentMap.get(funcSymbAdd);
                    // First operand: strip redundant wrapping parens (not for V-symbols).
                    if (pos == 0 && !funcSymbAdd.substring(0, 1).equals("V") && expr.length() > 2
                            && expr.substring(0, 1).equals("(")
                            && expr.substring(expr.length() - 1).equals(")")) {
                        expr = expr.substring(1, expr.length() - 1);
                    }
                    pos = 1;
                    // Convert the first character of 'expr' to lower case, if necessary
                    // (only when the second char is lower case, to avoid mangling acronyms).
                    if (expr.length() > 0) {
                        char ch0 = expr.charAt(0);
                        if (ch0 <= 'Z' && ch0 >= 'A') {
                            if (expr.length() > 1) {
                                char ch1 = expr.charAt(1);
                                if (ch1 >= 'a' && ch1 <= 'z') {
                                    expr = expr.substring(0, 1).toLowerCase() + expr.substring(1);
                                }
                            } else {
                                expr = expr.toLowerCase();
                            }
                        }
                    }
                    funcSymb.append(expr);
                } else if (funcSymbAdd.equals("+")) {
                    funcSymb.append("; or ");
                } else if (funcSymbAdd.equals("*")) {
                    funcSymb.append("; and ");
                }
            } // for int i
            String id = "V" + Integer.toString(key);
            funcSymb.insert(0, "(");
            funcSymb.append(")");
            reqComponentMap.put(id, funcSymb.toString());
            funcSymbs.set(key, id);
            // Blank out the symbols this group consumed (including the closing paren).
            for (int i = key + 1; i < parPositionMap.get(key) + 1; i++)
                funcSymbs.set(i, "null");
        }
    }
    // Drop the consumed placeholders, leaving the top-level symbol sequence.
    List<String> funcSymbsNew = new ArrayList<String>();
    for (int i = 0; i < funcSymbs.size(); i++) {
        if (!funcSymbs.get(i).equals("null"))
            funcSymbsNew.add(funcSymbs.get(i));
    }
    String nl = "";
    if (funcSymbsNew.size() == 1) {
        // Single symbol left: its translation is the sentence; unwrap outer parens.
        nl = (String) reqComponentMap.get(funcSymbsNew.get(0));
        if (nl.substring(0, 1).equals("(") && nl.substring(nl.length() - 1).equals(")"))
            nl = nl.substring(1, nl.length() - 1);
    } else {
        // Join the remaining top-level symbols; next to a collapsed group ("V...") the
        // connective starts a new sentence (". Or "/". And "), otherwise it stays inline.
        int pos = 0;
        String expr = "";
        for (int i = 0; i < funcSymbsNew.size(); i++) {
            if (!funcSymbsNew.get(i).equals("*") && !funcSymbsNew.get(i).equals("+")) {
                expr = (String) reqComponentMap.get(funcSymbsNew.get(i));
                if (pos == 0) {
                    if (expr.length() > 2 && expr.substring(0, 1).equals("(")
                            && expr.substring(expr.length() - 1).equals(")"))
                        expr = expr.substring(1, expr.length() - 1);
                    pos = 1;
                } else {
                    if (funcSymbsNew.get(i).substring(0, 1).equals("V") && expr.length() > 2
                            && expr.substring(0, 1).equals("(")
                            && expr.substring(expr.length() - 1).equals(")"))
                        expr = expr.substring(1, expr.length() - 1);
                }
                nl = nl + expr;
            } else if (funcSymbsNew.get(i).equals("+")) {
                if ((i > 0 && funcSymbsNew.get(i - 1).substring(0, 1).equals("V"))
                        || (i < (funcSymbsNew.size() - 1)
                                && funcSymbsNew.get(i + 1).substring(0, 1).equals("V")))
                    nl = nl + ". Or ";
                else
                    nl = nl + "; or ";
            } else if (funcSymbsNew.get(i).equals("*")) {
                if ((i > 0 && funcSymbsNew.get(i - 1).substring(0, 1).equals("V"))
                        || (i < (funcSymbsNew.size() - 1)
                                && funcSymbsNew.get(i + 1).substring(0, 1).equals("V")))
                    nl = nl + ". And ";
                else
                    nl = nl + "; and ";
            }
        }
    }
    //TODO: Fix Capitalization
    nl = nl.substring(0, 1).toUpperCase() + nl.substring(1);
    return nl.trim();
}
From source file:com.tacitknowledge.util.migration.DistributedMigrationProcess.java
/**
 * Applies necessary patches to the system.
 *
 * <p>Flow: collect all migration tasks with their launchers, validate and sort them, dry-run
 * to count pending tasks, honor the read-only flag, then apply each unapplied task in every
 * context its launcher manages. When {@code forceSync} is set, instead patches only the
 * contexts whose patch level is out of sync with the authoritative store.
 *
 * @param patchInfoStore of the system to run
 * @param context information and resources that are available to the migration tasks
 * @return the number of <code>MigrationTask</code>s that have executed
 * @throws MigrationException if a migration fails
 * @Override
 */
public final int doMigrations(final PatchInfoStore patchInfoStore, final MigrationContext context)
        throws MigrationException {
    log.debug("Starting doMigrations");
    // Get all the migrations, with their launchers, then get the list of
    // just the migrations
    LinkedHashMap migrationsWithLaunchers = getMigrationTasksWithLaunchers();
    List migrations = new ArrayList();
    migrations.addAll(migrationsWithLaunchers.keySet());
    // make sure the migrations are okay, then sort them
    validateTasks(migrations);
    Collections.sort(migrations);
    validateControlledSystems(patchInfoStore);
    // determine how many tasks we're going to execute
    int taskCount = patchDryRun(patchInfoStore, migrationsWithLaunchers);
    if (taskCount > 0) {
        log.info("A total of " + taskCount + " patch tasks will execute.");
    } else {
        log.info("System up-to-date. No patch tasks will execute.");
    }
    // See if we should execute
    if (isReadOnly()) {
        if (taskCount > 0) {
            throw new MigrationException("Unapplied patches exist, but read-only flag is set");
        }
        log.info("In read-only mode - skipping patch application");
        return 0;
    }
    // Roll through each migration, applying it if necessary
    taskCount = 0;
    for (Iterator i = migrations.iterator(); i.hasNext();) {
        MigrationTask task = (MigrationTask) i.next();
        int migrationLevel = task.getLevel().intValue();
        boolean shouldApplyPatch = getMigrationRunnerStrategy().shouldMigrationRun(migrationLevel,
                patchInfoStore);
        if (shouldApplyPatch && !forceSync) {
            // Execute the task in the context it was loaded from
            JdbcMigrationLauncher launcher = (JdbcMigrationLauncher) migrationsWithLaunchers.get(task);
            // Get all the contexts the task will execute in
            for (Iterator j = launcher.getContexts().keySet().iterator(); j.hasNext();) {
                MigrationContext launcherContext = (MigrationContext) j.next();
                applyPatch(launcherContext, task, true);
            }
            taskCount++;
        } else if (forceSync)// if a sync is forced, need to check all
                             // the contexts to identify the ones out of
                             // sync
        {
            boolean patchesApplied = false;
            ArrayList outOfSyncContexts = new ArrayList();
            // first need to iterate over all the contexts and determined
            // which one's are out of sync.
            // can't sync yet because if there are multiple contexts that
            // are out of sync, after the
            // first one is synced, the remaining one's have their patch
            // level updated via the
            // MigrationListener.migrationSuccessful event.
            JdbcMigrationLauncher launcher = (JdbcMigrationLauncher) migrationsWithLaunchers.get(task);
            for (Iterator j = launcher.getContexts().keySet().iterator(); j.hasNext();) {
                MigrationContext launcherContext = (MigrationContext) j.next();
                PatchInfoStore patchInfoStoreOfContext = (PatchInfoStore) launcher.getContexts()
                        .get(launcherContext);
                if (!getMigrationRunnerStrategy().isSynchronized(patchInfoStore, patchInfoStoreOfContext)) {
                    outOfSyncContexts.add(launcherContext);
                }
            }
            // next patch the contexts that have been determined to be out
            // of sync
            for (Iterator iter = outOfSyncContexts.iterator(); iter.hasNext();) {
                MigrationContext launcherContext = (MigrationContext) iter.next();
                applyPatch(launcherContext, task, true);
                patchesApplied = true;
            }
            if (patchesApplied) {
                taskCount++;
            }
        } // else if forceSync
    }
    if (taskCount > 0) {
        log.info("Patching complete (" + taskCount + " patch tasks executed)");
    } else {
        log.info("System up-to-date. No patch tasks have been run.");
    }
    return taskCount;
}
From source file:com.sonicle.webtop.vfs.Service.java
/**
 * Servlet action backing the file grid: for a READ crud request, lists the files and
 * folders under the node identified by the "fileId" parameter and writes them to the
 * response as a JSON "files" result, attaching any download/upload sharing links that
 * match each entry's file hash.
 *
 * <p>Any failure is logged and reported to the client as a generic JSON error.
 *
 * @param request  incoming HTTP request ("crud" and optional "fileId" parameters)
 * @param response HTTP response (unused directly; output goes through {@code out})
 * @param out      writer receiving the JSON result
 */
public void processManageGridFiles(HttpServletRequest request, HttpServletResponse response, PrintWriter out) {
    ArrayList<JsGridFile> items = new ArrayList<>();
    try {
        String crud = ServletUtils.getStringParameter(request, "crud", true);
        if (crud.equals(Crud.READ)) {
            // Decode the grid node id into (share, store, path) coordinates.
            String parentFileId = ServletUtils.getStringParameter(request, "fileId", null);
            StoreNodeId parentNodeId = (StoreNodeId) new StoreNodeId().parse(parentFileId);
            int storeId = Integer.valueOf(parentNodeId.getStoreId());
            StoreShareFolder folder = getFolderFromCache(storeId);
            // A 2-token node id refers to the store root.
            String path = (parentNodeId.getSize() == 2) ? "/" : parentNodeId.getPath();
            boolean showHidden = us.getShowHiddenFiles();
            // Sharing links for this folder, keyed by file hash.
            LinkedHashMap<String, SharingLink> dls = manager.listDownloadLinks(storeId, path);
            LinkedHashMap<String, SharingLink> uls = manager.listUploadLinks(storeId, path);
            StoreFileSystem sfs = manager.getStoreFileSystem(storeId);
            for (FileObject fo : manager.listStoreFiles(StoreFileType.FILE_OR_FOLDER, storeId, path)) {
                if (!showHidden && VfsUtils.isFileObjectHidden(fo))
                    continue;
                // Relativize path and force trailing separator if file is a folder
                final String filePath = fo.isFolder()
                        ? PathUtils.ensureTrailingSeparator(sfs.getRelativePath(fo), false)
                        : sfs.getRelativePath(fo);
                final String fileId = new StoreNodeId(parentNodeId.getShareId(), parentNodeId.getStoreId(),
                        filePath).toString();
                // Hash links download/upload sharing links to this specific file.
                final String fileHash = manager.generateStoreFileHash(storeId, filePath);
                items.add(new JsGridFile(folder, fo, fileId, dls.get(fileHash), uls.get(fileHash)));
            }
            new JsonResult("files", items).printTo(out);
        }
    } catch (Exception ex) {
        logger.error("Error in action ManageGridFiles", ex);
        new JsonResult(false, "Error").printTo(out);
    }
}
From source file:com.moded.extendedchoiceparameter.ExtendedChoiceParameterDefinition.java
/**
 * Builds the cascading-dropdown choice sets from a tab-delimited property file. The first
 * line is the header; each subsequent line contributes one choice per dropdown level.
 * Returns an insertion-ordered map of dropdown id -> ordered choice set.
 *
 * <p>Fix: the original never closed the CSVReader (resource leak); it is now closed in a
 * {@code finally} block.
 *
 * @return choices keyed by hierarchical dropdown id
 * @throws Exception if the file has fewer than 2 lines (header + at least one data line)
 */
LinkedHashMap<String, LinkedHashSet<String>> calculateChoicesByDropdownId() throws Exception {
    List<String[]> fileLines;
    CSVReader csvReader = null;
    try {
        csvReader = new CSVReader(new FileReader(propertyFile), '\t');
        fileLines = csvReader.readAll();
    } finally {
        if (csvReader != null) { // guard: constructor may have thrown before assignment
            csvReader.close();
        }
    }
    if (fileLines.size() < 2) {
        throw new Exception("Multi level tab delimited file must have at least 2 "
                + "lines (one for the header, and one or more for the data)");
    }
    ArrayList<Integer> columnIndicesForDropDowns = columnIndicesForDropDowns(fileLines.get(0));
    List<String[]> dataLines = fileLines.subList(1, fileLines.size());
    LinkedHashMap<String, LinkedHashSet<String>> choicesByDropdownId =
            new LinkedHashMap<String, LinkedHashSet<String>>();
    // Root dropdown id; deeper levels append the chosen values of their ancestors.
    String prefix = getName() + " dropdown MultiLevelMultiSelect 0";
    choicesByDropdownId.put(prefix, new LinkedHashSet<String>());
    for (int i = 0; i < columnIndicesForDropDowns.size(); ++i) {
        // Human-readable column name for the "Select a ..." placeholder entry.
        String prettyCurrentColumnName = value.split(",")[i];
        prettyCurrentColumnName = prettyCurrentColumnName.toLowerCase();
        prettyCurrentColumnName = prettyCurrentColumnName.replace("_", " ");
        for (String[] dataLine : dataLines) {
            String priorLevelDropdownId = prefix;
            String currentLevelDropdownId = prefix;
            int column = 0;
            // Build the ids by appending each ancestor value; the prior id stops one level short.
            for (int j = 0; j <= i; ++j) {
                column = columnIndicesForDropDowns.get(j);
                if (j < i) {
                    priorLevelDropdownId += " " + dataLine[column];
                }
                currentLevelDropdownId += " " + dataLine[column];
            }
            // Leaf level has no children, so no empty choice set is registered for it.
            if (i != columnIndicesForDropDowns.size() - 1) {
                choicesByDropdownId.put(currentLevelDropdownId, new LinkedHashSet<String>());
            }
            LinkedHashSet<String> choicesForPriorDropdown = choicesByDropdownId.get(priorLevelDropdownId);
            choicesForPriorDropdown.add("Select a " + prettyCurrentColumnName + "...");
            choicesForPriorDropdown.add(dataLine[column]);
        }
    }
    return choicesByDropdownId;
}