Example usage for java.util.List.toString()

A list of usage examples for java.util.List.toString()

Introduction

On this page you can find example usages of java.util.List.toString().

Prototype

public String toString() 

Document

Returns a string representation of the object. For typical List implementations (which inherit the behavior from AbstractCollection), the result lists the elements in iteration order, separated by ", " and enclosed in square brackets ("[]").
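
A minimal sketch of that output format (class and variable names are illustrative):

import java.util.Arrays;
import java.util.List;

public class ListToStringDemo {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alpha", "beta", "gamma");
        // Prints: [alpha, beta, gamma]
        System.out.println(names.toString());
    }
}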

Usage

From source file:com.ajitkbaral.entitygenerator.controller.GenerateController.java

@RequestMapping(method = RequestMethod.POST)
public String indexPost(HttpServletRequest request, HttpServletResponse response) {
    String result = "";
    String packageName = request.getParameter("packageName");
    String className = request.getParameter("className");
    String variableNames = request.getParameter("variableNames");
    String variableTypes = request.getParameter("variableTypes");

    String defaultPath = request.getParameter("fullPath");
    String filePath = packageName.replace(".", "\\");

    String finalFilePath = defaultPath + "\\" + filePath;
    String finalFilePathWithEntity = finalFilePath + "\\" + className + ".java";

    System.out.println("Variables and types " + variableNames + " " + variableTypes);

    if (!variableNames.equals("") && !variableTypes.equals("")) {

        List<String> variableNameList = new ArrayList<String>();
        List<String> variableTypeList = new ArrayList<String>();

        StringTokenizer variableNameTokenizer = new StringTokenizer(variableNames, ",");
        while (variableNameTokenizer.hasMoreTokens()) {
            variableNameList.add(variableNameTokenizer.nextToken());
        }

        StringTokenizer variableTypeTokenizer = new StringTokenizer(variableTypes, ",");
        while (variableTypeTokenizer.hasMoreTokens()) {
            variableTypeList.add(variableTypeTokenizer.nextToken());
        }

        System.out.println(variableNameList.toString() + " " + variableTypeList.toString());

        try {
            File fileFolder = new File(finalFilePath);
            if (!fileFolder.exists()) {
                fileFolder.mkdirs();

            }

            FileWriter fileWriter = new FileWriter(finalFilePathWithEntity);

            String content = GenerateClass.packageNameContent(packageName)
                    + GenerateClass.classNameContent(className)
                    + GenerateClass.variableDecleration(variableNameList, variableTypeList)
                    + GenerateClass.defaultConstructor(className)
                    + GenerateClass.overrideConstructor(className, variableNameList, variableTypeList)
                    + GenerateClass.getter(variableNameList, variableTypeList)
                    + GenerateClass.setter(variableNameList, variableTypeList) + GenerateClass.close();
            fileWriter.write(content);
            fileWriter.close();
            System.out.println("DONE");
            result = finalFilePathWithEntity;

        } catch (IOException ioe) {
            result = ioe.getMessage() + ". Please Try again";
        }

    } else {
        result = "Error";
    }
    return result;
}
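
In the example above, toString() is only needed for the debug println on the two token lists; string concatenation calls it implicitly anyway, so the explicit calls are a stylistic choice. A brief sketch of that equivalence (names are illustrative):

import java.util.Arrays;
import java.util.List;

public class ImplicitToStringDemo {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("id", "title");
        List<String> types = Arrays.asList("int", "String");
        // Both lines print: [id, title] [int, String]
        System.out.println(names.toString() + " " + types.toString());
        System.out.println(names + " " + types);
    }
}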

From source file:edu.ucuenca.authorsrelatedness.Distance.java

public double NWD(String uri1, String end1, String uri2, String end2, String quy) throws Exception {

    List<String> prms = new ArrayList<>();
    prms.add(uri1 + "+" + end1);
    prms.add(uri2 + "+" + end2);

    prms.add(quy);

    Collections.sort(prms);

    Double rspc = GetCacheDistance(prms.toString());
    if (rspc == null) {
        Map<String, List<String>> map = new HashMap<>();
        List<String> Authors = new ArrayList<>();
        Authors.add(uri1);
        Authors.add(uri2);
        List<String> Endpoints = new ArrayList<>();
        Endpoints.add(end1);
        Endpoints.add(end2);
        Map<String, Double> Result = new HashMap<>();
        for (int i = 0; i < Authors.size(); i++) {
            for (int j = i + 1; j < Authors.size(); j++) {
                String a1 = Authors.get(i);
                String a2 = Authors.get(j);
                List<String> ka1 = null;
                List<String> ka2 = null;
                if (map.containsKey(a1)) {
                    ka1 = map.get(a1);
                } else {
                    ka1 = consultado2(a1, Endpoints.get(i));
                    //String t1_ = traductor(Joiner.on(" | ").join(ka1)).toLowerCase();
                    ka1 = traductor(ka1);//new LinkedList<String>(java.util.Arrays.asList(t1_.split("\\s\\|\\s")));
                    ka1 = clean(ka1);
                    System.out.println(uri1 + "|E:" + Joiner.on(",").join(ka1));
                    ka1 = TopT(ka1, (int) (2.0 * Math.log(ka1.size())));
                    System.out.println(uri1 + "|F:" + Joiner.on(",").join(ka1));
                    map.put(a1, ka1);
                }

                if (map.containsKey(a2)) {
                    ka2 = map.get(a2);
                } else {
                    ka2 = consultado2(a2, Endpoints.get(j));
                    //String t2_ = traductor(Joiner.on(" | ").join(ka2)).toLowerCase();
                    ka2 = traductor(ka2);//new LinkedList<String>(java.util.Arrays.asList(t2_.split("\\s\\|\\s")));
                    ka2 = clean(ka2);
                    System.out.println(uri2 + "|E:" + Joiner.on(",").join(ka2));
                    ka2 = TopT(ka2, (int) (2.0 * Math.log(ka2.size())));
                    System.out.println(uri2 + "|F:" + Joiner.on(",").join(ka2));
                    map.put(a2, ka2);
                }
                //System.out.println(ka1.size() + "," + ka2.size());

                double sum = 0;
                double num = 0;

                for (String t1 : ka1) {
                    for (String t2 : ka2) {
                        num++;
                        String tt1 = t1;
                        String tt2 = t2;
                        double v = NGD(tt1, tt2);
                        sum += v;
                    }
                }
                double prom = sum / num;
                if (num == 0 && sum == 0) {
                    prom = 2;
                }
                Result.put(i + "," + j, prom);
            }
        }

        double r = 0;
        for (Map.Entry<String, Double> cc : Result.entrySet()) {
            r = cc.getValue();
        }
        rspc = r;
        PutCacheDistance(prms.toString(), rspc);
    }
    return rspc;
}
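
In NWD above, prms.toString() serves as the cache key: because the parameter list is sorted first, the same pair of URIs and endpoints always produces the same bracketed string regardless of argument order. A minimal sketch of that idea, with a hypothetical HashMap standing in for GetCacheDistance/PutCacheDistance:

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ListKeyCacheDemo {
    // Hypothetical in-memory cache keyed by the list's string form.
    private static final Map<String, Double> CACHE = new HashMap<>();

    public static void main(String[] args) {
        List<String> params = new ArrayList<>();
        params.add("uriB+endpoint2");
        params.add("uriA+endpoint1");
        Collections.sort(params); // makes the key order-independent
        String key = params.toString(); // "[uriA+endpoint1, uriB+endpoint2]"
        CACHE.put(key, 0.42);
        System.out.println(CACHE.get(key)); // 0.42
    }
}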

From source file:com.chiralbehaviors.CoRE.meta.models.JobModelTest.java

@Test
public void testJobChronologyOnStatusUpdate() throws Exception {
    JobRecord order = model.getJobModel().newInitializedJob(scenario.getDeliver());
    order.setAssignTo(scenario.getOrderFullfillment().getId());
    order.setProduct(scenario.getAbc486().getId());
    order.setDeliverTo(scenario.getRsb225().getId());
    order.setDeliverFrom(scenario.getFactory1().getId());
    order.setRequester(scenario.getGeorgetownUniversity().getId());
    order.insert();
    jobModel.changeStatus(order, scenario.getAvailable(), "Test transition");
    model.flush();
    order.refresh();
    List<JobChronologyRecord> chronologies = model.getJobModel().getChronologyForJob(order);
    assertEquals(
            String.format("Invalid number of chronologies: %s",
                    chronologies.stream().map(c -> jobModel.toString(c)).collect(Collectors.toList())),
            2, chronologies.size());
    List<String> fieldErrors = verifyChronologyFields(order, chronologies.get(1));

    assertEquals(fieldErrors.toString(), 0, fieldErrors.size());
    model.getJobModel().changeStatus(order, scenario.getActive(), null);

    model.flush();
    chronologies = model.getJobModel().getChronologyForJob(order);
    assertEquals(3, chronologies.size());
    for (JobChronologyRecord c : chronologies) {
        fieldErrors = verifyChronologyFields(order, c);
        if (fieldErrors == null || fieldErrors.size() == 0) {
            break;
        }
    }
    assertEquals(0, fieldErrors.size());
}
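
One detail worth noting above: fieldErrors.toString() is passed as the message argument of assertEquals, so a failing assertion prints the offending entries rather than just a size mismatch. A minimal JUnit 4 sketch of the same pattern (the list contents are illustrative):

import static org.junit.Assert.assertEquals;

import java.util.Arrays;
import java.util.List;
import org.junit.Test;

public class AssertionMessageDemo {
    @Test
    public void noFieldErrorsExpected() {
        List<String> fieldErrors = Arrays.asList(); // collected elsewhere in a real test
        // On failure, JUnit prints the bracketed list of errors as the message.
        assertEquals(fieldErrors.toString(), 0, fieldErrors.size());
    }
}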

From source file:org.megam.deccanplato.provider.googleapp.handler.UserImpl.java

/**
 * Lists all users in a domain.
 * The users are retrieved by calling the AppsForYourDomainClient
 * retrieveAllUsers method with the client credential apclient.
 * @param outMap
 * @return outMap containing the list of users.
 */
private Map<String, String> list(Map<String, String> outMap) {
    List<String> list = new ArrayList<String>();
    UserFeed userFeed = null;

    try {
        userFeed = apclient.retrieveAllUsers();
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    if (userFeed != null) {
        for (UserEntry userEntry : userFeed.getEntries()) {

            List<NameValuePair> userlist = new ArrayList<NameValuePair>();
            userlist.add(new BasicNameValuePair(USER_NAME, userEntry.getLogin().getUserName()));
            userlist.add(new BasicNameValuePair(USER_PASSWORD, userEntry.getLogin().getPassword()));
            //outMap.put("user_name", userEntry.getLogin().getAdmin());
            userlist.add(new BasicNameValuePair(FAMILY_NAME, userEntry.getName().getFamilyName()));
            userlist.add(new BasicNameValuePair(GIVEN_NAME, userEntry.getName().getGivenName()));
            list.add(userlist.toString());
            outMap.put(OUTPUT, list.toString());
        }

    }
    return outMap;
}

From source file:de.iteratec.iteraplan.businesslogic.exchange.elasticmi.read.TestExcelFormatCompatibilityBankModel.java

private void testData(InputStream in, ImportStrategy strategy, Map<String, Integer> createCounts,
        Map<String, Integer> updateCounts, Map<String, Integer> deleteCounts) throws Exception {
    process = new MiExcelImportProcess(null, null, null, null, null, strategy, loadTaskFactory, in);

    if (!process.importAndCheckFile()) {
        failWithMessages();
    }

    if (doCheckMetamodel) {
        checkMetamodel();
    }

    //skip metamodel checkPoints
    process.getCurrentCheckList().pending(CheckPoint.METAMODEL_COMPARE);
    process.getCurrentCheckList().done(CheckPoint.METAMODEL_COMPARE);
    process.getCurrentCheckList().pending(CheckPoint.METAMODEL_MERGE);
    process.getCurrentCheckList().done(CheckPoint.METAMODEL_MERGE);

    if (!process.dryRun()) {
        failWithMessages();
    }

    MiImportProcessMessages messages = process.getImportProcessMessages();
    Assert.assertEquals(0, messages.getMessages(Severity.ERROR).size());

    List<String> messageStrings = Lists.transform(messages.getMessages(), new Function<Message, String>() {
        public String apply(Message input) {
            return input.getMessage();
        }
    });

    //apart from merge messages there are 3 additional messages issued
    Assert.assertEquals(messageStrings.toString(),
            createCounts.size() + updateCounts.size() + deleteCounts.size() + 3, messageStrings.size());

    for (String key : createCounts.keySet()) {
        RStructuredTypeExpression t = process.getCanonicMetamodel().findStructuredTypeByPersistentName(key);
        AccumulatedCreateMessage expectedMessage = new AccumulatedCreateMessageMock(t,
                createCounts.get(t.getPersistentName()).intValue());
        Assert.assertTrue(expectedMessage.getMessage() + "\n" + messageStrings,
                messageStrings.contains(expectedMessage.getMessage()));
    }
    for (String key : updateCounts.keySet()) {
        RStructuredTypeExpression t = process.getCanonicMetamodel().findStructuredTypeByPersistentName(key);
        AccumulatedUpdateMessage expectedMessage = new AccumulatedUpdateMessageMock(t,
                updateCounts.get(t.getPersistentName()).intValue());
        Assert.assertTrue(expectedMessage.getMessage() + "\n" + messageStrings,
                messageStrings.contains(expectedMessage.getMessage()));
    }
    for (String key : deleteCounts.keySet()) {
        RStructuredTypeExpression t = process.getCanonicMetamodel().findStructuredTypeByPersistentName(key);
        AccumulatedDeleteMessage expectedMessage = new AccumulatedDeleteMessageMock(t,
                deleteCounts.get(t.getPersistentName()).intValue());
        Assert.assertTrue(expectedMessage.getMessage() + "\n" + messageStrings,
                messageStrings.contains(expectedMessage.getMessage()));
    }
}

From source file:edu.harvard.i2b2.util.I2b2UtilTest.java

@Test
public void validateUser()
        throws XQueryUtilException, IOException, JAXBException, AuthenticationFailure, FhirCoreException {
    String pmResponse = I2b2Util.getPmResponseXml(i2b2User, i2b2Password, i2b2Url, i2b2Domain);
    String i2b2Token = I2b2Util.getToken(pmResponse);
    pmResponse = I2b2Util.getPmResponseXml(i2b2User, i2b2Token, i2b2Url, i2b2Domain);

    List<Project> projMap = I2b2Util.getUserProjectMap(pmResponse);

    Assert.assertEquals(projectId, projMap.get(0).getId());
    //Assert.assertEquals("i2b2 Demo",projMap.get(0).getName());
    //logger.info(pmResponse);
    //logger.info("i2b2Token:"+i2b2Token);
    I2b2Util.getAllPatients(i2b2User, i2b2Token, i2b2Url, i2b2Domain, projMap.get(0).getId());
    String pdoXml = I2b2Util.getAllDataPDO(i2b2User, i2b2Password, i2b2Url, i2b2Domain, projectId, patientId,
            items);
    Bundle b = I2b2Util.getAllDataForAPatientAsFhirBundle(pdoXml);
    logger.info("projMap:" + projMap.toString());
    //logger.info("pdoAllPtDataXml:"+pdoXml);
    logger.info("Patient Bundle size:" + b.getEntry().size());
}

From source file:fr.jetoile.hadoopunit.component.Neo4jBootstrapTest.java

@Test
public void cypher_query_should_sucess() {

    try (Transaction tx = graphDb.beginTx()) {
        Node myNode = graphDb.createNode();
        myNode.setProperty("name", "my node");
        tx.success();
    }

    List<String> res = new ArrayList<>();
    try (Transaction ignored = graphDb.beginTx();
            Result result = graphDb.execute("match (n {name: 'my node'}) return n, n.name")) {
        while (result.hasNext()) {
            Map<String, Object> row = result.next();
            for (Map.Entry<String, Object> column : row.entrySet()) {
                res.add(column.getKey() + ": " + column.getValue());
                LOGGER.debug(column.getKey() + ": " + column.getValue());
            }
        }
    }

    assertEquals(2, res.size());
    assertTrue(res.toString().contains("n.name: my node"));
}

From source file:org.apache.drill.exec.store.http.HttpGroupScan.java

private void init() {
    logger.debug("Getting region locations");

    //Collection<DrillbitEndpoint> endpoints = storagePlugin.getContext().getBits();
    List<DrillbitEndpoint> drillbits = Lists.newArrayList(storagePlugin.getContext().getBits());
    logger.info("drillbits: " + drillbits.toString());

    Map<String, DrillbitEndpoint> endpointMap = Maps.newHashMap();
    for (DrillbitEndpoint endpoint : drillbits) {
        endpointMap.put(endpoint.getAddress(), endpoint);
    }
    try {

        //TODO init TASK
        for (int i = 0; i < DBUtil.getPart_data_DBs().size(); i++) {
            HttpWork work = new HttpWork("key" + i * 100, "key" + i * 100 + 99, "mayun",
                    DBUtil.getPart_data_DBs().get(i));

            int bitIndex = i % drillbits.size();
            work.getByteMap().add(drillbits.get(bitIndex), 1000);
            httpWorks.add(work);
        }

    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    /*
    TableName tableName = TableName.valueOf(httpScanSpec.getTableName());
    Connection conn = storagePlugin.getConnection();
            
    try (Admin admin = conn.getAdmin();
         RegionLocator locator = conn.getRegionLocator(tableName)) {
      this.hTableDesc = admin.getTableDescriptor(tableName);
      List<HRegionLocation> regionLocations = locator.getAllRegionLocations();
      //statsCalculator = new TableStatsCalculator(conn, httpScanSpec, storagePlugin.getContext().getConfig(), storagePluginConfig);
              
      //TODO
      logger.info("regionLocations size: " + regionLocations.size());
      regionLocations.add(regionLocations.get(0));
      regionLocations.add(regionLocations.get(0));
      regionLocations.add(regionLocations.get(0));
      logger.info("regionLocations size: " + regionLocations.size());
              
      boolean foundStartRegion = false;
      regionsToScan = new TreeMap<HttpTestDto, ServerName>();
      for ( int i =0 ; i < regionLocations.size(); i++) {
                 
         HRegionLocation regionLocation = regionLocations.get(i);
        HRegionInfo regionInfo = regionLocation.getRegionInfo();
        if (!foundStartRegion && httpScanSpec.getStartRow() != null && httpScanSpec.getStartRow().length != 0 && !regionInfo.containsRow(httpScanSpec.getStartRow())) {
          continue;
        }
        foundStartRegion = true;
                
        HttpTestDto testDto =  new HttpTestDto();
        testDto.setRegionInfo(regionInfo);  
        testDto.setRegionId(i);
                
        regionsToScan.put(testDto, regionLocation.getServerName());
        //scanSizeInBytes += statsCalculator.getRegionSizeInBytes(regionInfo.getRegionName());
        if (httpScanSpec.getStopRow() != null && httpScanSpec.getStopRow().length != 0 && regionInfo.containsRow(httpScanSpec.getStopRow())) {
          break;
        }
      }
    } catch (IOException e) {
      throw new DrillRuntimeException("Error getting region info for table: " + httpScanSpec.getTableName(), e);
    }
            
    logger.info("regionsToScan size: " + regionsToScan.size());
    verifyColumns();*/
}

From source file:com.ggvaidya.scinames.ui.DatasetDiffController.java

private Function<DatasetRow, String> getByUniqueMap() {
    DatasetColumn colByEqual = byUniqueComboBox.getValue();
    if (colByEqual.equals(DATASET_COLUMN_ALL)) {
        return row -> row.toString();
    } else if (colByEqual.equals(DATASET_COLUMN_NAME_ONLY)) {
        // Note that this will combine rows that have identical names, which is not
        // what we want.
        return row -> row.getDataset().getNamesInRow(row).toString();
    } else if (colByEqual.equals(DATASET_COLUMN_BINOMIAL_NAME_CLUSTER)) {
        return row -> {
            Project project = datasetDiffView.getProjectView().getProject();
            NameClusterManager ncm = project.getNameClusterManager();

            List<Name> binomialNames = row.getDataset().getNamesInRow(row).stream().flatMap(n -> n.asBinomial())
                    .collect(Collectors.toList());
            List<NameCluster> nameClusters = ncm.getClusters(binomialNames);
            nameClusters.sort(null);

            return nameClusters.toString();
        };
    } else if (colByEqual.equals(DATASET_COLUMN_NAME_SPECIFIC_EPITHET)) {
        return row -> row.getDataset().getNamesInRow(row).stream().map(n -> n.getSpecificEpithet())
                .collect(Collectors.toSet()).toString();
    } else {
        return row -> row.get(colByEqual);
    }
}

From source file:com.ge.apm.service.data.DataService.java

private String[] insertColumn(Map<String, String> fields, Map<String, Object> map) {
    List<String> cols = new ArrayList<String>();
    List<Object> vals = new ArrayList<Object>();
    for (Map.Entry<String, Object> entry : map.entrySet()) {
        String obj = fields.get(entry.getKey());
        if (obj == null)
            continue;
        cols.add(fields.get(entry.getKey()));
        Object str = entry.getValue();
        if (str == null || "null".equals(str)) {
            vals.add(str);
        } else {
            vals.add("'" + str + "'");
        }
    }

    return new String[] { cols.toString().replace("[", "").replace("]", ""),
            vals.toString().replace("[", "").replace("]", "") };
}