Example usage for java.util HashMap clear

List of usage examples for java.util HashMap clear

Introduction

On this page you can find example usage of java.util.HashMap.clear().

Prototype

public void clear() 

Source Link

Document

Removes all of the mappings from this map.

Usage

From source file:org.apache.flex.compiler.clients.ASC.java

/**
 * Compile the given source files. Each source file has its own symbol table,
 * and — unless ABCs are being merged — its own project.
 *
 * @param workspace workspace shared by all compilations
 * @param sourceFilenames source filenames to compile, in command-line order
 * @throws InterruptedException compiler thread error
 * @return true if every file compiled without problems
 */
private boolean compileSourceFiles(final Workspace workspace, final List<String> sourceFilenames)
        throws InterruptedException {
    boolean success = true;
    long startTime = System.nanoTime();
    int problemCount = 0;

    //  Set up a problem query object to check the result of the compilation.
    //  Some problems found aren't ever relevant to ASC, and some depend on
    //  the switches passed on the command line.
    problemQuery = new ProblemQuery();
    problemQuery.setShowProblemByClass(MultipleExternallyVisibleDefinitionsProblem.class, false);
    problemQuery.setShowProblemByClass(UnfoundPropertyProblem.class, false);
    problemQuery.setShowStrictSemantics(useStaticSemantics());
    problemQuery.setShowWarnings(getShowWarnings());

    // process source AS3 files
    Set<ICompilationUnit> mainUnits = new LinkedHashSet<ICompilationUnit>(getSourceFilenames().size());
    // Records each main compilation unit's command-line position so the output
    // order can fall back to command-line order (see the comparator below).
    final HashMap<ICompilationUnit, Integer> unitOrdering = new HashMap<ICompilationUnit, Integer>();

    ASCProject applicationProject = createProject(workspace, problemQuery);

    // Add any problems from parsing config vars supplied on the command line
    List<ICompilerProblem> configProblems = new ArrayList<ICompilerProblem>();
    applicationProject.collectProblems(configProblems);
    problemQuery.addAll(configProblems);

    int i = 0;
    for (final String sourceFilename : sourceFilenames) {
        // If we are not merging then create a new project
        // and set the compilation units.
        if (i > 0 && !getMergeABCs()) {
            applicationProject = createProject(workspace, problemQuery);
            mainUnits.clear();
            unitOrdering.clear();
            problemQuery.clear();
        }

        final IFileSpecification sourceFileSpec = new FileSpecification(sourceFilename);
        workspace.fileAdded(sourceFileSpec);
        final ICompilationUnit cu = ASCompilationUnit.createMainCompilationUnitForASC(applicationProject,
                sourceFileSpec, this);
        mainUnits.add(cu);
        unitOrdering.put(cu, unitOrdering.size());

        // add compilation unit to project
        applicationProject.addCompilationUnit(cu);
        applicationProject.updatePublicAndInternalDefinitions(Collections.singletonList(cu));

        // The logic that re-parses a garbage collected syntax tree, does not
        // know about the files included with the -in option, so we'll pin
        // the syntax tree here so we know we will never need to re-parse the
        // the syntax tree for the root compilation unit.
        rootedSyntaxTrees.add(cu.getSyntaxTreeRequest().get().getAST());

        // syntax errors
        for (final ICompilationUnit compilationUnit : applicationProject.getCompilationUnits()) {
            final ICompilerProblem[] problems = compilationUnit.getSyntaxTreeRequest().get().getProblems();
            problemQuery.addAll(problems);
        }

        //  Parse trees
        if (getShowParseTrees()) {
            final String outputSyntaxFilename = FilenameUtils.removeExtension(sourceFilename).concat(".p");
            try {
                PrintWriter syntaxFile = new PrintWriter(outputSyntaxFilename);
                try {
                    final IASNode ast = cu.getSyntaxTreeRequest().get().getAST();
                    if (ast instanceof FileNode) {
                        // Parse the full tree and add the new problems found in the
                        // function bodies into the problem collection.
                        final FileNode fileNode = (FileNode) ast;
                        final ImmutableSet<ICompilerProblem> skeletonProblems = ImmutableSet
                                .copyOf(fileNode.getProblems());
                        fileNode.populateFunctionNodes();
                        final ImmutableSet<ICompilerProblem> allProblems = ImmutableSet
                                .copyOf(fileNode.getProblems());

                        // Only add problems newly discovered by the full parse —
                        // i.e. those in allProblems but not in skeletonProblems.
                        // (The skeleton problems are already in "problemQuery";
                        // re-adding them would create duplicates.)
                        final SetView<ICompilerProblem> difference = Sets.difference(allProblems,
                                skeletonProblems);
                        problemQuery.addAll(difference);
                    }

                    syntaxFile.println(ast);
                    syntaxFile.flush();
                } finally {
                    // Close even if printing the AST throws, so the file handle
                    // is never leaked.
                    syntaxFile.close();
                }
            } catch (FileNotFoundException e) {
                problemQuery.add(new FileWriteProblem(e));
            }
        }

        // output
        // For the merged case, wait until the last source file.
        // For the non-merged case, emit output for each source file individually.
        final boolean isLastSourceFile = i == sourceFilenames.size() - 1;
        if (!getMergeABCs() || isLastSourceFile) {

            // Let's start up all the compilation units to try and get more threads generating code
            // at the same time.
            for (final ICompilationUnit compilationUnit : applicationProject.getCompilationUnits()) {
                compilationUnit.startBuildAsync(TargetType.SWF);
            }

            //  Run the resolveRefs() logic for as long as it's relevant.
            for (final ICompilationUnit compilationUnit : applicationProject.getCompilationUnits()) {
                final ICompilerProblem[] problems = compilationUnit.getOutgoingDependenciesRequest().get()
                        .getProblems();
                problemQuery.addAll(problems);
            }

            String outputFileBaseName = FilenameUtils.getBaseName(sourceFilename);
            String outputDirectoryName = FilenameUtils.getFullPath(sourceFilename);

            // Apply user specified basename and output directory. The
            // basename is only changed when ABCs are merged since each abc
            // needs a unique filename.
            if (getMergeABCs() && getOutputBasename() != null)
                outputFileBaseName = getOutputBasename();

            final String specifiedOutputDirectory = getOutputDirectory();
            if (!Strings.isNullOrEmpty(specifiedOutputDirectory))
                outputDirectoryName = normalizeDirectoryName(specifiedOutputDirectory);

            // Output to either a SWF or ABC file.
            if (isGenerateSWF()) {
                final boolean swfBuilt = generateSWF(outputDirectoryName, outputFileBaseName,
                        applicationProject, mainUnits, sourceFilename, problemQuery, startTime);
                if (!swfBuilt)
                    success = false;
            } else {
                Collection<ICompilationUnit> units = mainUnits;
                if (getMergeABCs()) {
                    // Run the topological sort to figure out which order to output the ABCs in
                    // Resorts to using commandline order rather than a filename based lexical sort in
                    // cases where there are no real dependencies between the scripts
                    units = applicationProject.getDependencyGraph().topologicalSort(mainUnits,
                            new Comparator<ICompilationUnit>() {
                                @Override
                                public int compare(ICompilationUnit o1, ICompilationUnit o2) {
                                    // Units that were not on the command line sort as position 0.
                                    final int ordinal1 = unitOrdering.containsKey(o1) ? unitOrdering.get(o1) : 0;
                                    final int ordinal2 = unitOrdering.containsKey(o2) ? unitOrdering.get(o2) : 0;
                                    // Ordinals are bounded by the source-file count, so this
                                    // subtraction cannot overflow.
                                    return ordinal2 - ordinal1;
                                }
                            });
                    Collection<ICompilationUnit> sourceUnits = new ArrayList<ICompilationUnit>(
                            mainUnits.size());
                    for (ICompilationUnit unit : units) {
                        // The dependency graph will put all CompilationUnits in the results, but
                        // we only want the CUs for the source files, since the imports should not be merged
                        // into the resulting ABC
                        if (mainUnits.contains(unit)) {
                            sourceUnits.add(unit);
                        }
                    }
                    units = sourceUnits;
                }
                final boolean abcBuilt = generateABCFile(outputDirectoryName, outputFileBaseName,
                        applicationProject, units, sourceFilename, problemQuery, startTime);
                if (!abcBuilt)
                    success = false;
            }

            //*************************************
            // Report problems.
            //

            // let's make a categorizer, so we can differentiate errors and warnings
            CompilerProblemCategorizer compilerProblemCategorizer = new CompilerProblemCategorizer();
            problemFormatter = new WorkspaceProblemFormatter(workspace, compilerProblemCategorizer);
            ProblemPrinter printer = new ProblemPrinter(problemFormatter, err);
            problemCount += printer.printProblems(problemQuery.getFilteredProblems());

            startTime = System.nanoTime();
        }
        i++;
    }

    // If there were problems, print out the summary
    if (problemCount > 0) {
        Collection<ICompilerProblem> errors = new ArrayList<ICompilerProblem>();
        Collection<ICompilerProblem> warnings = new ArrayList<ICompilerProblem>();
        problemQuery.getErrorsAndWarnings(errors, warnings);

        int errorCount = errors.size();
        int warningCount = warnings.size();

        if (errorCount == 1) {
            err.println();
            err.println("1 error found");
        } else if (errorCount > 1) {
            err.println();
            err.println(errorCount + " errors found");
        }

        if (warningCount == 1) {
            err.println();
            err.println("1 warning found");
        } else if (warningCount > 1) {
            err.println();
            err.println(warningCount + " warnings found");
        }

        if (success && (errorCount > 0)) {
            success = false;
        }
    }

    return success;
}

From source file:ddf.catalog.pubsub.PredicateTest.java

/**
 * Verifies predicate matching for a subscription that combines a temporal
 * filter with one or more content-type filters: the event must match only
 * when both the effective date is in range and the content type (and, where
 * specified, the version) matches one of the subscribed types.
 */
@Test
public void testMultipleCriteriaWithContentTypes() throws Exception {
    String methodName = "testMultipleCriteriaWithContentTypes";
    LOGGER.debug("***************  START: " + methodName + "  *****************");

    MockQuery query = new MockQuery();

    DatatypeFactory df = DatatypeFactory.newInstance();
    XMLGregorianCalendar start = df.newXMLGregorianCalendarDate(2011, 10, 25, 0);
    XMLGregorianCalendar end = df.newXMLGregorianCalendarDate(2011, 10, 27, 0);
    query.addTemporalFilter(start, end, Metacard.EFFECTIVE);

    // create content type criteria
    String version1 = "version1";
    String type1 = "type1";

    List<MockTypeVersionsExtension> extensions = new ArrayList<>();
    MockTypeVersionsExtension ext1 = new MockTypeVersionsExtension();
    ext1.getVersions().add(version1);
    ext1.setExtensionTypeName(type1);
    extensions.add(ext1);

    query.addTypeFilter(extensions);

    SubscriptionFilterVisitor visitor = new SubscriptionFilterVisitor();
    Predicate pred = (Predicate) query.getFilter().accept(visitor, null);
    LOGGER.debug("resulting predicate: " + pred);

    Filter filter = query.getFilter();
    FilterTransformer transform = new FilterTransformer();
    transform.setIndentation(2);
    String filterXml = transform.transform(filter);
    LOGGER.debug(filterXml);

    // input that passes both temporal and content type
    LOGGER.debug("\npass temporal and pass content type.\n");
    MetacardImpl metacard = new MetacardImpl();
    metacard.setCreatedDate(new Date());
    metacard.setExpirationDate(new Date());
    metacard.setModifiedDate(new Date());
    metacard.setMetadata(TestDataLibrary.getCatAndDogEntry());

    XMLGregorianCalendar cal = df.newXMLGregorianCalendarDate(2011, 10, 26, 0); // time in range
    metacard.setEffectiveDate(cal.toGregorianCalendar().getTime());

    HashMap<String, Object> properties = new HashMap<>();
    Map<String, Object> contextualMap = constructContextualMap(metacard); // pulled from PubSubProviderImpl

    Event testEvent = buildContentTypeEvent(properties, contextualMap, metacard, type1 + "," + version1);
    assertTrue(pred.matches(testEvent));

    // input that fails both temporal and content type
    LOGGER.debug("\nfail temporal.  fail content type.\n");
    applyEffectiveDate(metacard, df, 2012, 10, 30); // time out of range
    testEvent = buildContentTypeEvent(properties, contextualMap, metacard, "invalid_type," + version1);
    assertFalse(pred.matches(testEvent));

    // input that passes temporal and fails content type
    LOGGER.debug("\npass temporal.  fail content type\n");
    applyEffectiveDate(metacard, df, 2011, 10, 26); // time in range
    testEvent = buildContentTypeEvent(properties, contextualMap, metacard, "invalid_type," + version1);
    assertFalse(pred.matches(testEvent));

    // input that fails temporal and passes content type
    LOGGER.debug("\nfail temporal.  pass content type\n");
    applyEffectiveDate(metacard, df, 2012, 10, 26); // time out of range
    testEvent = buildContentTypeEvent(properties, contextualMap, metacard, type1 + "," + version1);
    assertFalse(pred.matches(testEvent));

    // multiple content types
    LOGGER.debug("\nTesting multiple content types.\n");

    String type2 = "type2";
    String version2 = "version2";
    MockTypeVersionsExtension ext2 = new MockTypeVersionsExtension();
    ext2.getVersions().add(version2);
    ext2.setExtensionTypeName(type2);
    extensions.add(ext2);

    // No version
    String type3 = "type3";
    MockTypeVersionsExtension ext3 = new MockTypeVersionsExtension();
    ext3.setExtensionTypeName(type3);
    extensions.add(ext3);

    MockQuery query2 = new MockQuery();
    query2.addTemporalFilter(start, end, Metacard.EFFECTIVE);
    query2.addTypeFilter(extensions);
    SubscriptionFilterVisitor visitor1 = new SubscriptionFilterVisitor();
    Predicate pred1 = (Predicate) query2.getFilter().accept(visitor1, null);
    LOGGER.debug("resulting predicate: " + pred1);

    // Create metacard for input: time and contentType match
    applyEffectiveDate(metacard, df, 2011, 10, 26); // time in range
    testEvent = buildContentTypeEvent(properties, contextualMap, metacard, type1 + "," + version1);
    assertTrue(pred1.matches(testEvent));

    // time and contentType match against content type 3 with any version
    applyEffectiveDate(metacard, df, 2011, 10, 26); // time in range
    testEvent = buildContentTypeEvent(properties, contextualMap, metacard, type3 + "," + "random_version");
    assertTrue(pred1.matches(testEvent));

    // time matches and contentType matches type2
    applyEffectiveDate(metacard, df, 2011, 10, 26); // time in range
    testEvent = buildContentTypeEvent(properties, contextualMap, metacard, type2 + "," + version2);
    assertTrue(pred1.matches(testEvent));

    // time matches and content type doesn't match (type2 with type1's version)
    applyEffectiveDate(metacard, df, 2011, 10, 26); // time in range
    testEvent = buildContentTypeEvent(properties, contextualMap, metacard, type2 + "," + version1);
    assertFalse(pred1.matches(testEvent));

    LOGGER.debug("***************  END: " + methodName + "  *****************");
}

/**
 * Resets {@code properties} with the standard pub-sub headers used by the
 * content-type tests and wraps them in a new {@link Event} on topic "topic".
 *
 * @param properties reusable properties map; cleared before repopulation
 * @param contextualMap value for the contextual header
 * @param metacard value for the entry header
 * @param contentTypeHeader value for the content-type header ("type,version")
 * @return a new event carrying the populated properties
 */
private Event buildContentTypeEvent(Map<String, Object> properties, Map<String, Object> contextualMap,
        MetacardImpl metacard, String contentTypeHeader) {
    properties.clear();
    properties.put(PubSubConstants.HEADER_OPERATION_KEY, PubSubConstants.CREATE);
    properties.put(PubSubConstants.HEADER_CONTEXTUAL_KEY, contextualMap);
    properties.put(PubSubConstants.HEADER_CONTENT_TYPE_KEY, contentTypeHeader);
    properties.put(PubSubConstants.HEADER_ENTRY_KEY, metacard);
    return new Event("topic", properties);
}

/**
 * Sets the metacard's effective date to midnight on the given date and logs it.
 */
private void applyEffectiveDate(MetacardImpl metacard, DatatypeFactory df, int year, int month, int day) {
    XMLGregorianCalendar cal = df.newXMLGregorianCalendarDate(year, month, day, 0);
    metacard.setEffectiveDate(cal.toGregorianCalendar().getTime());
    LOGGER.debug("metacard date: " + metacard.getEffectiveDate());
}

From source file:ddf.catalog.pubsub.PredicateTest.java

/**
 * Verifies predicate matching for a subscription that combines temporal,
 * spatial, and entry (catalog id) criteria: the event must match only when
 * all three criteria pass, and must fail when any single one fails.
 */
@Test
public void testMultipleCriteria() throws Exception {
    String methodName = "testMultipleCriteria";
    LOGGER.debug("***************  START: " + methodName + "  *****************");

    // test with temporal, spatial, and entry
    MockQuery query = new MockQuery();

    DatatypeFactory df = DatatypeFactory.newInstance();
    XMLGregorianCalendar start = df.newXMLGregorianCalendarDate(2011, 10, 25, 0);
    XMLGregorianCalendar end = df.newXMLGregorianCalendarDate(2011, 10, 27, 0);
    query.addTemporalFilter(start, end, Metacard.MODIFIED);

    String wkt = "POLYGON((0 10, 0 0, 10 0, 10 10, 0 10))";
    query.addSpatialFilter(wkt, 0.0, "Meter", "CONTAINS");

    // create entry criteria
    String catalogId = "ABC123";
    query.addEntryFilter(catalogId);

    MetacardImpl metacard = new MetacardImpl();
    metacard.setLocation("POINT(5 5)"); // inside the polygon
    metacard.setId(catalogId);
    metacard.setCreatedDate(new Date());
    metacard.setExpirationDate(new Date());
    metacard.setEffectiveDate(new Date());
    metacard.setMetadata(TestDataLibrary.getCatAndDogEntry());

    XMLGregorianCalendar cal = df.newXMLGregorianCalendarDate(2011, 10, 26, 0); // in range
    metacard.setModifiedDate(cal.toGregorianCalendar().getTime());

    HashMap<String, Object> properties = new HashMap<>();
    Event testEvent = buildEntryEvent(properties, metacard);

    // input passes temporal, id, and geo
    SubscriptionFilterVisitor visitor = new SubscriptionFilterVisitor();
    Predicate pred = (Predicate) query.getFilter().accept(visitor, null);
    LOGGER.debug("resulting predicate: " + pred);

    Filter filter = query.getFilter();
    FilterTransformer transform = new FilterTransformer();
    transform.setIndentation(2);
    String filterXml = transform.transform(filter);
    LOGGER.debug(filterXml);

    assertTrue(pred.matches(testEvent));

    // input passes temporal, id, but fails geo
    metacard.setLocation("POINT(5 50)"); // geo out of range
    testEvent = buildEntryEvent(properties, metacard);
    assertFalse(pred.matches(testEvent));

    // input passes geo, and id, but fails temporal
    metacard.setLocation("POINT(5 5)");
    XMLGregorianCalendar cal1 = df.newXMLGregorianCalendarDate(2011, 10, 28, 0);
    metacard.setModifiedDate(cal1.toGregorianCalendar().getTime()); // date out of range
    testEvent = buildEntryEvent(properties, metacard);
    assertFalse(pred.matches(testEvent));

    // input passes temporal, geo, but fails id
    XMLGregorianCalendar cal2 = df.newXMLGregorianCalendarDate(2011, 10, 26, 0);
    metacard.setModifiedDate(cal2.toGregorianCalendar().getTime());
    metacard.setId("invalid_id"); // bad id
    testEvent = buildEntryEvent(properties, metacard);
    assertFalse(pred.matches(testEvent));

    LOGGER.debug("***************  END: " + methodName + "  *****************");
}

/**
 * Resets {@code properties} with the standard pub-sub headers (id, entry,
 * operation, and a freshly constructed contextual map — as done in
 * PubSubProviderImpl) and wraps them in a new {@link Event} on topic "topic".
 *
 * @param properties reusable properties map; cleared before repopulation
 * @param metacard the entry; its current id/location/dates are captured
 * @return a new event carrying the populated properties
 */
private Event buildEntryEvent(Map<String, Object> properties, MetacardImpl metacard) {
    properties.clear();
    properties.put(PubSubConstants.HEADER_ID_KEY, metacard.getId());
    properties.put(PubSubConstants.HEADER_ENTRY_KEY, metacard);
    properties.put(PubSubConstants.HEADER_OPERATION_KEY, PubSubConstants.CREATE);
    // Pulled from PubSubProviderImpl
    properties.put(PubSubConstants.HEADER_CONTEXTUAL_KEY, constructContextualMap(metacard));
    return new Event("topic", properties);
}

From source file:org.alfresco.repo.version.VersionServiceImplTest.java

/**
 * MNT-10404: a user granted only WRITE_CONTENT permission on a versionable
 * document must still be able to update its content. Creates a test user and
 * a document, adds the versionable aspect as the system user, then writes new
 * content while running as the restricted user and verifies it was stored.
 * All created entities are cleaned up in the finally block.
 */
public void test_MNT10404() {
    String test_run = Long.toString(System.currentTimeMillis());
    final String test_user = "userUsageTestUser-" + test_run;
    final String document_name = "test_MNT10404" + test_run + ".txt";

    final String theFirstContent = "This is simple content.";
    final String theSecondContent = "Update content.";

    NodeRef document = null;

    try {
        // create user (removing any leftover from a previous run first)
        if (personService.personExists(test_user)) {
            personService.deletePerson(test_user);
        }

        HashMap<QName, Serializable> properties = new HashMap<>();
        properties.put(ContentModel.PROP_USERNAME, test_user);

        NodeRef personNodeRef = personService.createPerson(properties);

        assertNotNull(personNodeRef);

        // create node (reusing the map for the node's properties)
        properties.clear();
        properties.put(ContentModel.PROP_NAME, document_name);

        document = nodeService.createNode(this.rootNodeRef, ContentModel.ASSOC_CONTAINS,
                QName.createQName(ContentModel.USER_MODEL_URI, document_name), ContentModel.TYPE_CONTENT,
                properties).getChildRef();
        contentService.getWriter(document, ContentModel.PROP_CONTENT, true).putContent(theFirstContent);

        // add write permission for the test user only
        permissionService.setPermission(document, test_user, PermissionService.WRITE_CONTENT, true);

        // add versionable aspect as system user
        final NodeRef doc = document;

        RunAsWork<Void> getWork = new RunAsWork<Void>() {
            @Override
            public Void doWork() throws Exception {
                Map<QName, Serializable> versionProperties = new HashMap<>();
                versionProperties.put(ContentModel.PROP_VERSION_LABEL, "0.1");
                versionProperties.put(ContentModel.PROP_INITIAL_VERSION, true);
                versionProperties.put(ContentModel.PROP_VERSION_TYPE, VersionType.MINOR);
                nodeService.addAspect(doc, ContentModel.ASPECT_VERSIONABLE, versionProperties);
                return null;
            }
        };
        AuthenticationUtil.runAs(getWork, AuthenticationUtil.getSystemUserName());

        assertTrue(nodeService.hasAspect(document, ContentModel.ASPECT_VERSIONABLE));

        // set content by test_user — this is the operation MNT-10404 is about
        RunAsWork<Void> getWorkSetContent = new RunAsWork<Void>() {
            @Override
            public Void doWork() throws Exception {
                contentService.getWriter(doc, ContentModel.PROP_CONTENT, true).putContent(theSecondContent);
                return null;
            }
        };
        AuthenticationUtil.runAs(getWorkSetContent, test_user);

        assertEquals(theSecondContent,
                contentService.getReader(document, ContentModel.PROP_CONTENT).getContentString());
    } finally {
        // delete user
        if (personService.personExists(test_user)) {
            personService.deletePerson(test_user);
        }

        // delete node
        if (document != null && nodeService.exists(document)) {
            nodeService.deleteNode(document);
        }
    }
}

From source file:com.funambol.exchange.httptransport.HTTP_FBA_Authentication.java

/**
 * Performs Exchange Form-Based Authentication (FBA) by POSTing the user's
 * credentials to owaauth.dll, then following HTTP 302 redirects (up to
 * MAX_REDIRECTS) while accumulating the session cookies returned along the
 * way. If Exchange rejects the credentials (a "reason=" value other than 0 in
 * the redirect location), the cookie set is discarded.
 *
 * @param hostname the Exchange host to authenticate against
 * @param username the account name
 * @param password the account password
 * @param transport transport supplying the sockets used for each request
 * @param isSSLEnabled whether the destination URL should use https
 * @return the final HTTP response, with its returned cookies set to the
 *         accumulated session cookie string (empty if credentials rejected)
 * @throws IOException if a network or encoding error occurs
 */
public HTTP_Response doAuth(String hostname, String username, String password, WebDavHttpTransport transport,
        boolean isSSLEnabled) throws IOException {

    int numTries = 1;
    String location;
    String cookie;
    HashMap<String, String> cookies = new HashMap<String, String>();

    exchangePacket.setSocket(transport.getASocket());
    // For Exchange 07, 'owa' is a better folder to hit, can we put the Exc
    // version in a config somewhere
    // exchangePacket.setCommand("POST /owa/auth/owaauth.dll");
    exchangePacket.setCommand("POST /exchweb/bin/auth/owaauth.dll");
    exchangePacket.setHeader_content_type("application/x-www-form-urlencoded");
    // A URL encoded as "https://<hostname>/exchange/<username>")
    String prefix = "http://";

    if (isSSLEnabled) {
        prefix = "https://";
    }

    String codedURL = URLEncoder.encode(prefix + hostname + "/exchange/" + username, "UTF-8");

    // form body expected by owaauth.dll
    exchangePacket.setData("destination=" + codedURL + "&username=" + URLEncoder.encode(username, "UTF-8")
            + "&password=" + URLEncoder.encode(password, "UTF-8") + "&SubmitCreds=Log+On&trusted=4");
    exchangePacket.header_host = hostname;

    HTTP_Response response = bz_ExchangeSend(exchangePacket);

    cookie = response.getCookies_Returned();
    processSetCookies(cookies, cookie);
    cookie = createCookieString(cookies);

    // Follow redirects until we get a non-302 response or give up.
    while ((response.status_code == 302) && (numTries < MAX_REDIRECTS)) {

        exchangePacket.setSocket(transport.getNewSocket());

        if (log.isWarningEnabled()) {
            log.warn("user " + username + " not authenticated after attempt " + numTries);
        }
        if (log.isTraceEnabled()) {
            log.trace("Return Statuscode: " + response.status_code);
        }
        if (log.isDebugEnabled()) {
            log.debug("Received Location: " + response.location);
        }
        location = response.location;
        // Strip the scheme — 8 chars for "https://". NOTE(review): this assumes
        // the Location header always uses https; a plain "http://" redirect
        // (7 chars) would leave a stray character — TODO confirm.
        location = location.substring(8);
        // Keep only the path portion, starting at the first '/' after the host.
        location = location.substring(location.indexOf('/')).trim();
        if (log.isDebugEnabled()) {
            log.debug("Location Path: " + location);
        }
        // Check if login failed
        int reasonIndex = location.indexOf("reason=");
        if (reasonIndex != -1) {
            // Reason 0 is ok, anything else means failure
            int beginIndex = reasonIndex + "reason=".length();
            if (!"0".equals(location.substring(beginIndex, beginIndex + 1))) {
                log.error("Exchange rejected user credentials");
                // Discard any cookies collected so far; the caller gets none.
                cookies.clear();
                break;
            }
        }

        exchangePacket.setCommand("GET " + location);

        exchangePacket.setHeader_content_type("");

        exchangePacket.setCookies_set(cookie);
        exchangePacket.setData("");
        exchangePacket.setHeader_referer("");
        // NOTE(review): both the setter above and this direct field write clear
        // the content type; kept as in the original in case they differ.
        exchangePacket.header_content_type = "";

        response = bz_ExchangeSend(exchangePacket);

        cookie = response.getCookies_Returned();
        processSetCookies(cookies, cookie);
        cookie = createCookieString(cookies);

        numTries++;

    }

    response.cookies_returned = createCookieString(cookies);

    return response;

}

From source file:org.openmrs.module.chica.DynamicFormAccess.java

/**
 * Save the results of the fields marked as "Export Field".
 * /*w  w w .j  av  a  2  s  .  co  m*/
 * @param formInstance FormInstance object containing the relevant form information.
 * @param locationTagId The location tag identifier.
 * @param encounterId The encounter identifier associated with the form.
 * @param patient The patient the form belongs to.
 * @param formFieldMap Map from the HTTP request that contains the field name to values.
 * @param parameterHandler The parameterHandler used for rule execution.
 */
public void saveExportElements(FormInstance formInstance, Integer locationTagId, Integer encounterId,
        Patient patient, Map<String, String[]> formFieldMap, ParameterHandler parameterHandler) {
    HashMap<String, Field> fieldMap = new HashMap<String, Field>();
    FormService formService = Context.getFormService();
    Form form = formService.getForm(formInstance.getFormId());
    LinkedHashMap<FormField, String> formFieldToValue = new LinkedHashMap<FormField, String>();
    FieldType exportType = getFieldType("Export Field");
    List<FieldType> fieldTypes = new ArrayList<FieldType>();
    fieldTypes.add(exportType);
    List<FormField> formFields = Context.getService(ChirdlUtilBackportsService.class).getFormFields(form,
            fieldTypes, false);

    // Collect the submitted value (first element of the HTTP parameter array)
    // for every "Export Field" form field present in the request. Fields that
    // were not submitted at all are skipped; fields submitted without a value
    // are recorded with a null value so downstream processing still sees them.
    for (FormField formField : formFields) {
        org.openmrs.Field field = formField.getField();
        String fieldName = field.getName();
        if (!formFieldMap.containsKey(fieldName)) {
            continue; // field was not part of this form submission
        }

        Field valueField = new Field(fieldName);
        fieldMap.put(fieldName, valueField);
        String[] valueObj = formFieldMap.get(fieldName);
        if (valueObj == null || valueObj.length == 0) {
            formFieldToValue.put(formField, null);
            continue;
        }

        String value = valueObj[0];
        formFieldToValue.put(formField, value);
        valueField.setValue(value);
    }

    // Run rule execution over the collected values, then persist the answers.
    consume(formInstance, patient, locationTagId, encounterId, fieldMap, formFieldToValue, parameterHandler,
            form);
    Context.getService(ChicaService.class).saveAnswers(fieldMap, formInstance, encounterId, patient,
            formFieldToValue.keySet());

    fieldMap.clear();
    formFieldToValue.clear();
}

From source file:de.fhg.fokus.odp.portal.uploaddata.service.Worker.java

/**
 * loop through all Cells and rows. Firstly, add correct keys to strings.
 * Secondly, parse corresponding value into correct json and add this
 * dataset to ckan via middleware.
 * 
 * @param args
 * @throws Exception
 * 
 * @return a String of dataset indices, which were not uploaded.
 */
public String readXlsx() {
    final StringBuilder errormessage = new StringBuilder("");
    final StringBuilder resourceStringBuilder = new StringBuilder("[{");
    final StringBuilder extrasStringBuilder = new StringBuilder("{");

    HashMap<String, String> map = new HashMap<String, String>();
    ArrayList<String> strings = new ArrayList<String>();
    XSSFWorkbook workBook = null;
    try {
        workBook = new XSSFWorkbook(uploadFolder + "file.xlsx");
    } catch (IOException e1) {
        // Fail fast with context. The previous code only printed the stack
        // trace here and then dereferenced the still-null workbook below,
        // which guaranteed a NullPointerException.
        throw new IllegalStateException("Could not open workbook " + uploadFolder + "file.xlsx", e1);
    }
    int counter = 0;
    XSSFSheet sheet = workBook.getSheetAt(0);
    for (Row row : sheet) {
        for (Cell cell : row) {
            switch (cell.getCellType()) {
            case Cell.CELL_TYPE_STRING:
                String value = cell.getRichStringCellValue().getString();
                // First row is the header: record each column name as a map
                // key (resources:/extras: columns are handled separately).
                if (counter == 0) {
                    if (!value.startsWith("resources:") && !value.startsWith("extras:")) {
                        map.put(value, null);
                    }

                    strings.add(value);
                    break;
                }
                // compute columnIndex for later use
                int columnIndex = cell.getColumnIndex();
                // compute parameter for later use in if-statements
                String parameter = strings.get(columnIndex);
                handleString(resourceStringBuilder, extrasStringBuilder, map, value, parameter);
                break;
            case Cell.CELL_TYPE_NUMERIC:
                if (DateUtil.isCellDateFormatted(cell)) {
                    // is a date;
                    parameter = strings.get(cell.getColumnIndex());
                    handleDate(map, parameter, cell, extrasStringBuilder);
                } else {
                    // is a number;
                    parameter = strings.get(cell.getColumnIndex());
                    handleNumber(map, parameter, cell, extrasStringBuilder);
                }
                break;
            default:
                break;
            }
        }
        // finish extras and resources JSON fragments for this row
        finishParseResource(resourceStringBuilder);
        finishParseExtras(extrasStringBuilder);

        Validator.checkTagAndGroupsForEmptyValues(map);
        Validator.setlicenseAndNameToLowerCase(map);

        // add resources and extras to map
        map.put("resources", resourceStringBuilder.toString());
        map.put("extras", extrasStringBuilder.toString());

        // Push this row as one dataset; failures are appended to errormessage.
        createDataSet(errormessage, gw, map, counter);

        ++counter;
        // reset resourceStringBuilder and extrasStringBuilder
        resetStringBuilder(resourceStringBuilder, extrasStringBuilder);

        // reset map so the next row starts from a clean slate
        map.clear();
    }

    if (errormessage.toString().equalsIgnoreCase("")) {
        // no errors
        return errormessage.toString();
    } else {
        // return list of dataset indices, trimming the trailing separator
        return errormessage.substring(0, errormessage.length() - 1);
    }
}

From source file:org.apache.solr.cloud.OverseerCollectionProcessor.java

/**
 * Waits for all outstanding async sub-requests to finish, merging their
 * outcomes into {@code results}, and then resets the request-tracking map.
 * A null {@code asyncId} means the request was synchronous, so there is
 * nothing to wait for and the map is left untouched.
 */
private void completeAsyncRequest(String asyncId, HashMap<String, String> requestMap, NamedList results) {
    if (asyncId == null) {
        return;
    }
    waitForAsyncCallsToComplete(requestMap, results);
    requestMap.clear();
}

From source file:edu.ku.brc.specify.toycode.RegPivot.java

/**
 * @param newTblName
 * @param stmt
 * @param pStmt
 * @param fillSQL
 * @param secInx
 * @param dbFieldTypes
 * @param dbFieldNames
 * @param inxToName
 * @return
 * @throws SQLException
 */
private int fillTrackTable(final String newTblName, final Statement stmt, final PreparedStatement pStmt,
        final String fillSQL, final int secInx, final Vector<Integer> dbFieldTypes,
        final Vector<String> dbFieldNames, final HashMap<Integer, String> inxToName) throws SQLException {
    System.out.println("Filling Track Table.");

    // NOTE(review): instCnt is never incremented anywhere in this method, so
    // the return value is always 0. Kept as-is in case callers rely on it.
    int instCnt = 0;

    System.out.println(fillSQL);

    boolean debug = false;

    String prevRegId = null;

    // Collection_number -> accumulated column values for that registration.
    HashMap<String, HashMap<String, Object>> colHash = new HashMap<String, HashMap<String, Object>>();

    // Working map of column name -> value for the registration group
    // currently being read.
    HashMap<String, Object> nameToVals = new HashMap<String, Object>();

    // try-with-resources: the ResultSet was previously never closed (leak).
    // The caller-owned Statement is intentionally NOT closed here.
    try (ResultSet rs = stmt.executeQuery(fillSQL)) {
        ResultSetMetaData rsmd = rs.getMetaData();

        HashMap<String, Integer> nameToIndex = new HashMap<String, Integer>();
        for (int c = 1; c <= rsmd.getColumnCount(); c++) {
            nameToIndex.put(rsmd.getColumnName(c), c);
            System.out.println(c + " - " + rsmd.getColumnName(c));
        }

        while (rs.next()) {
            String regId = rs.getString(1);
            if (prevRegId == null)
                prevRegId = regId;

            // Copy the fixed leading columns into the working map, skipping
            // MySQL's sentinel "zero" timestamps which are not valid dates.
            for (int i = 1; i < secInx; i++) {
                if (debug)
                    System.out.println("Put: " + dbFieldNames.get(i - 1) + "  " + dbFieldTypes.get(i - 1)
                            + "  = " + rs.getObject(i));

                if (dbFieldTypes.get(i - 1) == java.sql.Types.TIMESTAMP) {
                    try {
                        String ts = rs.getString(i);
                        if (StringUtils.isNotEmpty(ts) && ts.equals("0000-00-00 00:00:00")) {
                            continue;
                        }
                    } catch (Exception ex) {
                        continue;
                    }
                }
                nameToVals.put(dbFieldNames.get(i - 1), rs.getObject(i));
            }

            // Key/value pair columns: sanitize the name so it is usable as a
            // SQL column identifier.
            String name = rs.getString(secInx);
            name = StringUtils.replace(name, "(", "_");
            name = StringUtils.replace(name, ")", "_");

            if (name.equals("reg_type")) {
                // reg_type rows map the registration id into a
                // "<type>_number" column.
                String strVal = (String) rs.getObject(secInx + 2);
                name = strVal + "_number";

                nameToVals.put(name, regId);
                if (debug)
                    System.out.println("Put: " + name + " = " + regId);
            } else {
                // Prefer the string value; fall back to the integer value.
                Integer intVal = (Integer) rs.getObject(secInx + 1);
                String strVal = (String) rs.getObject(secInx + 2);
                nameToVals.put(name, strVal != null ? strVal : intVal);
                if (debug)
                    System.out.println("Put: " + name + " = " + intVal + " / " + strVal);
            }

            if (debug)
                System.out.println("-------------------------------------------");

            // A change of registration id marks the end of a group: stash the
            // accumulated values under the collection number and start over.
            if (!prevRegId.equals(regId)) {
                String colNum = (String) nameToVals.get("Collection_number");

                if (StringUtils.isNotEmpty(colNum)) {
                    copyHash(colNum, colHash, nameToVals);
                }
                prevRegId = regId;
                nameToVals.clear();
            }
        }
    }

    // Flush the final registration group. The loop above only flushes when
    // the NEXT group's id is seen, so without this the last group was
    // silently dropped from colHash.
    if (prevRegId != null) {
        String colNum = (String) nameToVals.get("Collection_number");
        if (StringUtils.isNotEmpty(colNum)) {
            copyHash(colNum, colHash, nameToVals);
        }
    }

    writeHash(colHash, null, pStmt, dbFieldTypes, dbFieldNames, inxToName);

    // Add the lookup/geo columns the downstream processing expects.
    String alterSQL = "ALTER TABLE " + newTblName + " ADD Lookup VARCHAR(64) AFTER IP";
    BasicSQLUtils.update(connection, alterSQL);

    alterSQL = "ALTER TABLE " + newTblName + " ADD Country VARCHAR(64) AFTER Lookup";
    BasicSQLUtils.update(connection, alterSQL);

    alterSQL = "ALTER TABLE " + newTblName + " ADD City VARCHAR(64) AFTER Country";
    BasicSQLUtils.update(connection, alterSQL);

    return instCnt;
}

From source file:org.kuali.kra.coi.disclosure.CoiDisclosureServiceImpl.java

/**
 * Collects the award numbers belonging to the same award hierarchy as the
 * given project. The project id itself is always the first element of the
 * result; if the project maps to an AwardHierarchy node, the award number of
 * every node sharing that node's root award is appended as well.
 */
private List<String> getAwardNumbersForHierarchy(String projectId) {
    List<String> awardNumbers = new ArrayList<String>();
    awardNumbers.add(projectId);

    HashMap<String, Object> criteria = new HashMap<String, Object>();
    criteria.put("awardNumber", projectId);
    List<AwardHierarchy> hierarchyNodes = (List<AwardHierarchy>) businessObjectService
            .findMatching(AwardHierarchy.class, criteria);

    if (CollectionUtils.isNotEmpty(hierarchyNodes)) {
        // Re-query by the shared root to pull in the entire hierarchy.
        criteria.clear();
        criteria.put("rootAwardNumber", hierarchyNodes.get(0).getRootAwardNumber());
        hierarchyNodes = (List<AwardHierarchy>) businessObjectService.findMatching(AwardHierarchy.class,
                criteria);
        for (AwardHierarchy node : hierarchyNodes) {
            awardNumbers.add(node.getAwardNumber());
        }
    }
    return awardNumbers;
}