Example usage for java.util.LinkedHashSet#size()

Introduction

On this page you can find example usages of java.util.LinkedHashSet#size(), collected from open-source projects.

Prototype

int size();

Document

Returns the number of elements in this set (its cardinality).
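
Example

A minimal, self-contained sketch (plain JDK, not taken from the projects below): size() reports the number of distinct elements, and is commonly used to pre-size arrays and other collections, as several of the usage examples below do.

import java.util.LinkedHashSet;

public class LinkedHashSetSizeExample {
    public static void main(String[] args) {
        LinkedHashSet<String> names = new LinkedHashSet<String>();
        names.add("alpha");
        names.add("beta");
        names.add("alpha"); // duplicate: the set is unchanged
        System.out.println(names.size()); // prints 2

        // A common idiom: use size() to pre-size the array passed to toArray()
        String[] asArray = names.toArray(new String[names.size()]);
        System.out.println(asArray.length); // prints 2
    }
}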

Usage

From source file:ubic.gemma.web.controller.expression.experiment.DEDVController.java

/**
 * Prepare vvo for display on front end. Uses factors and factor values from layouts
 *
 * @param vvo Note: This will be modified! It will be updated with the factorNames and factorValuesToNames
 */
private void prepareFactorsForFrontEndDisplay(VisualizationValueObject vvo,
        LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> eeLayouts) {

    if (eeLayouts == null || eeLayouts.isEmpty()) {
        log.warn("No layouts, bail");
        vvo.setFactorNames(null);
        vvo.setFactorValuesToNames(null);
        return;
    }

    LinkedHashSet<ExperimentalFactor> factorNames = getFactorNames(eeLayouts);

    // colours for conditions/factor values bar chart. FIXME: make continuous maps different.
    Map<ExperimentalFactor, Queue<String>> factorColoursMap = createFactorNameToColoursMap(factorNames);
    String missingValueColour = "#DCDCDC";

    Random random = new Random();

    LinkedHashMap<String, LinkedHashMap<String, String>> factorToValueNames = new LinkedHashMap<>();
    // list of maps with entries: key = factorName, value=array of factor values
    // 1 entry per sample
    List<LinkedHashMap<String, String[]>> factorValueMaps = new ArrayList<>();

    Collection<String> factorsMissingValues = new HashSet<>();

    Collection<BioMaterialValueObject> seenSamples = new HashSet<>(); // if same sample was run more than once on
    // diff platforms.
    Map<Long, FactorValue> fvs = new HashMap<>(); // avoid loading repeatedly.
    Collection<ExperimentalFactor> seenFactors = new HashSet<>();

    for (BioAssayValueObject ba : eeLayouts.keySet()) {

        if (seenSamples.contains(ba.getSample())) {
            continue;
        }
        seenSamples.add(ba.getSample());

        // double should be the factorValue id, defined in
        // ubic.gemma.core.visualization.ExperimentalDesignVisualizationService.getExperimentalDesignLayout(ExpressionExperiment,
        // BioAssayDimension)
        LinkedHashMap<ExperimentalFactor, Double> factorMap = eeLayouts.get(ba);
        LinkedHashMap<String, String[]> factorNamesToValueColourPairs = new LinkedHashMap<>(factorNames.size());

        // this is defensive, should only come into play when there's something messed up with the data.
        // for every factor, add a missing-value entry (guards against missing data messing up the layout)
        for (ExperimentalFactor factor : factorNames) {
            String[] facValAndColour = new String[] { "No value", missingValueColour };

            factorNamesToValueColourPairs.put(getUniqueFactorName(factor), facValAndColour);
        }

        // for each experimental factor, store the name and value
        for (Entry<ExperimentalFactor, Double> pair : factorMap.entrySet()) {
            ExperimentalFactor factor = pair.getKey();
            Double valueOrId = pair.getValue();

            /*
             * The double is only a double because it is meant to hold measurements when the factor is
             * continuous. If the factor is categorical, the double is set to the factor value's id; see
             * ubic.gemma.core.visualization.ExperimentalDesignVisualizationService.getExperimentalDesignLayout(
             * ExpressionExperiment, BioAssayDimension)
             */
            if (valueOrId == null || factor.getType() == null
                    || (factor.getType().equals(FactorType.CATEGORICAL)
                            && factor.getFactorValues().isEmpty())) {
                factorsMissingValues.add(getUniqueFactorName(factor));
                continue;
            }

            if (!seenFactors.contains(factor) && factor.getType().equals(FactorType.CATEGORICAL)) {
                for (FactorValue fv : factor.getFactorValues()) {
                    fvs.put(fv.getId(), fv);
                }
            }

            String facValsStr = getFacValsStr(fvs, factor, valueOrId);

            if (!factorToValueNames.containsKey(getUniqueFactorName(factor))) {
                factorToValueNames.put(getUniqueFactorName(factor), new LinkedHashMap<String, String>());
            }
            // assign colour if unassigned or fetch it if already assigned
            String colourString = "";
            if (!factorToValueNames.get(getUniqueFactorName(factor)).containsKey(facValsStr)) {
                if (factorColoursMap.containsKey(factor)) {
                    colourString = factorColoursMap.get(factor).poll();
                }
                if (colourString == null || Objects.equals(colourString, "")) { // ran out of predefined colours
                    colourString = getRandomColour(random);
                }
                factorToValueNames.get(getUniqueFactorName(factor)).put(facValsStr, colourString);
            } else {
                colourString = factorToValueNames.get(getUniqueFactorName(factor)).get(facValsStr);
            }
            String[] facValAndColour = new String[] { facValsStr, colourString };

            factorNamesToValueColourPairs.put(getUniqueFactorName(factor), facValAndColour);

        }
        factorValueMaps.add(factorNamesToValueColourPairs);
    }

    // add missing value entries here so they show up at the end of the legend's value lists
    if (!factorsMissingValues.isEmpty()) {
        for (String factorName : factorsMissingValues) {
            if (!factorToValueNames.containsKey(factorName)) {
                factorToValueNames.put(factorName, new LinkedHashMap<String, String>());
            }
            factorToValueNames.get(factorName).put("No value", missingValueColour);
        }
    }
    vvo.setFactorNames(factorToValueNames); // this is summary of values & colours by factor, used for legend
    vvo.setFactorValuesToNames(factorValueMaps); // this is list of maps for each sample
}

From source file:com.gtwm.pb.model.manageSchema.DatabaseDefn.java

public void removeTable(SessionDataInfo sessionData, HttpServletRequest request, TableInfo tableToRemove,
        Connection conn) throws SQLException, DisallowedException, CantDoThatException,
        TableDependencyException, CodingErrorException, ObjectNotFoundException {
    if (!(this.authManager.getAuthenticator().loggedInUserAllowedTo(request, PrivilegeType.ADMINISTRATE))) {
        throw new DisallowedException(this.authManager.getLoggedInUser(request), PrivilegeType.ADMINISTRATE);
    }
    // Check the table doesn't have any user-added fields
    for (BaseField field : tableToRemove.getFields()) {
        if (!(field.equals(tableToRemove.getPrimaryKey()) || field.getHidden())) {
            throw new CantDoThatException("Please remove all fields before removing the table");
        }
    }
    // Check that it doesn't have any reports other than the default report
    if (tableToRemove.getReports().size() > 1) {
        throw new CantDoThatException(
                "Please remove reports " + tableToRemove.getReports() + " before removing the table");
    }
    // Get the set of dependent tables. If it is empty, proceed with the
    // deletion of the table; otherwise raise an exception
    LinkedHashSet<TableInfo> dependentTables = new LinkedHashSet<TableInfo>();
    this.getDependentTables(tableToRemove, dependentTables, request);
    if (dependentTables.size() > 0) {
        LinkedHashSet<BaseReportInfo> dependentReports = new LinkedHashSet<BaseReportInfo>();
        for (TableInfo dependentTable : dependentTables) {
            dependentReports.addAll(dependentTable.getReports());
        }
        throw new TableDependencyException(
                "Unable to remove table - other tables are linked to it, that need to be removed first",
                dependentTables, dependentReports);
    }
    // No dependencies exist so remove the table & its default report:
    BaseReportInfo defaultReport = tableToRemove.getDefaultReport();
    this.removeReportWithoutChecks(sessionData, request, defaultReport, conn);
    // Remove any privileges on the table
    this.getAuthManager().removePrivilegesOnTable(request, tableToRemove);
    this.tableCache.remove(tableToRemove.getInternalTableName());
    // Delete from persistent store
    HibernateUtil.currentSession().delete(tableToRemove);
    try {
        // Delete the table from the relational database.
        // The CASCADE is to drop the related sequence.
        // TODO: replace this with a specific sequence drop
        PreparedStatement statement = conn
                .prepareStatement("DROP TABLE " + tableToRemove.getInternalTableName() + " CASCADE");
        statement.execute();
        statement.close();
    } catch (SQLException sqlex) {
        String errorCode = sqlex.getSQLState();
        if (errorCode.equals("42P01")) {
            logger.warn("Can't delete table " + tableToRemove + " from relational database, it's not there");
            // TODO: review why we're swallowing this error
        } else {
            throw new SQLException(sqlex + ": error code " + errorCode, sqlex);
        }
    }
    this.authManager.getCompanyForLoggedInUser(request).removeTable(tableToRemove);
    UsageLogger usageLogger = new UsageLogger(this.relationalDataSource);
    AppUserInfo user = this.authManager.getUserByUserName(request, request.getRemoteUser());
    usageLogger.logTableSchemaChange(user, tableToRemove, AppAction.REMOVE_TABLE, "");
    UsageLogger.startLoggingThread(usageLogger);
}

From source file:org.codehaus.mojo.jsimport.AbstractImportMojo.java

/**
 * Build up the dependency graph and global symbol table by parsing the project's dependencies.
 *
 * @param scope compile or test.
 * @param fileDependencyGraphModificationTime the time that the dependency graph was updated. Used for file time
 *            comparisons to check the age of files.
 * @param processedFiles an insert-ordered set of files that have been processed.
 * @param targetFolder Where the target files live.
 * @param workFolder Where we can create some long lived information that may be useful to subsequent builds.
 * @param compileWorkFolder Ditto but in the case of testing it points to where the compile working folder is.
 * @return true if the dependency graph has been updated.
 * @throws MojoExecutionException if something bad happens.
 */
private boolean buildDependencyGraphForDependencies(Scope scope, long fileDependencyGraphModificationTime,
        LinkedHashSet<File> processedFiles, File targetFolder, File workFolder, File compileWorkFolder)
        throws MojoExecutionException {
    File targetJsFolder = new File(targetFolder, "js");

    boolean fileDependencyGraphUpdated = false;

    // Determine how we need to filter things both for direct filtering and transitive filtering.

    String scopeStr = (scope == Scope.COMPILE ? Artifact.SCOPE_COMPILE : Artifact.SCOPE_TEST);

    AndArtifactFilter jsArtifactFilter = new AndArtifactFilter();
    jsArtifactFilter.add(new ScopeArtifactFilter(scopeStr));
    jsArtifactFilter.add(new TypeArtifactFilter("js"));

    AndArtifactFilter wwwZipArtifactFilter = new AndArtifactFilter();
    wwwZipArtifactFilter.add(new ScopeArtifactFilter(scopeStr));
    wwwZipArtifactFilter.add(new TypeArtifactFilter("zip"));
    wwwZipArtifactFilter.add(new ArtifactFilter() {
        public boolean include(Artifact artifact) {
            return artifact.hasClassifier() && artifact.getClassifier().equals("www");
        }
    });

    // Determine the artifacts to resolve and associate their transitive dependencies.

    Map<Artifact, LinkedHashSet<Artifact>> directArtifactWithTransitives = new HashMap<Artifact, LinkedHashSet<Artifact>>(
            dependencies.size());

    Set<Artifact> directArtifacts = new HashSet<Artifact>(dependencies.size());
    LinkedHashSet<Artifact> transitiveArtifacts = new LinkedHashSet<Artifact>();

    for (Dependency dependency : dependencies) {
        // Process imports and symbols of this dependency's transitives
        // first.
        Artifact directArtifact = artifactFactory.createDependencyArtifact(dependency.getGroupId(),
                dependency.getArtifactId(), VersionRange.createFromVersion(dependency.getVersion()),
                dependency.getType(), dependency.getClassifier(), dependency.getScope());

        if (!jsArtifactFilter.include(directArtifact) && !wwwZipArtifactFilter.include(directArtifact)) {
            continue;
        }

        Set<Artifact> artifactsToResolve = new HashSet<Artifact>(1);
        artifactsToResolve.add(directArtifact);

        ArtifactResolutionResult result;
        try {
            result = resolver.resolveTransitively(artifactsToResolve, project.getArtifact(), remoteRepositories,
                    localRepository, artifactMetadataSource);
        } catch (ArtifactResolutionException e) {
            throw new MojoExecutionException("Problem resolving dependencies", e);
        } catch (ArtifactNotFoundException e) {
            throw new MojoExecutionException("Problem resolving dependencies", e);
        }

        // Associate the transitive dependencies with the direct dependency and aggregate all transitives for
        // collection later.

        LinkedHashSet<Artifact> directTransitiveArtifacts = new LinkedHashSet<Artifact>(
                result.getArtifacts().size());
        for (Object o : result.getArtifacts()) {
            Artifact resolvedArtifact = (Artifact) o;
            if (jsArtifactFilter.include(resolvedArtifact) && //
                    !resolvedArtifact.equals(directArtifact)) {
                directTransitiveArtifacts.add(resolvedArtifact);
            }
        }

        directArtifacts.add(directArtifact);
        transitiveArtifacts.addAll(directTransitiveArtifacts);
        directArtifactWithTransitives.put(directArtifact, directTransitiveArtifacts);
    }

    // Resolve the best versions of the transitives to use by asking Maven to collect them.

    Set<Artifact> collectedArtifacts = new HashSet<Artifact>(
            directArtifacts.size() + transitiveArtifacts.size());
    Map<ArtifactId, Artifact> indexedCollectedDependencies = new HashMap<ArtifactId, Artifact>(
            collectedArtifacts.size());
    try {
        // Note that we must pass an insert-order set into the collector. The collector appears to assume that order
        // is significant, even though it is undocumented.
        LinkedHashSet<Artifact> collectableArtifacts = new LinkedHashSet<Artifact>(directArtifacts);
        collectableArtifacts.addAll(transitiveArtifacts);

        ArtifactResolutionResult resolutionResult = artifactCollector.collect(collectableArtifacts,
                project.getArtifact(), localRepository, remoteRepositories, artifactMetadataSource, null, //
                Collections.EMPTY_LIST);
        for (Object o : resolutionResult.getArtifacts()) {
            Artifact collectedArtifact = (Artifact) o;
            collectedArtifacts.add(collectedArtifact);

            // Build up an index of collected transitive dependencies so that we can refer back to them as we
            // process the direct dependencies.
            ArtifactId collectedArtifactId = new ArtifactId(collectedArtifact.getGroupId(),
                    collectedArtifact.getArtifactId());
            indexedCollectedDependencies.put(collectedArtifactId, collectedArtifact);
        }

        if (getLog().isDebugEnabled()) {
            getLog().debug("Dependencies collected: " + collectedArtifacts.toString());
        }
    } catch (ArtifactResolutionException e) {
        throw new MojoExecutionException("Cannot collect dependencies", e);
    }

    // Now go through direct artifacts and process their transitives.

    LocalRepositoryCollector localRepositoryCollector = new LocalRepositoryCollector(project, localRepository,
            new File[] {});

    for (Entry<Artifact, LinkedHashSet<Artifact>> entry : directArtifactWithTransitives.entrySet()) {
        Artifact directArtifact = entry.getKey();
        LinkedHashSet<Artifact> directArtifactTransitives = entry.getValue();

        LinkedHashSet<String> transitivesAsImports = new LinkedHashSet<String>(
                directArtifactTransitives.size());

        for (Object o : directArtifactTransitives) {
            Artifact directTransitiveArtifact = (Artifact) o;

            // Get the transitive artifact that Maven decided was the best to use.

            ArtifactId directTransitiveArtifactId = new ArtifactId(directTransitiveArtifact.getGroupId(),
                    directTransitiveArtifact.getArtifactId());
            Artifact transitiveArtifact = indexedCollectedDependencies.get(directTransitiveArtifactId);

            List<File> transitiveArtifactFiles = getArtifactFiles(transitiveArtifact, targetFolder, workFolder,
                    compileWorkFolder, localRepositoryCollector);

            // Only process this dependency if we've not done so
            // already.
            for (File transitiveArtifactFile : transitiveArtifactFiles) {
                if (!processedFiles.contains(transitiveArtifactFile)) {
                    String localRepository = localRepositoryCollector
                            .findLocalRepository(transitiveArtifactFile.getAbsolutePath());
                    if (localRepository != null) {
                        if (processFileForImportsAndSymbols(new File(localRepository), targetJsFolder,
                                transitiveArtifactFile, fileDependencyGraphModificationTime,
                                directArtifactTransitives)) {

                            processedFiles.add(transitiveArtifactFile);

                            fileDependencyGraphUpdated = true;
                        }
                    } else {
                        throw new MojoExecutionException(
                                "Problem determining local repository for transitive file: "
                                        + transitiveArtifactFile);
                    }
                }

                // Add transitives to the artifacts set of dependencies -
                // as if they were @import statements themselves.
                transitivesAsImports.add(transitiveArtifactFile.getPath());
            }
        }

        // Now deal with the pom specified dependency.
        List<File> artifactFiles = getArtifactFiles(directArtifact, targetFolder, workFolder, compileWorkFolder,
                localRepositoryCollector);
        for (File artifactFile : artifactFiles) {
            String artifactPath = artifactFile.getAbsolutePath();

            // Process imports and symbols of this dependency if we've not
            // already done so.
            if (!processedFiles.contains(artifactFile)) {
                String localRepository = localRepositoryCollector
                        .findLocalRepository(artifactFile.getAbsolutePath());
                if (localRepository != null) {
                    if (processFileForImportsAndSymbols(new File(localRepository), targetJsFolder, artifactFile,
                            fileDependencyGraphModificationTime, null)) {
                        processedFiles.add(artifactFile);

                        fileDependencyGraphUpdated = true;
                    }
                } else {
                    throw new MojoExecutionException(
                            "Problem determining local repository for file: " + artifactFile);
                }
            }

            // Add in our transitives to the dependency graph if they're not
            // already there.
            LinkedHashSet<String> existingImports = fileDependencies.get(artifactPath);
            if (existingImports.addAll(transitivesAsImports)) {
                if (getLog().isDebugEnabled()) {
                    getLog().debug("Using transitives as import: " + transitivesAsImports + " for file: "
                            + artifactPath);
                }
                fileDependencyGraphUpdated = true;
            }
        }

    }

    return fileDependencyGraphUpdated;
}

From source file:com.tct.email.mail.store.ImapFolder.java

public void fetchInternal(Message[] messages, FetchProfile fp, MessageRetrievalListener listener)
        throws MessagingException {
    if (messages.length == 0) {
        return;
    }
    //[FEATURE]-Add-BEGIN by TSCD.Chao Zhang,04/14/2014,FR 631895(porting from FR 472914)
    int limitedSize = messages[0].getDownloadOptions();
    //[FEATURE]-Add-END by TSCD.Chao Zhang
    //[FEATURE]-Add-BEGIN by TSCD.chao zhang,04/25/2014,FR 631895(porting from  FR487417)
    if (downloadRemainFlag) {
        limitedSize = Utility.ENTIRE_MAIL;
    }
    //[FEATURE]-Add-END by TSCD.Chao Zhang
    checkOpen();
    HashMap<String, Message> messageMap = new HashMap<String, Message>();
    for (Message m : messages) {
        messageMap.put(m.getUid(), m);
    }

    /*
     * Figure out what command we are going to run:
     * FLAGS     - UID FETCH (FLAGS)
     * ENVELOPE  - UID FETCH (INTERNALDATE UID RFC822.SIZE FLAGS BODY.PEEK[
     *                            HEADER.FIELDS (date subject from content-type to cc)])
     * STRUCTURE - UID FETCH (BODYSTRUCTURE)
     * BODY_SANE - UID FETCH (BODY.PEEK[]<0.N>) where N = max bytes returned
     * BODY      - UID FETCH (BODY.PEEK[])
     * Part      - UID FETCH (BODY.PEEK[ID]) where ID = mime part ID
     */

    final LinkedHashSet<String> fetchFields = new LinkedHashSet<String>();

    fetchFields.add(ImapConstants.UID);
    if (fp.contains(FetchProfile.Item.FLAGS)) {
        fetchFields.add(ImapConstants.FLAGS);
    }
    if (fp.contains(FetchProfile.Item.ENVELOPE)) {
        fetchFields.add(ImapConstants.INTERNALDATE);
        fetchFields.add(ImapConstants.RFC822_SIZE);
        fetchFields.add(ImapConstants.FETCH_FIELD_HEADERS);
    }
    if (fp.contains(FetchProfile.Item.STRUCTURE)) {
        fetchFields.add(ImapConstants.BODYSTRUCTURE);
    }

    if (fp.contains(FetchProfile.Item.BODY_SANE)) {
        fetchFields.add(ImapConstants.FETCH_FIELD_BODY_PEEK_SANE);
    }
    if (fp.contains(FetchProfile.Item.BODY)) {
        fetchFields.add(ImapConstants.FETCH_FIELD_BODY_PEEK);
    }

    // TODO Why are we only fetching the first part given?
    final Part fetchPart = fp.getFirstPart();
    if (fetchPart != null) {
        final String[] partIds = fetchPart.getHeader(MimeHeader.HEADER_ANDROID_ATTACHMENT_STORE_DATA);
        // TODO Why can a single part have more than one Id? And why should we only fetch
        // the first id if there are more than one?
        if (partIds != null) {
            //[FEATURE]-Add-BEGIN by TSCD.Chao Zhang,04/14/2014,FR 631895(porting from FR 472914)
            String fetchFieldCommand = ImapConstants.FETCH_FIELD_BODY_PEEK_BARE + "[" + partIds[0] + "]";
            if (limitedSize != Utility.ENTIRE_MAIL
                    && fetchPart.getMimeType().contains(ImapConstants.TEXT.toLowerCase())) {
                fetchFieldCommand = fetchFieldCommand + "<0." + limitedSize + ">";
            }
            fetchFields.add(fetchFieldCommand);
            //[FEATURE]-Add-END by TSCD.Chao Zhang
        }
    }

    try {
        mConnection.sendCommand(String.format(Locale.US, ImapConstants.UID_FETCH + " %s (%s)",
                ImapStore.joinMessageUids(messages),
                Utility.combine(fetchFields.toArray(new String[fetchFields.size()]), ' ')), false);
        ImapResponse response;
        do {
            response = null;
            try {
                // TS: Gantao 2015-12-07 EMAIL BUGFIX_1020377 MOD_S
                //set ui callback when network downloading, update progress bar when fetching
                // attachment from server.
                //response = mConnection.readResponse();
                response = mConnection.readResponse(listener);
                // TS: Gantao 2015-12-07 EMAIL BUGFIX_1020377 MOD_E

                if (!response.isDataResponse(1, ImapConstants.FETCH)) {
                    continue; // Ignore
                }
                final ImapList fetchList = response.getListOrEmpty(2);
                final String uid = fetchList.getKeyedStringOrEmpty(ImapConstants.UID).getString();
                if (TextUtils.isEmpty(uid))
                    continue;

                ImapMessage message = (ImapMessage) messageMap.get(uid);
                if (message == null)
                    continue;

                if (fp.contains(FetchProfile.Item.FLAGS)) {
                    final ImapList flags = fetchList.getKeyedListOrEmpty(ImapConstants.FLAGS);
                    for (int i = 0, count = flags.size(); i < count; i++) {
                        final ImapString flag = flags.getStringOrEmpty(i);
                        if (flag.is(ImapConstants.FLAG_DELETED)) {
                            message.setFlagInternal(Flag.DELETED, true);
                        } else if (flag.is(ImapConstants.FLAG_ANSWERED)) {
                            message.setFlagInternal(Flag.ANSWERED, true);
                        } else if (flag.is(ImapConstants.FLAG_SEEN)) {
                            message.setFlagInternal(Flag.SEEN, true);
                        } else if (flag.is(ImapConstants.FLAG_FLAGGED)) {
                            message.setFlagInternal(Flag.FLAGGED, true);
                        }
                    }
                }
                if (fp.contains(FetchProfile.Item.ENVELOPE)) {
                    final Date internalDate = fetchList.getKeyedStringOrEmpty(ImapConstants.INTERNALDATE)
                            .getDateOrNull();
                    final int size = fetchList.getKeyedStringOrEmpty(ImapConstants.RFC822_SIZE)
                            .getNumberOrZero();
                    final String header = fetchList
                            .getKeyedStringOrEmpty(ImapConstants.BODY_BRACKET_HEADER, true).getString();

                    message.setInternalDate(internalDate);
                    message.setSize(size);
                    message.parse(Utility.streamFromAsciiString(header));
                }
                if (fp.contains(FetchProfile.Item.STRUCTURE)) {
                    ImapList bs = fetchList.getKeyedListOrEmpty(ImapConstants.BODYSTRUCTURE);
                    if (!bs.isEmpty()) {
                        try {
                            parseBodyStructure(bs, message, ImapConstants.TEXT);
                        } catch (MessagingException e) {
                            if (Logging.LOGD) {
                                LogUtils.v(Logging.LOG_TAG, e, "Error handling message");
                            }
                            message.setBody(null);
                        }
                    }
                }
                if (fp.contains(FetchProfile.Item.BODY) || fp.contains(FetchProfile.Item.BODY_SANE)) {
                    // Body is keyed by "BODY[]...".
                    // Previously used "BODY[..." but this can be confused with "BODY[HEADER..."
                    // TODO Should we accept "RFC822" as well??
                    ImapString body = fetchList.getKeyedStringOrEmpty("BODY[]", true);
                    InputStream bodyStream = body.getAsStream();
                    message.parse(bodyStream);
                }
                if (fetchPart != null) {
                    InputStream bodyStream = fetchList.getKeyedStringOrEmpty("BODY[", true).getAsStream();
                    String[] encodings = fetchPart.getHeader(MimeHeader.HEADER_CONTENT_TRANSFER_ENCODING);

                    String contentTransferEncoding = null;
                    if (encodings != null && encodings.length > 0) {
                        contentTransferEncoding = encodings[0];
                    } else {
                        // According to http://tools.ietf.org/html/rfc2045#section-6.1
                        // "7bit" is the default.
                        contentTransferEncoding = "7bit";
                    }

                    try {
                        // TODO Don't create 2 temp files.
                        // decodeBody creates BinaryTempFileBody, but we could avoid this
                        // if we implement ImapStringBody.
                        // (We'll need to share a temp file.  Protect it with a ref-count.)
                        fetchPart.setBody(
                                decodeBody(bodyStream, contentTransferEncoding, fetchPart.getSize(), listener));
                    } catch (Exception e) {
                        // TODO: Figure out what kinds of exceptions might actually be thrown
                        // from here. This blanket catch-all is because we're not sure what to
                        // do if we don't have a contentTransferEncoding, and we don't have
                        // time to figure out what exceptions might be thrown.
                        LogUtils.e(Logging.LOG_TAG, "Error fetching body %s", e);
                    }
                }

                if (listener != null) {
                    listener.messageRetrieved(message);
                }
            } finally {
                destroyResponses();
            }
        } while (!response.isTagged());
    } catch (IOException ioe) {
        throw ioExceptionHandler(mConnection, ioe);
    }
}

From source file:org.sakaiproject.evaluation.logic.EvalAuthoringServiceImpl.java

public Long[] copyTemplateItems(Long[] templateItemIds, String ownerId, boolean hidden, Long toTemplateId,
        boolean includeChildren) {
    if (ownerId == null || ownerId.length() == 0) {
        throw new IllegalArgumentException("Invalid ownerId, cannot be null or empty string");
    }
    if (templateItemIds == null || templateItemIds.length == 0) {
        throw new IllegalArgumentException("Invalid templateItemIds array, cannot be null or empty");
    }

    templateItemIds = ArrayUtils.unique(templateItemIds);
    EvalTemplate toTemplate = null;
    if (toTemplateId != null) {
        toTemplate = getTemplateById(toTemplateId);
        if (toTemplate == null) {
            throw new IllegalArgumentException(
                    "Invalid toTemplateId, cannot find the template by this id: " + toTemplateId);
        }
    }

    List<EvalTemplateItem> templateItemsList = dao.findBySearch(EvalTemplateItem.class,
            new Search("id", templateItemIds));
    if (templateItemsList.size() != templateItemIds.length) {
        throw new IllegalArgumentException("Invalid templateItemIds in array: " + templateItemIds);
    }

    // now we check that copying into the originating template is correct and ensure the toTemplate is set
    if (toTemplate == null) {
        // all templateItems must be from the same template if this is the case
        for (EvalTemplateItem templateItem : templateItemsList) {
            Long templateId = templateItem.getTemplate().getId();
            if (toTemplate == null) {
                toTemplate = getTemplateById(templateId);
            } else {
                if (!toTemplate.getId().equals(templateId)) {
                    throw new IllegalArgumentException(
                            "All templateItems must be from the same template when doing a copy within a template, "
                                    + "if you want to copy templateItems from multiple templates into the same templates they are currently in you must "
                                    + "do it in batches where each set if from one template");
                }
            }
        }
    }

    // sort the list of template items
    templateItemsList = TemplateItemUtils.orderTemplateItems(templateItemsList, false);

    int itemCount = 1; // start at display order 1
    if (toTemplateId == null && toTemplate != null) {
        // copying inside one template so start at the item count + 1
        // get the count of items in the destination template so we know where to start displayOrder from
        itemCount = getItemCountForTemplate(toTemplate.getId()) + 1;
    }

    /* http://bugs.sakaiproject.org/jira/browse/EVALSYS-689
     * need to track the copied items and scales to avoid copying them more than once
     */
    LinkedHashSet<EvalTemplateItem> copiedTemplateItems = new LinkedHashSet<>(templateItemsList.size());

    // shallow copy all block parents first so we can know their new IDs, then later we will update them
    List<EvalTemplateItem> parentItems = TemplateItemUtils.getParentItems(templateItemsList);
    HashMap<Long, EvalTemplateItem> parentIdToCopy = new HashMap<>(parentItems.size());
    if (!parentItems.isEmpty()) {
        for (EvalTemplateItem original : parentItems) {
            Long originalBlockParentId = original.getId();
            List<EvalTemplateItem> childItems = TemplateItemUtils.getChildItems(templateItemsList,
                    originalBlockParentId);
            if (childItems.size() > 0) {
                // only copy this if it has children, lone parents do not get copied
                EvalTemplateItem copy = copyTemplateItem(original, toTemplate, ownerId, hidden);
                parentIdToCopy.put(originalBlockParentId, copy);
            }
        }
        HashSet<EvalTemplateItem> parentItemsToSave = new HashSet<>(parentIdToCopy.values());
        dao.saveSet(parentItemsToSave);
    }

    // check for block items
    List<EvalTemplateItem> nonChildItems = TemplateItemUtils.getNonChildItems(templateItemsList);

    // iterate though in display order and copy the template items
    int displayOrder = 0;
    for (EvalTemplateItem original : nonChildItems) {
        templateItemsList.remove(original); // take this out of the list
        if (TemplateItemUtils.isBlockParent(original)) {
            // this is a block parent so copy it and its children
            Long originalBlockParentId = original.getId();
            if (parentIdToCopy.containsKey(originalBlockParentId)) {
                EvalTemplateItem copyParent = parentIdToCopy.get(originalBlockParentId);
                copyParent.setDisplayOrder(itemCount + displayOrder); // fix up display order
                copyParent.setBlockId(null);
                copyParent.setBlockParent(true);
                //dao.save(copyParent);
                copiedTemplateItems.add(copyParent);
                Long blockParentId = copyParent.getId();

                // loop through and copy all the children and assign them to the parent
                List<EvalTemplateItem> childItems = TemplateItemUtils.getChildItems(templateItemsList,
                        originalBlockParentId);
                for (int j = 0; j < childItems.size(); j++) {
                    EvalTemplateItem child = childItems.get(j);
                    templateItemsList.remove(child); // take this out of the list
                    // copy the child item
                    EvalTemplateItem copy = copyTemplateItem(child, toTemplate, ownerId, hidden);
                    copy.setDisplayOrder(j); // fix up display order
                    copy.setBlockId(blockParentId);
                    copy.setBlockParent(false);
                    //dao.save(copy);
                    copiedTemplateItems.add(copy);
                }
            }
        } else {
            // not a block parent
            EvalTemplateItem copy = copyTemplateItem(original, toTemplate, ownerId, hidden);
            copy.setDisplayOrder(itemCount + displayOrder); // fix up display order
            //dao.save(copy);
            copiedTemplateItems.add(copy);
        }
        displayOrder++;
    }

    // now copy any remaining orphaned block children into normal items
    for (EvalTemplateItem original : templateItemsList) {
        displayOrder++;
        EvalTemplateItem copy = copyTemplateItem(original, toTemplate, ownerId, hidden);
        copy.setDisplayOrder(itemCount + displayOrder); // fix up display order
        //dao.save(copy);
        copiedTemplateItems.add(copy);
    }

    if (includeChildren) {
        // make a copy of all items and put them into the TIs to replace the originals
        HashSet<Long> itemIdSet = new HashSet<>();
        for (EvalTemplateItem eti : copiedTemplateItems) {
            if (eti.getItem() != null) {
                Long itemId = eti.getItem().getId();
                itemIdSet.add(itemId);
            }
        }
        Long[] itemIds = itemIdSet.toArray(new Long[itemIdSet.size()]);
        // do the items copy
        Set<EvalItem> copiedItems = copyItemsInternal(itemIds, ownerId, hidden, includeChildren);
        HashMap<Long, EvalItem> originalIdToCopy = new HashMap<>(copiedItems.size());
        for (EvalItem evalItem : copiedItems) {
            originalIdToCopy.put(evalItem.getCopyOf(), evalItem);
        }
        // insert the copied items into the copied template items (update the foreign keys when we save)
        for (EvalTemplateItem eti : copiedTemplateItems) {
            if (eti.getItem() != null) {
                Long itemId = eti.getItem().getId(); // original id
                EvalItem copy = originalIdToCopy.get(itemId);
                if (copy != null) {
                    eti.setItem(copy);
                }
            }
        }
    }
    // save the template items
    dao.saveSet(copiedTemplateItems);

    Long[] copiedIds = new Long[copiedTemplateItems.size()];
    int counter = 0;
    for (EvalTemplateItem copiedTemplateItem : copiedTemplateItems) {
        copiedIds[counter] = copiedTemplateItem.getId();
        counter++;
    }
    return copiedIds;
}

From source file:org.pentaho.di.ui.spoon.Spoon.java

public void openFile(boolean importfile) {
    SpoonPerspective activePerspective = SpoonPerspectiveManager.getInstance().getActivePerspective();

    // In case the perspective wants to handle open/save itself, let it...
    if (!importfile) {
        if (activePerspective instanceof SpoonPerspectiveOpenSaveInterface) {
            ((SpoonPerspectiveOpenSaveInterface) activePerspective).open();
            return;
        }
    }

    String activePerspectiveId = activePerspective.getId();
    boolean etlPerspective = activePerspectiveId.equals(MainSpoonPerspective.ID);

    if (rep == null || importfile || !etlPerspective) { // Load from XML

        FileDialog dialog = new FileDialog(shell, SWT.OPEN);

        LinkedHashSet<String> extensions = new LinkedHashSet<String>();
        LinkedHashSet<String> extensionNames = new LinkedHashSet<String>();
        StringBuilder allExtensions = new StringBuilder();
        for (FileListener l : fileListeners) {
            for (String ext : l.getSupportedExtensions()) {
                extensions.add("*." + ext);
                allExtensions.append("*.").append(ext).append(";");
            }
            Collections.addAll(extensionNames, l.getFileTypeDisplayNames(Locale.getDefault()));
        }
        extensions.add("*");
        extensionNames.add(BaseMessages.getString(PKG, "Spoon.Dialog.OpenFile.AllFiles"));

        String[] exts = new String[extensions.size() + 1];
        exts[0] = allExtensions.toString();
        System.arraycopy(extensions.toArray(new String[extensions.size()]), 0, exts, 1, extensions.size());

        String[] extNames = new String[extensionNames.size() + 1];
        extNames[0] = BaseMessages.getString(PKG, "Spoon.Dialog.OpenFile.AllTypes");
        System.arraycopy(extensionNames.toArray(new String[extensionNames.size()]), 0, extNames, 1,
                extensionNames.size());

        dialog.setFilterExtensions(exts);

        setFilterPath(dialog);
        String filename = dialog.open();
        if (filename != null) {

            if (importfile) {
                if (activePerspective instanceof SpoonPerspectiveOpenSaveInterface) {
                    ((SpoonPerspectiveOpenSaveInterface) activePerspective).importFile(filename);
                    return;
                }
            }

            lastDirOpened = dialog.getFilterPath();
            openFile(filename, importfile);
        }
    } else {
        SelectObjectDialog sod = new SelectObjectDialog(shell, rep);
        if (sod.open() != null) {
            RepositoryObjectType type = sod.getObjectType();
            String name = sod.getObjectName();
            RepositoryDirectoryInterface repDir = sod.getDirectory();

            // Load a transformation
            if (RepositoryObjectType.TRANSFORMATION.equals(type)) {
                // Loads the last version
                TransLoadProgressDialog tlpd = new TransLoadProgressDialog(shell, rep, name, repDir, null);
                TransMeta transMeta = tlpd.open();
                if (transMeta != null) {
                    // Only register shared objects and variables once the load has succeeded
                    sharedObjectsFileMap.put(transMeta.getSharedObjects().getFilename(),
                            transMeta.getSharedObjects());
                    setTransMetaVariables(transMeta);
                    if (log.isDetailed()) {
                        log.logDetailed(BaseMessages.getString(PKG, "Spoon.Log.LoadToTransformation", name,
                                repDir.getName()));
                    }
                    props.addLastFile(LastUsedFile.FILE_TYPE_TRANSFORMATION, name, repDir.getPath(), true,
                            rep.getName());
                    addMenuLast();
                    transMeta.clearChanged();
                    // transMeta.setFilename(name); // Don't do it, it's a bad idea!
                    addTransGraph(transMeta);
                }
                refreshGraph();
                refreshTree();
            } else if (RepositoryObjectType.JOB.equals(type)) {
                // Load a job
                // Loads the last version
                JobLoadProgressDialog jlpd = new JobLoadProgressDialog(shell, rep, name, repDir, null);
                JobMeta jobMeta = jlpd.open();
                if (jobMeta != null) {
                    // Only register shared objects and variables once the load has succeeded
                    sharedObjectsFileMap.put(jobMeta.getSharedObjects().getFilename(),
                            jobMeta.getSharedObjects());
                    setJobMetaVariables(jobMeta);
                    props.addLastFile(LastUsedFile.FILE_TYPE_JOB, name, repDir.getPath(), true, rep.getName());
                    saveSettings();
                    addMenuLast();
                    addJobGraph(jobMeta);
                }
                refreshGraph();
                refreshTree();
            }
        }
    }
}

From source file:com.gtwm.pb.model.manageData.DataManagement.java

public int importCSV(HttpServletRequest request, TableInfo table, boolean updateExistingRecords,
        BaseField recordIdentifierField, boolean generateRowIds, char separator, char quotechar,
        int numHeaderLines, boolean useRelationDisplayValues, boolean importSequenceValues,
        boolean requireExactRelationMatches, boolean trim, boolean merge, List<FileItem> multipartItems,
        String csvContent) throws SQLException, InputRecordException, IOException, CantDoThatException,
        ObjectNotFoundException, DisallowedException, CodingErrorException {
    if (!FileUpload.isMultipartContent(new ServletRequestContext(request))) {
        if (csvContent == null) {
            throw new CantDoThatException(
                    "To import CSV content, a file must be uploaded (form posted as multi-part) or csv_content specified");
        }
    }
    int numImportedRecords = 0;
    // get field set to import into. LinkedHashSet to ensure order is
    // retained so the right values are imported into the right fields
    LinkedHashSet<BaseField> fields = new LinkedHashSet<BaseField>(table.getFields());
    // if row IDs aren't included in the data to import, remove ID from the
    // field set
    BaseField primaryKey = table.getPrimaryKey();
    if (recordIdentifierField == null) {
        recordIdentifierField = primaryKey;
    }
    if (generateRowIds || (updateExistingRecords && !recordIdentifierField.equals(primaryKey))) {
        fields.remove(primaryKey);
    }
    Map<RelationField, Map<String, String>> relationLookups = new HashMap<RelationField, Map<String, String>>();
    // Remove fields which shouldn't be modified during the import
    // For serial fields, if we need to set serial values explicitly, this
    // will have to be dealt with later
    for (BaseField field : table.getFields()) {
        if (field instanceof SequenceField && (!field.equals(primaryKey)) && (!importSequenceValues)) {
            fields.remove(field);
        } else if (field.getHidden()) {
            if (field.getFieldName().equals(HiddenFields.VIEW_COUNT.getFieldName())
                    || field.getFieldName().equals(HiddenFields.COMMENTS_FEED.getFieldName())) {
                fields.remove(field);
            } else if (updateExistingRecords) {
                if (field.getFieldName().equals(HiddenFields.DATE_CREATED.getFieldName())
                        || field.getFieldName().equals(HiddenFields.CREATED_BY.getFieldName())) {
                    fields.remove(field);
                }
            }
        } else if (!field.getFieldCategory().savesData()) {
            fields.remove(field);
        }
        // Also, if importing relations by display value, look up
        // display/internal value mappings
        if (useRelationDisplayValues && field instanceof RelationField) {
            Map<String, String> displayToInternalValue = ((RelationFieldDefn) field).getItems(true, false);
            relationLookups.put((RelationField) field, displayToInternalValue);
        }
    }
    // Prepare SQL
    String insertSQLCode = null;
    String updateSQLCode = null;
    String logCreationSQLCode = null;
    // If updating, we'll need a record ID value. Depending on what the
    // identifier field is, this could be one of a couple of different types
    String recordIdentifierString = null;
    Integer recordIdentifierInteger = null;
    int recordIdentifierFieldNum = 0;
    DatabaseFieldType identifierFieldDbType = null;
    if (updateExistingRecords) {
        identifierFieldDbType = recordIdentifierField.getDbType();
        if (!identifierFieldDbType.equals(DatabaseFieldType.VARCHAR)
                && !identifierFieldDbType.equals(DatabaseFieldType.INTEGER)
                && !identifierFieldDbType.equals(DatabaseFieldType.SERIAL)) {
            throw new CantDoThatException("The record identifier field has to be text or a whole number, "
                    + recordIdentifierField + " is a " + identifierFieldDbType);
        }
        updateSQLCode = "UPDATE " + table.getInternalTableName() + " SET ";
        int fieldNum = 0;
        for (BaseField field : fields) {
            fieldNum += 1;
            if (merge) {
                // Update database only if there's a non-null value from the
                // spreadsheet
                updateSQLCode += field.getInternalFieldName() + " = COALESCE(?," + field.getInternalFieldName()
                        + "), ";
            } else {
                updateSQLCode += field.getInternalFieldName() + " = ?, ";
            }
            if (field.equals(recordIdentifierField)) {
                recordIdentifierFieldNum = fieldNum;
            }
        }
        if (recordIdentifierFieldNum == 0) {
            throw new CantDoThatException("Can't find the field specified as record identifier ("
                    + recordIdentifierField + ") in the list of table fields " + fields + " in table " + table);
        }
        updateSQLCode = updateSQLCode.substring(0, updateSQLCode.length() - 2);
        updateSQLCode += " WHERE " + recordIdentifierField.getInternalFieldName() + "=?";
        logCreationSQLCode = "UPDATE " + table.getInternalTableName() + " SET "
                + table.getField(HiddenFields.DATE_CREATED.getFieldName()).getInternalFieldName() + "=?, "
                + table.getField(HiddenFields.CREATED_BY.getFieldName()).getInternalFieldName() + "=? WHERE "
                + primaryKey.getInternalFieldName() + "=?";
    }
    insertSQLCode = "INSERT INTO " + table.getInternalTableName() + "(";
    String placeholders = "";
    for (BaseField field : fields) {
        insertSQLCode += field.getInternalFieldName() + ", ";
        placeholders += "?, ";
    }
    placeholders = placeholders.substring(0, placeholders.length() - 2);
    insertSQLCode = insertSQLCode.substring(0, insertSQLCode.length() - 2) + ") VALUES (" + placeholders + ")";
    // Find content to import
    Reader inputStreamReader = null;
    if (csvContent != null) {
        inputStreamReader = new StringReader(csvContent);
    } else {
        for (FileItem item : multipartItems) {
            // if item is a file
            if (!item.isFormField()) {
                if (item.getName().toLowerCase().endsWith(".xls")) {
                    throw new CantDoThatException(
                            "You need to upload as a CSV to import, Excel files can't be imported directly");
                }
                inputStreamReader = new InputStreamReader(item.getInputStream());
                break;
            }
        }
    }
    if (inputStreamReader == null) {
        throw new CantDoThatException("No file uploaded");
    }
    CSVReader csvReader = new CSVReader(inputStreamReader, separator, quotechar, numHeaderLines);
    // returns a list of String arrays
    List<String[]> csvLines = (List<String[]>) csvReader.readAll();
    // do db inserts
    Connection conn = null;
    PreparedStatement statement = null;
    // backupInsertStatement is for when an update returns 0 rows affected,
    // i.e. there's no matching row. In this case, do an insert
    PreparedStatement backupInsertStatement = null;
    PreparedStatement logCreationStatement = null;
    // These two variables used in exception handling
    int importLine = 0;
    BaseField fieldImported = null;
    Timestamp importTime = new Timestamp(System.currentTimeMillis());
    AppUserInfo loggedInUser = authManager.getUserByUserName(request, request.getRemoteUser());
    String fullname = loggedInUser.getForename() + " " + loggedInUser.getSurname() + " ("
            + loggedInUser.getUserName() + ")";
    try {
        conn = this.dataSource.getConnection();
        conn.setAutoCommit(false);
        if (updateExistingRecords) {
            statement = conn.prepareStatement(updateSQLCode);
            backupInsertStatement = conn.prepareStatement(insertSQLCode);
            logCreationStatement = conn.prepareStatement(logCreationSQLCode);
        } else {
            statement = conn.prepareStatement(insertSQLCode);
        }
        CSVLINE: for (String[] csvLineArray : csvLines) {
            // convert to an object rather than a primitive array -
            // easier to work with
            List<String> lineValues = Arrays.asList(csvLineArray);
            importLine++;
            // skip blank lines
            if (lineValues.size() == 1) {
                if (lineValues.get(0).length() == 0) {
                    continue CSVLINE;
                }
            }
            int fieldNum = 0;
            for (BaseField field : fields) {
                fieldImported = field;
                fieldNum++;
                if (field.getHidden()) {
                    String fieldName = field.getFieldName();
                    if (fieldName.equals(HiddenFields.LOCKED.getFieldName())) {
                        statement.setBoolean(fieldNum, false);
                        if (updateExistingRecords) {
                            backupInsertStatement.setBoolean(fieldNum, false);
                        }
                    } else if (fieldName.equals(HiddenFields.DATE_CREATED.getFieldName())
                            || fieldName.equals(HiddenFields.LAST_MODIFIED.getFieldName())) {
                        statement.setTimestamp(fieldNum, importTime);
                        if (updateExistingRecords) {
                            backupInsertStatement.setTimestamp(fieldNum, importTime);
                        }
                    } else if (fieldName.equals(HiddenFields.CREATED_BY.getFieldName())
                            || fieldName.equals(HiddenFields.MODIFIED_BY.getFieldName())) {
                        statement.setString(fieldNum, fullname);
                        if (updateExistingRecords) {
                            backupInsertStatement.setString(fieldNum, fullname);
                        }
                    }
                } else if (fieldNum > lineValues.size()) {
                    // booleans have a not null constraint
                    // (DatabaseFieldType.BOOLEAN assumed, matching the enum used elsewhere in this method;
                    // java.sql.Types.BOOLEAN, an int, could never equal the enum)
                    if (field.getDbType().equals(DatabaseFieldType.BOOLEAN)) {
                        statement.setBoolean(fieldNum, false);
                        if (updateExistingRecords) {
                            backupInsertStatement.setBoolean(fieldNum, false);
                        }
                    } else {
                        statement.setNull(fieldNum, Types.NULL);
                        if (updateExistingRecords) {
                            backupInsertStatement.setNull(fieldNum, Types.NULL);
                        }
                    }
                } else {
                    String lineValue = lineValues.get(fieldNum - 1);
                    if (lineValue != null) {
                        if (trim) {
                            lineValue = lineValue.trim();
                        }
                        if (lineValue.equals("")) {
                            // booleans have a not null constraint
                            // (DatabaseFieldType.BOOLEAN assumed, as above)
                            if (field.getDbType().equals(DatabaseFieldType.BOOLEAN)) {
                                statement.setBoolean(fieldNum, false);
                                if (updateExistingRecords) {
                                    backupInsertStatement.setBoolean(fieldNum, false);
                                }
                            } else {
                                statement.setNull(fieldNum, Types.NULL);
                                if (updateExistingRecords) {
                                    backupInsertStatement.setNull(fieldNum, Types.NULL);
                                }
                            }
                        } else {
                            if ((field instanceof FileField) && (generateRowIds)) {
                                throw new CantDoThatException(
                                        "Cannot generate row ids when importing file names. See line "
                                                + importLine + ", field '" + field.getFieldName()
                                                + "' with value '" + lineValue + "'");
                            }
                            switch (field.getDbType()) {
                            case VARCHAR:
                                statement.setString(fieldNum, lineValue);
                                if (updateExistingRecords) {
                                    backupInsertStatement.setString(fieldNum, lineValue);
                                    if (field.equals(recordIdentifierField)) {
                                        recordIdentifierString = lineValue;
                                    }
                                }
                                break;
                            case TIMESTAMP:
                                // deal with month and year
                                // resolution dates exported
                                if (lineValue.matches("^[a-zA-Z]{3}\\s\\d{2,4}$")) {
                                    lineValue = "01 " + lineValue;
                                } else if (lineValue.matches("^\\d{2,4}")) {
                                    lineValue = "01 Jan " + lineValue;
                                }
                                try {
                                    Calendar calValue = CalendarParser.parse(lineValue,
                                            CalendarParser.DD_MM_YY);
                                    statement.setTimestamp(fieldNum, new Timestamp(calValue.getTimeInMillis()));
                                    if (updateExistingRecords) {
                                        backupInsertStatement.setTimestamp(fieldNum,
                                                new Timestamp(calValue.getTimeInMillis()));
                                    }
                                } catch (CalendarParserException cpex) {
                                    throw new InputRecordException("Error importing line " + importLine
                                            + ", field " + field + ": " + cpex.getMessage(), field, cpex);
                                }
                                break;
                            case FLOAT:
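                                // Strip out any characters that can't form part
                                // of a number (e.g. currency symbols or
                                // thousands separators) before parsing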
                                lineValue = lineValue.trim().replaceAll("[^\\d\\.\\+\\-eE]", "");
                                double doubleValue = Double.parseDouble(lineValue);
                                statement.setDouble(fieldNum, doubleValue);
                                if (updateExistingRecords) {
                                    backupInsertStatement.setDouble(fieldNum, doubleValue);
                                }
                                break;
                            case INTEGER:
                                if ((field instanceof RelationField) && (useRelationDisplayValues)) {
                                    // find key value for display value
                                    RelationField relationField = (RelationField) field;
                                    Map<String, String> valueKeyMap = relationLookups.get(relationField);
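                                    // valueKeyMap maps each related record's
                                    // display value to its internal key value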
                                    String internalValueString = valueKeyMap.get(lineValue);
                                    if (internalValueString == null) {
                                        if (!requireExactRelationMatches) {
                                            // A very basic fuzzy match: use the
                                            // first display value that contains
                                            // the imported value, ignoring case
                                            String potentialDisplayValue = null;
                                            String lineValueLowerCase = lineValue.toLowerCase();
                                            FUZZYMATCH: for (Map.Entry<String, String> entry : valueKeyMap
                                                    .entrySet()) {
                                                potentialDisplayValue = entry.getKey();
                                                if (potentialDisplayValue.toLowerCase()
                                                        .contains(lineValueLowerCase)) {
                                                    internalValueString = entry.getValue();
                                                    break FUZZYMATCH;
                                                }
                                            }
                                        }
                                        if (internalValueString == null) {
                                            throw new CantDoThatException("Error importing line " + importLine
                                                    + ", field " + relationField + ": Can't find a related '"
                                                    + relationField.getRelatedTable() + "' for "
                                                    + relationField.getDisplayField() + " '" + lineValue
                                                    + "'. ");
                                        }
                                    }
                                    int keyValue = Integer.valueOf(internalValueString);
                                    statement.setInt(fieldNum, keyValue);
                                    if (updateExistingRecords) {
                                        backupInsertStatement.setInt(fieldNum, keyValue);
                                        if (field.equals(recordIdentifierField)) {
                                            recordIdentifierInteger = keyValue;
                                        }
                                    }
                                } else {
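                                    // Plain integer field: strip characters that
                                    // can't form part of a number before parsing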
                                    lineValue = lineValue.trim().replaceAll("[^\\d\\.\\+\\-eE]", "");
                                    int keyValue = Integer.valueOf(lineValue);
                                    statement.setInt(fieldNum, keyValue);
                                    if (updateExistingRecords) {
                                        backupInsertStatement.setInt(fieldNum, keyValue);
                                        if (field.equals(recordIdentifierField)) {
                                            recordIdentifierInteger = keyValue;
                                        }
                                    }
                                }
                                break;
                            case SERIAL:
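                                // Sequence values are imported as plain
                                // integers; strip non-numeric characters first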
                                lineValue = lineValue.trim().replaceAll("[^\\d\\.\\+\\-eE]", "");
                                int keyValue = Integer.valueOf(lineValue);
                                statement.setInt(fieldNum, keyValue);
                                if (updateExistingRecords) {
                                    backupInsertStatement.setInt(fieldNum, keyValue);
                                    if (field.equals(recordIdentifierField)) {
                                        recordIdentifierInteger = keyValue;
                                    }
                                }
                                break;
                            case BOOLEAN:
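                                // Interpret the text as a boolean; any value
                                // recognised as true sets the field to true,
                                // otherwise false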
                                boolean valueIsTrue = Helpers.valueRepresentsBooleanTrue(lineValue);
                                statement.setBoolean(fieldNum, valueIsTrue);
                                if (updateExistingRecords) {
                                    backupInsertStatement.setBoolean(fieldNum, valueIsTrue);
                                }
                                }
                                break;
                            }
                        }
                    } else {
                        // booleans have a not null constraint
                        if (field.getDbType().equals(DatabaseFieldType.BOOLEAN)) {
                            statement.setBoolean(fieldNum, false);
                            if (updateExistingRecords) {
                                backupInsertStatement.setBoolean(fieldNum, false);
                            }
                        } else {
                            statement.setNull(fieldNum, Types.NULL);
                            if (updateExistingRecords) {
                                backupInsertStatement.setNull(fieldNum, Types.NULL);
                            }
                        }
                    }
                }
            }
            if (updateExistingRecords) {
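                // All field values are bound; now bind the record identifier
                // to the UPDATE's WHERE clause (parameter fields.size() + 1)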
                // for potential error messages
                String recordIdentifierDescription = null;
                if (identifierFieldDbType.equals(DatabaseFieldType.INTEGER)
                        || identifierFieldDbType.equals(DatabaseFieldType.SERIAL)) {
                    if (recordIdentifierInteger == null) {
                        throw new InputRecordException(
                                "Can't find a record identifier value at line " + importLine,
                                recordIdentifierField);
                    }
                    recordIdentifierDescription = recordIdentifierField.getFieldName() + " = "
                            + recordIdentifierInteger;
                    // Set the 'WHERE recordIdentifier = ?' clause
                    statement.setInt(fields.size() + 1, recordIdentifierInteger);
                } else {
                    if (recordIdentifierString == null) {
                        throw new InputRecordException(
                                "Can't find a record identifier value at line " + importLine,
                                recordIdentifierField);
                    }
                    recordIdentifierDescription = recordIdentifierField.getFieldName() + " = '"
                            + recordIdentifierString + "'";
                    // Set the 'WHERE recordIdentifier = ?' clause
                    statement.setString(fields.size() + 1, recordIdentifierString);
                }
                int rowsAffected = statement.executeUpdate();
                if (rowsAffected == 0) {
                    // No existing record matched the identifier, so fall
                    // back to inserting a new record instead
                    backupInsertStatement.executeUpdate();
                    // NB Postgres specific code to find Row ID of newly
                    // inserted record, not cross-db compatible
                    String newRowIdSQLCode = "SELECT currval('" + table.getInternalTableName() + "_"
                            + primaryKey.getInternalFieldName() + "_seq')";
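                    // currval() is session-local in Postgres, so this returns
                    // the id generated by this connection's insert even if
                    // other sessions are inserting concurrently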
                    PreparedStatement newRowIdStatement = conn.prepareStatement(newRowIdSQLCode);
                    ResultSet newRowIdResults = newRowIdStatement.executeQuery();
                    try {
                        if (newRowIdResults.next()) {
                            int newRowId = newRowIdResults.getInt(1);
                            // Add creation metadata to the new row
                            logCreationStatement.setTimestamp(1, importTime);
                            logCreationStatement.setString(2, fullname);
                            logCreationStatement.setInt(3, newRowId);
                            int creationLogRowsAffected = logCreationStatement.executeUpdate();
                            if (creationLogRowsAffected == 0) {
                                throw new SQLException(
                                        "Unable to update creation metadata of newly inserted record, using query "
                                                + logCreationStatement);
                            }
                        } else {
                            throw new SQLException("Row ID not found for the newly inserted record. '"
                                    + newRowIdStatement + "' didn't work");
                        }
                    } finally {
                        // Close on every path, including when an exception
                        // is thrown above
                        newRowIdResults.close();
                        newRowIdStatement.close();
                    }
                } else if (rowsAffected > 1) {
                    throw new InputRecordException("Error importing line " + importLine
                            + ". The record identifier field " + recordIdentifierDescription
                            + " should match only 1 record in the database but it actually matches "
                            + rowsAffected, recordIdentifierField);
                }
                // reset to null for the next line
                recordIdentifierString = null;
                recordIdentifierInteger = null;
            } else {
                statement.executeUpdate();
            }
            numImportedRecords += 1;
        }
        statement.close();
        if (backupInsertStatement != null) {
            backupInsertStatement.close();
        }
        if (logCreationStatement != null) {
            logCreationStatement.close();
        }
        // reset the primary key ID sequence so new records can be added
        resetSequence((SequenceField) primaryKey, conn);
        // and any other sequence fields
        if (importSequenceValues) {
            for (BaseField field : table.getFields()) {
                if ((!field.equals(primaryKey)) && field instanceof SequenceField) {
                    resetSequence((SequenceField) field, conn);
                }
            }
        }
        // ANALYZE the table after a large import, to refresh the query
        // planner's statistics
        if (numImportedRecords > 1000) {
            Statement analyzeStatement = conn.createStatement();
            analyzeStatement.execute("ANALYZE " + table.getInternalTableName());
            analyzeStatement.close();
        }
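        // Commit the whole import as a single transaction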
        conn.commit();
    } catch (SQLException sqlex) {
        String databaseErrorMessage = Helpers.replaceInternalNames(sqlex.getMessage(),
                table.getDefaultReport());
        logger.warn("Import failed, statement is " + statement);
        logger.warn("Backup insert statement is " + backupInsertStatement);
        String errorMessage = "Error importing CSV line " + importLine;
        if (!fieldImported.getHidden()) {
            errorMessage += ", field '" + fieldImported + "'";
        }
        errorMessage += ": " + databaseErrorMessage;
        throw new InputRecordException(errorMessage, fieldImported, sqlex);
    } catch (NumberFormatException nfex) {
        String causeMessage = nfex.getMessage();
        causeMessage = causeMessage.replaceAll("For input string", "value");
        String errorMessage = "Error parsing number when importing CSV line " + importLine;
        if (!fieldImported.getHidden()) {
            errorMessage += ", field '" + fieldImported + "'";
        }
        errorMessage += ": " + causeMessage;
        throw new InputRecordException(errorMessage, fieldImported, nfex);
    } finally {
        if (conn != null) {
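            // On Postgres, closing the connection rolls back any
            // uncommitted transaction and releases its statements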
            conn.close();
        }
    }
    this.logLastDataChangeTime(request);
    logLastTableDataChangeTime(table);
    UsageLogger usageLogger = new UsageLogger(this.dataSource);
    String logMessage = String.valueOf(numImportedRecords);
    if (updateExistingRecords) {
        logMessage += " records imported";
    } else {
        logMessage += " new records imported";
    }
    if (csvContent != null) {
        logMessage += " from file";
    }
    usageLogger.logDataChange(loggedInUser, table, null, AppAction.CSV_IMPORT, -1, logMessage);
    UsageLogger.startLoggingThread(usageLogger);
    return numImportedRecords;
}
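
The method above follows an update-then-insert pattern: try an UPDATE keyed on the
record identifier and, only when no rows are affected, fall back to an INSERT,
reading the new row's ID back from the serial column's sequence. Below is a
minimal, self-contained sketch of that pattern; the table 'people', its columns
and the sequence name 'people_person_id_seq' are hypothetical, chosen only to
illustrate the technique, and are not taken from the source above.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class UpsertSketch {

    /**
     * Update the row identified by email or, if no row matches, insert a
     * new one. Returns the affected row's id. Postgres-specific: reads a
     * newly generated id back with currval(), which is session-local and
     * so unaffected by concurrent inserts on other connections.
     */
    public static int updateOrInsert(Connection conn, String email, int age) throws SQLException {
        try (PreparedStatement update = conn
                .prepareStatement("UPDATE people SET age = ? WHERE email = ?")) {
            update.setInt(1, age);
            update.setString(2, email);
            if (update.executeUpdate() > 0) {
                // An existing record matched; look its id up
                try (PreparedStatement select = conn
                        .prepareStatement("SELECT person_id FROM people WHERE email = ?")) {
                    select.setString(1, email);
                    try (ResultSet rs = select.executeQuery()) {
                        if (!rs.next()) {
                            throw new SQLException("Updated row not found for " + email);
                        }
                        return rs.getInt(1);
                    }
                }
            }
        }
        // No match: fall back to an insert, as the import code above does
        try (PreparedStatement insert = conn
                .prepareStatement("INSERT INTO people (email, age) VALUES (?, ?)")) {
            insert.setString(1, email);
            insert.setInt(2, age);
            insert.executeUpdate();
        }
        // Fetch the id generated for the insert in this session
        try (PreparedStatement newId = conn.prepareStatement("SELECT currval('people_person_id_seq')");
                ResultSet rs = newId.executeQuery()) {
            if (!rs.next()) {
                throw new SQLException("No id returned from sequence");
            }
            return rs.getInt(1);
        }
    }
}

On Postgres 9.5 and later the same effect can be achieved atomically with
INSERT ... ON CONFLICT ... DO UPDATE; the two-statement form in this sketch
simply mirrors the structure of the import method above.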