Example usage for java.nio.file StandardCopyOption REPLACE_EXISTING

List of usage examples for java.nio.file StandardCopyOption REPLACE_EXISTING

Introduction

In this page you can find the example usage for java.nio.file StandardCopyOption REPLACE_EXISTING.

Prototype

StandardCopyOption REPLACE_EXISTING

To view the source code for java.nio.file StandardCopyOption REPLACE_EXISTING, click the Source Link below.

Click Source Link

Document

Replace an existing file if it exists.

Usage

From source file:org.roda.wui.api.controllers.BrowserHelper.java

public static OtherMetadata createOrUpdateOtherMetadataFile(String aipId, String representationId,
        List<String> fileDirectoryPath, String fileId, String type, String fileSuffix, InputStream is)
        throws RequestNotValidException, GenericException, NotFoundException, AuthorizationDeniedException {
    // Stage the uploaded stream in a temp file so the model service can ingest it
    // as a filesystem-backed payload.
    Path tempFile = null;
    try {
        tempFile = Files.createTempFile("descriptive", ".tmp");
        Files.copy(is, tempFile, StandardCopyOption.REPLACE_EXISTING);
        ContentPayload payload = new FSPathContentPayload(tempFile);

        return RodaCoreFactory.getModelService().createOrUpdateOtherMetadata(aipId, representationId,
                fileDirectoryPath, fileId, fileSuffix, type, payload, false);
    } catch (IOException e) {
        // Preserve the cause so callers can diagnose the underlying I/O failure
        // (consistent with createOrUpdateAIPDescriptiveMetadataFile).
        throw new GenericException("Error creating or updating other metadata", e);
    } finally {
        // Clean up the staging file on all paths, as the sibling helper does.
        FSUtils.deletePathQuietly(tempFile);
    }
}

From source file:client.welcome2.java

private void SupplierAddButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_SupplierAddButtonActionPerformed
    // Inserts a new supplier row and, when a contract file was selected, copies the
    // contract PDF into src/SupplierContracts/<contract id>.pdf.
    // Status flags: h is set to 1 when the insert/copy throws. f is never modified in
    // this method, so the "f != 2" branch in the catch block always runs, and the
    // final "f == 0 && h == 0" check effectively means "no exception occurred".
    int f = 0;
    int h = 0;
    // sn (instance field) holds the contract id, used as the stored PDF's file name.
    sn = SupplierContracdIDText.getText();
    int ans;
    try {
        String sql = "Insert into suppliers (supplier_id,supplier_name,supplier_address,supplier_phone,supplier_email,supplier_contract_id) values(?,?,?,?,?,?)";

        pst = conn.prepareStatement(sql);
        pst.setString(1, SupplierIDText.getText());
        pst.setString(2, SupplierNameText.getText());
        pst.setString(3, SupplierAddressText.getText());
        pst.setString(4, SupplierPhoneText.getText());
        pst.setString(5, SupplierEmailText.getText());
        pst.setString(6, SupplierContracdIDText.getText());

        // No contract file chosen: confirm before inserting without one.
        if (SupplierUploadText.getText().isEmpty()) {
            ans = JOptionPane.showConfirmDialog(null,
                    "Are You Sure You Want To Add a Supplier Without a Contract?", "Warning!",
                    JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE);
            // YES_NO_OPTION: 0 == YES (insert), 1 == NO (do nothing).
            if (ans == 0)
                pst.execute();
            if (ans == 1) {
            }
        }

        else {

            // Copy the selected contract into the project folder, replacing any
            // earlier contract stored under the same contract id.
            Path dest = Paths.get("src/SupplierContracts/" + sn + ".pdf");
            Path source = Paths.get(filename_supplier);
            Files.copy(source, dest, StandardCopyOption.REPLACE_EXISTING);
            pst.execute();
        }

        JOptionPane.showMessageDialog(null, "Supplier Has Been Added");
    } catch (Exception e) {
        // Insert or copy failed — most likely an empty mandatory field or a DB error.
        // Highlight every empty field in red, reset filled ones to white.
        h = 1;

        if (SupplierIDText.getText().isEmpty())
            SupplierIDText.setBackground(Color.red);
        else
            SupplierIDText.setBackground(Color.white);

        if (SupplierNameText.getText().isEmpty())
            SupplierNameText.setBackground(Color.red);
        else
            SupplierNameText.setBackground(Color.white);

        if (SupplierAddressText.getText().isEmpty())
            SupplierAddressText.setBackground(Color.red);
        else
            SupplierAddressText.setBackground(Color.white);

        if (SupplierPhoneText.getText().isEmpty())
            SupplierPhoneText.setBackground(Color.red);
        else
            SupplierPhoneText.setBackground(Color.white);

        if (SupplierEmailText.getText().isEmpty())
            SupplierEmailText.setBackground(Color.red);
        else
            SupplierEmailText.setBackground(Color.white);

        if (SupplierContracdIDText.getText().isEmpty())
            SupplierContracdIDText.setBackground(Color.red);
        else
            SupplierContracdIDText.setBackground(Color.white);

        // NOTE(review): f is never set to 2 anywhere in this method, so the "empty
        // fields" dialog is always shown even for non-validation errors — confirm intent.
        if (f != 2) {
            JOptionPane.showMessageDialog(null, "The Marked Fields Are Empty\n Please Fill All Fields",
                    "Attention!", JOptionPane.ERROR_MESSAGE);
        } else {
            JOptionPane.showMessageDialog(null, e);
        }
    }
    // Clear the form only when no exception occurred.
    if (f == 0 && h == 0) {
        SupplierIDText.setText("");
        SupplierNameText.setText("");
        SupplierAddressText.setText("");
        SupplierPhoneText.setText("");
        SupplierEmailText.setText("");
        SupplierContracdIDText.setText("");
        SupplierUploadText.setText("");
    }
}

From source file:org.deeplearning4j.models.embeddings.loader.WordVectorSerializer.java

/**
 * This method restores a previously saved w2v model. The file can be in one of the following formats:
 * 1) Binary model, either compressed or not. Like well-known Google Model
 * 2) Popular CSV word2vec text format
 * 3) DL4j compressed format
 *
 * Please note: if extended data isn't available, only weights will be loaded instead.
 *
 * @param file the model file to restore
 * @param extendedModel if TRUE, we'll try to load HS states & Huffman tree info, if FALSE, only weights will be loaded
 * @return the restored Word2Vec model
 */
public static Word2Vec readWord2VecModel(@NonNull File file, boolean extendedModel) {
    InMemoryLookupTable<VocabWord> lookupTable = new InMemoryLookupTable<>();
    AbstractCache<VocabWord> vocabCache = new AbstractCache<>();
    Word2Vec vec;
    INDArray syn0 = null;
    VectorsConfiguration configuration = new VectorsConfiguration();

    if (!file.exists() || !file.isFile())
        throw new ND4JIllegalStateException("File [" + file.getAbsolutePath() + "] doesn't exist");

    // Snapshot the memory manager's GC settings; they are restored on every exit path below.
    int originalFreq = Nd4j.getMemoryManager().getOccasionalGcFrequency();
    boolean originalPeriodic = Nd4j.getMemoryManager().isPeriodicGcActive();

    if (originalPeriodic)
        Nd4j.getMemoryManager().togglePeriodicGc(false);

    // Relax occasional GC while bulk-loading vectors.
    Nd4j.getMemoryManager().setOccasionalGcFrequency(50000);

    // try to load zip format
    try {
        if (extendedModel) {
            log.debug("Trying full model restoration...");
            // this method just loads full compressed model

            if (originalPeriodic)
                Nd4j.getMemoryManager().togglePeriodicGc(true);

            Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);

            return readWord2Vec(file);
        } else {
            log.debug("Trying simplified model restoration...");

            File tmpFileSyn0 = File.createTempFile("word2vec", "syn");
            File tmpFileConfig = File.createTempFile("word2vec", "config");
            // we don't need full model, so we go directly to syn0 file

            // NOTE(review): zipFile is never closed on any path — looks like a resource
            // leak; confirm before changing, as streams below read from its entries.
            ZipFile zipFile = new ZipFile(file);
            ZipEntry syn = zipFile.getEntry("syn0.txt");
            InputStream stream = zipFile.getInputStream(syn);

            // Extract the syn0 weights to a temp file for the CSVReader pass below.
            Files.copy(stream, Paths.get(tmpFileSyn0.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);

            // now we're restoring configuration saved earlier
            ZipEntry config = zipFile.getEntry("config.json");
            if (config != null) {
                stream = zipFile.getInputStream(config);

                StringBuilder builder = new StringBuilder();
                try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
                    String line;
                    while ((line = reader.readLine()) != null) {
                        builder.append(line);
                    }
                }

                configuration = VectorsConfiguration.fromJson(builder.toString().trim());
            }

            // Optional frequencies entry: when present, the vocabulary is built from it
            // and syn0 rows are assigned by index in the loop below.
            ZipEntry ve = zipFile.getEntry("frequencies.txt");
            if (ve != null) {
                stream = zipFile.getInputStream(ve);
                AtomicInteger cnt = new AtomicInteger(0);
                try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
                    String line;
                    while ((line = reader.readLine()) != null) {
                        // Line layout (from usage): base64 word, frequency, sequences count.
                        String[] split = line.split(" ");
                        VocabWord word = new VocabWord(Double.valueOf(split[1]), decodeB64(split[0]));
                        word.setIndex(cnt.getAndIncrement());
                        word.incrementSequencesCount(Long.valueOf(split[2]));

                        vocabCache.addToken(word);
                        vocabCache.addWordToIndex(word.getIndex(), word.getLabel());

                        Nd4j.getMemoryManager().invokeGcOccasionally();
                    }
                }
            }

            List<INDArray> rows = new ArrayList<>();
            // basically read up everything, call vstack and then return model
            try (Reader reader = new CSVReader(tmpFileSyn0)) {
                AtomicInteger cnt = new AtomicInteger(0);
                while (reader.hasNext()) {
                    Pair<VocabWord, float[]> pair = reader.next();
                    VocabWord word = pair.getFirst();
                    INDArray vector = Nd4j.create(pair.getSecond());

                    if (ve != null) {
                        // Vocabulary already built from frequencies.txt: write rows
                        // directly into a preallocated syn0 matrix.
                        if (syn0 == null)
                            syn0 = Nd4j.create(vocabCache.numWords(), vector.length());

                        syn0.getRow(cnt.getAndIncrement()).assign(vector);
                    } else {
                        // No frequencies entry: collect rows and build the vocabulary
                        // from the syn0 entries themselves.
                        rows.add(vector);

                        vocabCache.addToken(word);
                        vocabCache.addWordToIndex(word.getIndex(), word.getLabel());
                    }

                    Nd4j.getMemoryManager().invokeGcOccasionally();
                }
            } catch (Exception e) {
                throw new RuntimeException(e);
            } finally {
                // Restore the GC settings captured at the top of the method.
                if (originalPeriodic)
                    Nd4j.getMemoryManager().togglePeriodicGc(true);

                Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
            }

            if (syn0 == null && vocabCache.numWords() > 0)
                syn0 = Nd4j.vstack(rows);

            if (syn0 == null) {
                log.error("Can't build syn0 table");
                throw new DL4JInvalidInputException("Can't build syn0 table");
            }

            lookupTable = new InMemoryLookupTable.Builder<VocabWord>().cache(vocabCache)
                    .vectorLength(syn0.columns()).useHierarchicSoftmax(false).useAdaGrad(false).build();

            lookupTable.setSyn0(syn0);

            // Best-effort cleanup of the temp files; failures are deliberately ignored.
            try {
                tmpFileSyn0.delete();
                tmpFileConfig.delete();
            } catch (Exception e) {
                //
            }
        }
    } catch (Exception e) {
        // let's try to load this file as csv file
        try {
            log.debug("Trying CSV model restoration...");

            Pair<InMemoryLookupTable, VocabCache> pair = loadTxt(file);
            lookupTable = pair.getFirst();
            vocabCache = (AbstractCache<VocabWord>) pair.getSecond();
        } catch (Exception ex) {
            // we fallback to trying binary model instead
            try {
                log.debug("Trying binary model restoration...");

                if (originalPeriodic)
                    Nd4j.getMemoryManager().togglePeriodicGc(true);

                Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);

                vec = loadGoogleModel(file, true, true);
                return vec;
            } catch (Exception ey) {
                // try to load without linebreaks
                try {
                    if (originalPeriodic)
                        Nd4j.getMemoryManager().togglePeriodicGc(true);

                    Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);

                    vec = loadGoogleModel(file, true, false);
                    return vec;
                } catch (Exception ez) {
                    throw new RuntimeException(
                            "Unable to guess input file format. Please use corresponding loader directly");
                }
            }
        }
    }

    Word2Vec.Builder builder = new Word2Vec.Builder(configuration).lookupTable(lookupTable).useAdaGrad(false)
            .vocabCache(vocabCache).layerSize(lookupTable.layerSize())

            // we don't use hs here, because model is incomplete
            .useHierarchicSoftmax(false).resetModel(false);

    /*
    Trying to restore TokenizerFactory & TokenPreProcessor
     */

    TokenizerFactory factory = getTokenizerFactory(configuration);
    if (factory != null)
        builder.tokenizerFactory(factory);

    vec = builder.build();

    return vec;
}

From source file:com.ut.healthelink.controller.adminProcessingActivity.java

/**
 * The 'inboundBatchOptions' function will process the batch according to the option submitted by admin.
 * Supported options: processBatch, cancel, reset, releaseBatch, rejectMessage.
 * Every invocation (with the resolved action) is written to the user activity log.
 */
@RequestMapping(value = "/inboundBatchOptions", method = RequestMethod.POST)
public @ResponseBody boolean inboundBatchOptions(HttpSession session,
        @RequestParam(value = "tId", required = false) Integer transactionInId,
        @RequestParam(value = "batchId", required = true) Integer batchId, Authentication authentication,
        @RequestParam(value = "batchOption", required = true) String batchOption) throws Exception {

    String strBatchOption = "";
    User userInfo = usermanager.getUserByUserName(authentication.getName());
    batchUploads batchDetails = transactionInManager.getBatchDetails(batchId);

    // Only act when both the admin user and the batch can be resolved.
    if (userInfo != null && batchDetails != null) {
        if (batchOption.equalsIgnoreCase("processBatch")) {
            // Numeric status ids drive the action (2 -> load, 3/36 -> process);
            // meanings inferred from usage — confirm against the status reference table.
            if (batchDetails.getstatusId() == 2) {
                strBatchOption = "Loaded Batch";
                transactionInManager.loadBatch(batchId);
            } else if (batchDetails.getstatusId() == 3 || batchDetails.getstatusId() == 36) {
                strBatchOption = "Processed Batch";
                transactionInManager.processBatch(batchId, false, 0);
            }
        } else if (batchOption.equalsIgnoreCase("cancel")) {
            strBatchOption = "Cancelled Batch";
            transactionInManager.updateBatchStatus(batchId, 4, "startDateTime");
            transactionInManager.updateTransactionStatus(batchId, 0, 0, 31);
            transactionInManager.updateBatchStatus(batchId, 32, "endDateTime");
            //need to cancel targets also
            transactionInManager.updateTranTargetStatusByUploadBatchId(batchId, 0, 31);
            transactionInManager.updateBatchDLStatusByUploadBatchId(batchId, 0, 32, "endDateTime");

        } else if (batchOption.equalsIgnoreCase("reset")) {
            strBatchOption = "Reset Batch";
            //1. Check
            boolean allowBatchClear = transactionInManager.allowBatchClear(batchId);
            if (allowBatchClear) {
                //if ftp or rhapsody, we flag as DNP and move file back to input folder
                if (batchDetails.gettransportMethodId() == 5 || batchDetails.gettransportMethodId() == 3) {
                    transactionInManager.updateBatchStatus(batchId, 4, "startDateTime");

                    strBatchOption = "Reset Batch  - FTP/Rhapsody Reset";

                    //targets could be created already so we need to update the target status by upload batchId 
                    transactionInManager.updateTranTargetStatusByUploadBatchId(batchId, 0, 31);
                    transactionInManager.updateBatchDLStatusByUploadBatchId(batchId, 0, 35, "endDateTime");
                    transactionInManager.updateTransactionStatus(batchId, 0, 0, 31);
                    transactionInManager.updateBatchStatus(batchId, 35, "endDateTime");

                    // Rebuild the archived file's name from its original extension, then
                    // copy it back to the transport's input folder under the UT batch name.
                    String fileExt = batchDetails.getoriginalFileName()
                            .substring(batchDetails.getoriginalFileName().lastIndexOf("."));
                    fileSystem fileSystem = new fileSystem();

                    File archiveFile = new File(
                            fileSystem.setPath(archivePath) + batchDetails.getutBatchName() + fileExt);
                    String fileToPath = fileSystem.setPathFromRoot(batchDetails.getOriginalFolder());
                    //we name it ut batch name when move so we know
                    String newFileName = transactionInManager.newFileName(fileToPath,
                            (batchDetails.getutBatchName() + fileExt));
                    File newFile = new File(fileToPath + newFileName);
                    Path source = archiveFile.toPath();
                    Path target = newFile.toPath();
                    Files.copy(source, target);

                } else {

                    transactionInManager.updateBatchStatus(batchId, 4, "");
                    //2. clear
                    boolean cleared = transactionInManager.clearBatch(batchId);

                    //copy archive file back to original folder
                    fileSystem dir = new fileSystem();

                    // we need to move unencoded file back from archive folder and replace current file
                    //we set archive path
                    try {

                        File archiveFile = new File(dir.setPath(archivePath) + batchDetails.getutBatchName()
                                + batchDetails.getoriginalFileName()
                                        .substring(batchDetails.getoriginalFileName().lastIndexOf(".")));
                        Path archive = archiveFile.toPath();
                        File toFile = new File(dir.setPath(batchDetails.getFileLocation())
                                + batchDetails.getoriginalFileName());
                        Path toPath = toFile.toPath();
                        //need to encode file first
                        if (batchDetails.getEncodingId() == 1) {
                            String strEncodedFile = filemanager.encodeFileToBase64Binary(archiveFile);
                            toFile.delete();
                            //we replace file with encoded
                            filemanager.writeFile(toFile.getAbsolutePath(), strEncodedFile);
                        } else { // already encoded
                            Files.copy(archive, toPath, StandardCopyOption.REPLACE_EXISTING);
                        }

                        cleared = true;

                    } catch (Exception ex) {
                        // Restore failed: log the stack trace and leave the batch un-reset.
                        ex.printStackTrace();
                        cleared = false;
                    }

                    if (cleared) {
                        transactionInManager.updateBatchStatus(batchId, 2, "startOver");
                    }
                }
            }
        } else if (batchOption.equalsIgnoreCase("releaseBatch")) {
            strBatchOption = "Released Batch";
            if (batchDetails.getstatusId() == 5) {
                transactionInManager.updateBatchStatus(batchId, 4, "startDateTime");
                //check once again to make sure all transactions are in final status
                if (transactionInManager.getRecordCounts(batchId, Arrays.asList(11, 12, 13, 16), false,
                        false) == 0) {
                    transactionInManager.updateBatchStatus(batchId, 6, "endDateTime");
                } else {
                    transactionInManager.updateBatchStatus(batchId, 5, "endDateTime");
                }
            }
        } else if (batchOption.equalsIgnoreCase("rejectMessage")) {
            strBatchOption = "Rejected Transaction";
            if (batchDetails.getstatusId() == 5) {
                transactionInManager.updateTranStatusByTInId(transactionInId, 13);
            }
        }
    }

    //log user activity
    UserActivity ua = new UserActivity();
    ua.setUserId(userInfo.getId());
    ua.setAccessMethod("POST");
    ua.setPageAccess("/inboundBatchOptions");
    ua.setActivity("Admin - " + strBatchOption);
    ua.setBatchUploadId(batchId);
    if (transactionInId != null) {
        ua.setTransactionInIds(transactionInId.toString());
    }
    usermanager.insertUserLog(ua);
    return true;
}

From source file:com.ut.healthelink.service.impl.transactionOutManagerImpl.java

/** 
 * The 'RhapsodyTargetFile' function will get the Rhapsody details and move the file to the
 * configured Rhapsody output directory.
 *
 * @param batchId The id of the batch to move to Rhapsody folder
 * @param transportDetails transport configuration used to locate the source file and the Rhapsody target
 */
private void RhapsodyTargetFile(int batchId, configurationTransport transportDetails) {

    try {

        /* Update the status of the batch to locked */
        transactionOutDAO.updateBatchStatus(batchId, 22);

        List<transactionTarget> targets = transactionOutDAO.getTransactionsByBatchDLId(batchId);

        if (!targets.isEmpty()) {

            // Mark every upload/download record for this batch as in-progress (37)
            // before the file move.
            for (transactionTarget target : targets) {

                /* Need to update the uploaded batch status */
                transactionInManager.updateBatchStatus(target.getbatchUploadId(), 22, "");

                /* Need to update the uploaded batch transaction status */
                transactionInManager.updateTransactionStatus(target.getbatchUploadId(),
                        target.gettransactionInId(), 0, 37);

                /* Update the downloaded batch transaction status */
                transactionOutDAO.updateTargetTransasctionStatus(target.getbatchDLId(), 37);

            }

        }

        /* get the batch details */
        batchDownloads batchDetails = transactionOutDAO.getBatchDetails(batchId);

        /* Get the Rhapsody Details */
        configurationRhapsodyFields rhapsodyDetails = configurationTransportManager
                .getTransRhapsodyDetailsPush(transportDetails.getId());

        // the file is in output folder already, we need to rebuild path and move it

        fileSystem dir = new fileSystem();
        String filelocation = transportDetails.getfileLocation();
        filelocation = filelocation.replace("/bowlink/", "");
        dir.setDirByName(filelocation);

        File sourceFile = new File(dir.getDir() + batchDetails.getoutputFIleName());
        File targetFile = new File(
                dir.setPathFromRoot(rhapsodyDetails.getDirectory()) + batchDetails.getoutputFIleName());
        //move the file over and update the status to complete
        Files.move(sourceFile.toPath(), targetFile.toPath(), StandardCopyOption.REPLACE_EXISTING);

        transactionOutDAO.updateBatchStatus(batchId, 23);

        // After a successful move, flip all records to their completed statuses (23/20).
        for (transactionTarget target : targets) {

            /* Need to update the uploaded batch status */
            transactionInManager.updateBatchStatus(target.getbatchUploadId(), 23, "");

            /* Need to update the uploaded batch transaction status */
            transactionInManager.updateTransactionStatus(target.getbatchUploadId(), target.gettransactionInId(),
                    0, 20);

            /* Update the downloaded batch transaction status */
            transactionOutDAO.updateTargetTransasctionStatus(target.getbatchDLId(), 20);

        }

    } catch (Exception e) {
        // Best-effort: failures are reported to stderr and leave the batch in its
        // last-written status (no rollback).
        e.printStackTrace();
        System.err.println(
                "RhapsodyTargetFile - Error occurred trying to move a batch target. batchId: " + batchId);
    }

}

From source file:gov.osti.services.Metadata.java

/**
 * Store a File to a specific directory location. All files associated with
 * a CODEID are stored in the same folder.
 *
 * @param in the InputStream containing the file content
 * @param codeId the CODE ID associated with this file content
 * @param fileName the base file name of the file
 * @param basePath the base path destination for the file content
 * @return the absolute filesystem path to the file
 * @throws IOException on IO errors
 */
private static String writeFile(InputStream in, Long codeId, String fileName, String basePath)
        throws IOException {
    // Each CODE ID gets its own folder under the base path.
    java.nio.file.Path folder = Paths.get(basePath, String.valueOf(codeId));
    // Create any missing intermediate directories first.
    Files.createDirectories(folder);
    // Write the content, clobbering an existing file of the same name.
    java.nio.file.Path destination = folder.resolve(fileName);
    Files.copy(in, destination, StandardCopyOption.REPLACE_EXISTING);
    return destination.toString();
}

From source file:client.welcome2.java

private void SupplierUpdateButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_SupplierUpdateButtonActionPerformed
    // Updates the supplier row identified by tableClick and, when a new contract
    // file was chosen, copies it into the Contracts folder as <contract id>.pdf.

    fn = SupplierContractIDText1.getText();

    try {

        String sn = SupplierNameText1.getText();
        String sa = SupplierAddressText1.getText();
        String sp = SupplierPhoneText1.getText();
        String se = SupplierEmailText1.getText();
        String sc = SupplierContractIDText1.getText();

        // Bind parameters instead of concatenating user input into the SQL text:
        // the previous string-built statement was vulnerable to SQL injection and
        // broke on values containing quotes. This also matches the add-supplier
        // handler, which already uses '?' placeholders.
        String sql = "update suppliers set supplier_name=?,supplier_address=?,supplier_phone=?,"
                + "supplier_email=?,supplier_contract_id=? where supplier_id=?";
        pst = conn.prepareStatement(sql);
        pst.setString(1, sn);
        pst.setString(2, sa);
        pst.setString(3, sp);
        pst.setString(4, se);
        pst.setString(5, sc);
        pst.setString(6, String.valueOf(tableClick));
        pst.execute();
        JOptionPane.showMessageDialog(null, "Supplier Details Updated");
        update_Supplier_table();
    }

    catch (Exception e) {
        JOptionPane.showMessageDialog(null, e);
    }

    try {

        if (!SupplierUploadText1.getText().isEmpty()) {
            // Copy the newly selected contract PDF over any previous version.
            Path dest = Paths
                    .get("C:/Users/Felix/Documents/NetBeansProjects/Yatzig/src/Contracts/" + fn + ".pdf");
            Path source = Paths.get(supplier_filename_update);
            Files.copy(source, dest, StandardCopyOption.REPLACE_EXISTING);

            // Reset the form fields after a successful upload.
            SupplierNameText1.setText(" ");
            SupplierAddressText1.setText(" ");
            SupplierPhoneText1.setText(" ");
            SupplierEmailText1.setText(" ");
            SupplierContractIDText1.setText(" ");
            SupplierUploadText1.setText(" ");

        }

    } catch (Exception e) {
        JOptionPane.showMessageDialog(null, e);
    }
}

From source file:org.roda.wui.api.controllers.BrowserHelper.java

public static DescriptiveMetadata createOrUpdateAIPDescriptiveMetadataFile(String aipId,
        String representationId, String metadataId, String metadataType, String metadataVersion,
        Map<String, String> properties, InputStream is, boolean create)
        throws GenericException, RequestNotValidException, NotFoundException, AuthorizationDeniedException,
        AlreadyExistsException, ValidationException {
    // Stage the incoming stream in a temp file so it can be handed to the model
    // service as a filesystem-backed payload; the temp file is removed on all paths.
    Path tempPath = null;
    DescriptiveMetadata dm = null;
    try {
        tempPath = Files.createTempFile("descriptive", ".tmp");
        Files.copy(is, tempPath, StandardCopyOption.REPLACE_EXISTING);
        ContentPayload payload = new FSPathContentPayload(tempPath);
        ModelService model = RodaCoreFactory.getModelService();

        // 'create' selects between creating a new descriptive metadata record and
        // updating the existing one (the update variant also takes the properties map).
        dm = create
                ? model.createDescriptiveMetadata(aipId, representationId, metadataId, payload, metadataType,
                        metadataVersion)
                : model.updateDescriptiveMetadata(aipId, representationId, metadataId, payload, metadataType,
                        metadataVersion, properties);

    } catch (IOException e) {
        // Wrap I/O failures, preserving the cause for callers.
        throw new GenericException("Error creating or updating AIP descriptive metadata file", e);
    } finally {
        FSUtils.deletePathQuietly(tempPath);
    }

    return dm;
}

From source file:org.deeplearning4j.models.embeddings.loader.WordVectorSerializer.java

/**
 * This method restores previously saved w2v model. File can be in one of the following formats:
 * 1) Binary model, either compressed or not. Like well-known Google Model
 * 2) Popular CSV word2vec text format
 * 3) DL4j compressed format
 *
 * In return you get StaticWord2Vec model, which might be used as lookup table only in multi-gpu environment.
 *
 * @param file File should point to previously saved w2v model
 * @return a StaticWord2Vec lookup-table model
 */
// TODO: this method needs better name :)
public static WordVectors loadStaticModel(File file) {
    if (!file.exists() || file.isDirectory())
        throw new RuntimeException(
                new FileNotFoundException("File [" + file.getAbsolutePath() + "] was not found"));

    // Snapshot GC settings; restored in the finally blocks of every load attempt.
    int originalFreq = Nd4j.getMemoryManager().getOccasionalGcFrequency();
    boolean originalPeriodic = Nd4j.getMemoryManager().isPeriodicGcActive();

    if (originalPeriodic)
        Nd4j.getMemoryManager().togglePeriodicGc(false);

    Nd4j.getMemoryManager().setOccasionalGcFrequency(50000);

    // Vectors are stored uncompressed (NoOp compressor) in RAM-backed storage.
    CompressedRamStorage<Integer> storage = new CompressedRamStorage.Builder<Integer>()
            .useInplaceCompression(false).setCompressor(new NoOp()).emulateIsAbsent(false).build();

    VocabCache<VocabWord> vocabCache = new AbstractCache.Builder<VocabWord>().build();

    // now we need to define which file format we have here
    // if zip - that's dl4j format
    try {
        log.debug("Trying DL4j format...");
        File tmpFileSyn0 = File.createTempFile("word2vec", "syn");

        // NOTE(review): zipFile is never closed — looks like a resource leak; confirm.
        ZipFile zipFile = new ZipFile(file);
        ZipEntry syn0 = zipFile.getEntry("syn0.txt");
        InputStream stream = zipFile.getInputStream(syn0);

        // Extract syn0 weights to a temp file for the CSVReader pass below.
        Files.copy(stream, Paths.get(tmpFileSyn0.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);
        storage.clear();

        try (Reader reader = new CSVReader(tmpFileSyn0)) {
            while (reader.hasNext()) {
                Pair<VocabWord, float[]> pair = reader.next();
                VocabWord word = pair.getFirst();
                storage.store(word.getIndex(), pair.getSecond());

                vocabCache.addToken(word);
                vocabCache.addWordToIndex(word.getIndex(), word.getLabel());

                Nd4j.getMemoryManager().invokeGcOccasionally();
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            if (originalPeriodic)
                Nd4j.getMemoryManager().togglePeriodicGc(true);

            Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
        }
    } catch (Exception e) {
        // Not DL4j zip format — fall through to the CSV attempt.
        try {
            // try to load file as text csv
            vocabCache = new AbstractCache.Builder<VocabWord>().build();
            storage.clear();
            log.debug("Trying CSVReader...");
            try (Reader reader = new CSVReader(file)) {
                while (reader.hasNext()) {
                    Pair<VocabWord, float[]> pair = reader.next();
                    VocabWord word = pair.getFirst();
                    storage.store(word.getIndex(), pair.getSecond());

                    vocabCache.addToken(word);
                    vocabCache.addWordToIndex(word.getIndex(), word.getLabel());

                    Nd4j.getMemoryManager().invokeGcOccasionally();
                }
            } catch (Exception ef) {
                // we throw away this exception, and trying to load data as binary model
                throw new RuntimeException(ef);
            } finally {
                if (originalPeriodic)
                    Nd4j.getMemoryManager().togglePeriodicGc(true);

                Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
            }
        } catch (Exception ex) {
            // otherwise it's probably google model. which might be compressed or not
            log.debug("Trying BinaryReader...");
            vocabCache = new AbstractCache.Builder<VocabWord>().build();
            storage.clear();
            try (Reader reader = new BinaryReader(file)) {
                while (reader.hasNext()) {
                    Pair<VocabWord, float[]> pair = reader.next();
                    VocabWord word = pair.getFirst();

                    storage.store(word.getIndex(), pair.getSecond());

                    vocabCache.addToken(word);
                    vocabCache.addWordToIndex(word.getIndex(), word.getLabel());

                    Nd4j.getMemoryManager().invokeGcOccasionally();
                }
            } catch (Exception ez) {
                throw new RuntimeException("Unable to guess input file format");
            } finally {
                if (originalPeriodic)
                    Nd4j.getMemoryManager().togglePeriodicGc(true);

                Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
            }
        } finally {
            if (originalPeriodic)
                Nd4j.getMemoryManager().togglePeriodicGc(true);

            Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
        }
    }

    // Wrap the populated storage and vocabulary into a lookup-only model.
    StaticWord2Vec word2Vec = new StaticWord2Vec.Builder(storage, vocabCache).build();

    return word2Vec;
}

From source file:edu.harvard.iq.dataverse.ingest.IngestServiceBean.java

/**
 * Standalone command-line entry point for testing tabular ingest.
 *
 * <p>Reads the file named by {@code args[0]} with the ingest plugin registered
 * for the MIME type in {@code args[1]}, copies the resulting tab-delimited
 * file next to the input (same name, {@code .tab} extension, replacing any
 * existing file), and prints a summary of the resulting data table
 * (NVARS/NOBS/UNF plus one line per variable). Exits with status 1 and a
 * message on {@code System.err} for any failure.
 *
 * <p>Usage: {@code java edu.harvard.iq.dataverse.ingest.IngestServiceBean <file> <type>}
 *
 * @param args {@code args[0]} = path to the input file, {@code args[1]} = MIME type
 */
public static void main(String[] args) {

    // Validate the argument count BEFORE indexing into args: the original
    // read args[0]/args[1] first, so a missing argument crashed with
    // ArrayIndexOutOfBoundsException instead of printing the usage message.
    // (args elements are never null, so the empty-string check suffices.)
    if (args.length < 2 || args[0].isEmpty() || args[1].isEmpty()) {
        System.err.println("Usage: java edu.harvard.iq.dataverse.ingest.IngestServiceBean <file> <type>.");
        System.exit(1);
    }

    String file = args[0];
    String type = args[1];

    // Resolve the ingest plugin before touching the file system — fail fast
    // on an unknown type.
    TabularDataFileReader ingestPlugin = getTabDataReaderByMimeType(type);

    if (ingestPlugin == null) {
        System.err.println("Could not locate an ingest plugin for type " + type + ".");
        System.exit(1);
    }

    TabularDataIngest tabDataIngest = null;

    // try-with-resources guarantees the input stream is closed; the original
    // opened the stream and never closed it. FileNotFoundException is caught
    // first (it is a subtype of IOException) to preserve the distinct
    // "Could not open file" message for a missing input.
    try (BufferedInputStream fileInputStream = new BufferedInputStream(
            new FileInputStream(new File(file)))) {
        tabDataIngest = ingestPlugin.read(fileInputStream, null);
    } catch (FileNotFoundException notfoundEx) {
        System.err.println("Could not open file " + file + ".");
        System.exit(1);
    } catch (IOException ingestEx) {
        System.err.println("Caught an exception trying to ingest file " + file + ".");
        System.exit(1);
    }

    try {
        if (tabDataIngest != null) {
            File tabFile = tabDataIngest.getTabDelimitedFile();

            if (tabDataIngest.getDataTable() != null && tabFile != null && tabFile.exists()) {

                // Write the tab-delimited output beside the input file,
                // replacing any previous run's output.
                String tabFilename = FileUtil.replaceExtension(file, "tab");

                Files.copy(Paths.get(tabFile.getAbsolutePath()), Paths.get(tabFilename),
                        StandardCopyOption.REPLACE_EXISTING);

                DataTable dataTable = tabDataIngest.getDataTable();

                System.out.println("NVARS: " + dataTable.getVarQuantity());
                System.out.println("NOBS: " + dataTable.getCaseQuantity());
                System.out.println("UNF: " + dataTable.getUnf());

                // One summary line per variable: name, inferred type, UNF.
                for (int i = 0; i < dataTable.getVarQuantity(); i++) {
                    String vartype;

                    if (dataTable.getDataVariables().get(i).isIntervalContinuous()) {
                        vartype = "numeric-continuous";
                    } else if (dataTable.getDataVariables().get(i).isTypeNumeric()) {
                        vartype = "numeric-discrete";
                    } else {
                        vartype = "character";
                    }

                    System.out.print("VAR" + i + " ");
                    System.out.print(dataTable.getDataVariables().get(i).getName() + " ");
                    System.out.print(vartype + " ");
                    System.out.print(dataTable.getDataVariables().get(i).getUnf());
                    System.out.println();
                }

            } else {
                System.err.println("Ingest failed to produce tab file or data table for file " + file + ".");
                System.exit(1);
            }
        } else {
            System.err.println("Ingest resulted in a null tabDataIngest object for file " + file + ".");
            System.exit(1);
        }
    } catch (IOException ex) {
        System.err.println("Caught an exception trying to save ingested data for file " + file + ".");
        System.exit(1);
    }

}