Example usage for java.util.zip ZipFile getInputStream

List of usage examples for java.util.zip ZipFile getInputStream

Introduction

On this page you can find example usage for java.util.zip ZipFile.getInputStream.

Prototype

public InputStream getInputStream(ZipEntry entry) throws IOException 

Document

Returns an input stream for reading the contents of the specified zip file entry.
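
A minimal, self-contained sketch of the call (the archive name example.zip and the entry name data.txt are placeholders, not part of the API):

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

public class ZipEntryReadExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources closes the ZipFile and, per its Javadoc, any streams
        // previously returned by getInputStream
        try (ZipFile zipFile = new ZipFile("example.zip")) {          // placeholder archive name
            ZipEntry entry = zipFile.getEntry("data.txt");            // null if no such entry
            if (entry != null) {
                try (InputStream in = zipFile.getInputStream(entry)) {
                    String text = new String(in.readAllBytes(), StandardCharsets.UTF_8); // Java 9+
                    System.out.println(text);
                }
            }
        }
    }
}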

Usage

From source file:org.deeplearning4j.models.embeddings.loader.WordVectorSerializer.java

/**
 * This method restores Word2Vec model previously saved with writeWord2VecModel
 *
 * PLEASE NOTE: This method loads FULL model, so don't use it if you're only going to use weights.
 *
 * @param file
 * @return
 * @throws IOException
 */
@Deprecated
public static Word2Vec readWord2Vec(File file) throws IOException {
    File tmpFileSyn0 = File.createTempFile("word2vec", "0");
    File tmpFileSyn1 = File.createTempFile("word2vec", "1");
    File tmpFileC = File.createTempFile("word2vec", "c");
    File tmpFileH = File.createTempFile("word2vec", "h");
    File tmpFileF = File.createTempFile("word2vec", "f");

    tmpFileSyn0.deleteOnExit();
    tmpFileSyn1.deleteOnExit();
    tmpFileH.deleteOnExit();
    tmpFileC.deleteOnExit();
    tmpFileF.deleteOnExit();

    int originalFreq = Nd4j.getMemoryManager().getOccasionalGcFrequency();
    boolean originalPeriodic = Nd4j.getMemoryManager().isPeriodicGcActive();

    if (originalPeriodic)
        Nd4j.getMemoryManager().togglePeriodicGc(false);

    Nd4j.getMemoryManager().setOccasionalGcFrequency(50000);

    try {

        ZipFile zipFile = new ZipFile(file);
        ZipEntry syn0 = zipFile.getEntry("syn0.txt");
        InputStream stream = zipFile.getInputStream(syn0);

        Files.copy(stream, Paths.get(tmpFileSyn0.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);

        ZipEntry syn1 = zipFile.getEntry("syn1.txt");
        stream = zipFile.getInputStream(syn1);

        Files.copy(stream, Paths.get(tmpFileSyn1.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);

        ZipEntry codes = zipFile.getEntry("codes.txt");
        stream = zipFile.getInputStream(codes);

        Files.copy(stream, Paths.get(tmpFileC.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);

        ZipEntry huffman = zipFile.getEntry("huffman.txt");
        stream = zipFile.getInputStream(huffman);

        Files.copy(stream, Paths.get(tmpFileH.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);

        ZipEntry config = zipFile.getEntry("config.json");
        stream = zipFile.getInputStream(config);
        StringBuilder builder = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
            String line;
            while ((line = reader.readLine()) != null) {
                builder.append(line);
            }
        }

        VectorsConfiguration configuration = VectorsConfiguration.fromJson(builder.toString().trim());

        // we read first 4 files as w2v model
        Word2Vec w2v = readWord2VecFromText(tmpFileSyn0, tmpFileSyn1, tmpFileC, tmpFileH, configuration);

        // we read frequencies from frequencies.txt, however it's possible that we might not have this file
        ZipEntry frequencies = zipFile.getEntry("frequencies.txt");
        if (frequencies != null) {
            stream = zipFile.getInputStream(frequencies);
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    String[] split = line.split(" ");
                    VocabWord word = w2v.getVocab().tokenFor(decodeB64(split[0]));
                    word.setElementFrequency((long) Double.parseDouble(split[1]));
                    word.setSequencesCount((long) Double.parseDouble(split[2]));
                }
            }
        }

        ZipEntry zsyn1Neg = zipFile.getEntry("syn1Neg.txt");
        if (zsyn1Neg != null) {
            stream = zipFile.getInputStream(zsyn1Neg);

            try (InputStreamReader isr = new InputStreamReader(stream);
                    BufferedReader reader = new BufferedReader(isr)) {
                String line = null;
                List<INDArray> rows = new ArrayList<>();
                while ((line = reader.readLine()) != null) {
                    String[] split = line.split(" ");
                    double[] array = new double[split.length];
                    for (int i = 0; i < split.length; i++) {
                        array[i] = Double.parseDouble(split[i]);
                    }
                    rows.add(Nd4j.create(array));
                }

                // it's possible to have full model without syn1Neg
                if (rows.size() > 0) {
                    INDArray syn1Neg = Nd4j.vstack(rows);
                    ((InMemoryLookupTable) w2v.getLookupTable()).setSyn1Neg(syn1Neg);
                }
            }
        }

        return w2v;
    } finally {
        if (originalPeriodic)
            Nd4j.getMemoryManager().togglePeriodicGc(true);

        Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
    }
}
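
Note that the example above never closes the ZipFile it opens. A condensed sketch of its "copy one entry to a temp file" step using try-with-resources (the entry name syn0.txt comes from the example; the helper itself is illustrative, not DL4J API):

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

public class EntryToTempFile {
    // Copies a single zip entry (e.g. "syn0.txt") to a temp file and returns that file.
    static File copyEntryToTempFile(File archive, String entryName) throws IOException {
        File tmp = File.createTempFile("zip-entry", ".txt");
        tmp.deleteOnExit();
        try (ZipFile zipFile = new ZipFile(archive)) {
            ZipEntry entry = zipFile.getEntry(entryName);
            if (entry == null) {
                throw new IOException("Entry not found in archive: " + entryName);
            }
            try (InputStream stream = zipFile.getInputStream(entry)) {
                Files.copy(stream, Paths.get(tmp.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);
            }
        }
        return tmp;
    }
}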

From source file:org.deeplearning4j.models.embeddings.loader.WordVectorSerializer.java

/**
 * This method restores previously saved w2v model. File can be in one of the following formats:
 * 1) Binary model, either compressed or not. Like well-known Google Model
 * 2) Popular CSV word2vec text format
 * 3) DL4j compressed format
 *
 * In return you get a StaticWord2Vec model, which can be used as a lookup table only, e.g. in a multi-GPU environment.
 *
 * @param file File should point to previously saved w2v model
 * @return
 */
// TODO: this method needs better name :)
public static WordVectors loadStaticModel(File file) {
    if (!file.exists() || file.isDirectory())
        throw new RuntimeException(
                new FileNotFoundException("File [" + file.getAbsolutePath() + "] was not found"));

    int originalFreq = Nd4j.getMemoryManager().getOccasionalGcFrequency();
    boolean originalPeriodic = Nd4j.getMemoryManager().isPeriodicGcActive();

    if (originalPeriodic)
        Nd4j.getMemoryManager().togglePeriodicGc(false);

    Nd4j.getMemoryManager().setOccasionalGcFrequency(50000);

    CompressedRamStorage<Integer> storage = new CompressedRamStorage.Builder<Integer>()
            .useInplaceCompression(false).setCompressor(new NoOp()).emulateIsAbsent(false).build();

    VocabCache<VocabWord> vocabCache = new AbstractCache.Builder<VocabWord>().build();

    // now we need to define which file format we have here
    // if zip - that's dl4j format
    try {
        log.debug("Trying DL4j format...");
        File tmpFileSyn0 = File.createTempFile("word2vec", "syn");

        ZipFile zipFile = new ZipFile(file);
        ZipEntry syn0 = zipFile.getEntry("syn0.txt");
        InputStream stream = zipFile.getInputStream(syn0);

        Files.copy(stream, Paths.get(tmpFileSyn0.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);
        storage.clear();

        try (Reader reader = new CSVReader(tmpFileSyn0)) {
            while (reader.hasNext()) {
                Pair<VocabWord, float[]> pair = reader.next();
                VocabWord word = pair.getFirst();
                storage.store(word.getIndex(), pair.getSecond());

                vocabCache.addToken(word);
                vocabCache.addWordToIndex(word.getIndex(), word.getLabel());

                Nd4j.getMemoryManager().invokeGcOccasionally();
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            if (originalPeriodic)
                Nd4j.getMemoryManager().togglePeriodicGc(true);

            Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
        }
    } catch (Exception e) {
        //
        try {
            // try to load file as text csv
            vocabCache = new AbstractCache.Builder<VocabWord>().build();
            storage.clear();
            log.debug("Trying CSVReader...");
            try (Reader reader = new CSVReader(file)) {
                while (reader.hasNext()) {
                    Pair<VocabWord, float[]> pair = reader.next();
                    VocabWord word = pair.getFirst();
                    storage.store(word.getIndex(), pair.getSecond());

                    vocabCache.addToken(word);
                    vocabCache.addWordToIndex(word.getIndex(), word.getLabel());

                    Nd4j.getMemoryManager().invokeGcOccasionally();
                }
            } catch (Exception ef) {
                // we discard this exception and fall through to loading the data as a binary model
                throw new RuntimeException(ef);
            } finally {
                if (originalPeriodic)
                    Nd4j.getMemoryManager().togglePeriodicGc(true);

                Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
            }
        } catch (Exception ex) {
            // otherwise it's probably a Google model, which might be compressed or not
            log.debug("Trying BinaryReader...");
            vocabCache = new AbstractCache.Builder<VocabWord>().build();
            storage.clear();
            try (Reader reader = new BinaryReader(file)) {
                while (reader.hasNext()) {
                    Pair<VocabWord, float[]> pair = reader.next();
                    VocabWord word = pair.getFirst();

                    storage.store(word.getIndex(), pair.getSecond());

                    vocabCache.addToken(word);
                    vocabCache.addWordToIndex(word.getIndex(), word.getLabel());

                    Nd4j.getMemoryManager().invokeGcOccasionally();
                }
            } catch (Exception ez) {
                throw new RuntimeException("Unable to guess input file format");
            } finally {
                if (originalPeriodic)
                    Nd4j.getMemoryManager().togglePeriodicGc(true);

                Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
            }
        } finally {
            if (originalPeriodic)
                Nd4j.getMemoryManager().togglePeriodicGc(true);

            Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
        }
    }

    StaticWord2Vec word2Vec = new StaticWord2Vec.Builder(storage, vocabCache).build();

    return word2Vec;
}

From source file:org.deeplearning4j.models.embeddings.loader.WordVectorSerializer.java

/**
 * This method restores ParagraphVectors model previously saved with writeParagraphVectors()
 *
 * @return
 */
public static ParagraphVectors readParagraphVectors(File file) throws IOException {
    File tmpFileL = File.createTempFile("paravec", "l");
    tmpFileL.deleteOnExit();

    Word2Vec w2v = readWord2Vec(file);

    // and "convert" it to ParaVec model + optionally trying to restore labels information
    ParagraphVectors vectors = new ParagraphVectors.Builder(w2v.getConfiguration()).vocabCache(w2v.getVocab())
            .lookupTable(w2v.getLookupTable()).resetModel(false).build();

    ZipFile zipFile = new ZipFile(file);

    // now we try to restore labels information
    ZipEntry labels = zipFile.getEntry("labels.txt");
    if (labels != null) {
        InputStream stream = zipFile.getInputStream(labels);

        Files.copy(stream, Paths.get(tmpFileL.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);
        try (BufferedReader reader = new BufferedReader(new FileReader(tmpFileL))) {
            String line;
            while ((line = reader.readLine()) != null) {
                VocabWord word = vectors.getVocab().tokenFor(decodeB64(line.trim()));
                if (word != null) {
                    word.markAsLabel(true);
                }
            }
        }
    }

    vectors.extractLabels();

    return vectors;
}
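
The labels step above copies the entry to a temporary file before reading it; the stream returned by getInputStream can just as well be read line by line directly. A sketch under that assumption (the entry name labels.txt comes from the example; the helper is illustrative):

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

public class ReadLabels {
    static List<String> readLabels(File archive) throws IOException {
        List<String> labels = new ArrayList<>();
        try (ZipFile zipFile = new ZipFile(archive)) {
            ZipEntry entry = zipFile.getEntry("labels.txt");
            if (entry == null) {
                return labels;                       // no labels stored in this archive
            }
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(zipFile.getInputStream(entry), StandardCharsets.UTF_8))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    labels.add(line.trim());
                }
            }
        }
        return labels;
    }
}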

From source file:com.simpligility.maven.plugins.android.phase09package.ApkMojo.java

private void updateWithMetaInf(ZipOutputStream zos, File jarFile, Set<String> entries, boolean metaInfOnly)
        throws IOException {
    ZipFile zin = new ZipFile(jarFile);

    for (Enumeration<? extends ZipEntry> en = zin.entries(); en.hasMoreElements();) {
        ZipEntry ze = en.nextElement();

        if (ze.isDirectory()) {
            continue;
        }

        String zn = ze.getName();

        if (metaInfOnly) {
            if (!zn.startsWith("META-INF/")) {
                continue;
            }

            if (!this.apkMetaInf.isIncluded(zn)) {
                continue;
            }
        }

        boolean resourceTransformed = false;

        if (transformers != null) {
            for (ResourceTransformer transformer : transformers) {
                if (transformer.canTransformResource(zn)) {
                    getLog().info("Transforming " + zn + " using " + transformer.getClass().getName());
                    InputStream is = zin.getInputStream(ze);
                    transformer.processResource(zn, is, null);
                    is.close();
                    resourceTransformed = true;
                    break;
                }
            }
        }

        if (!resourceTransformed) {
            // Avoid duplicates that aren't accounted for by the resource transformers
            if (metaInfOnly && this.extractDuplicates && !entries.add(zn)) {
                continue;
            }

            InputStream is = zin.getInputStream(ze);

            final ZipEntry ne;
            if (ze.getMethod() == ZipEntry.STORED) {
                ne = new ZipEntry(ze);
            } else {
                ne = new ZipEntry(zn);
            }

            zos.putNextEntry(ne);

            copyStreamWithoutClosing(is, zos);

            is.close();
            zos.closeEntry();
        }
    }

    zin.close();
}
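
The core pattern here, copying selected entries from an existing archive into a ZipOutputStream, can be reduced to a short sketch (the META-INF filtering and the Maven resource transformers are left out; copyEntries is an illustrative helper, not part of the plugin):

import java.io.IOException;
import java.io.InputStream;
import java.util.Enumeration;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;

public class ZipCopy {
    // Copies every non-directory entry of `source` into an already-open ZipOutputStream.
    static void copyEntries(ZipFile source, ZipOutputStream zos) throws IOException {
        for (Enumeration<? extends ZipEntry> en = source.entries(); en.hasMoreElements();) {
            ZipEntry ze = en.nextElement();
            if (ze.isDirectory()) {
                continue;
            }
            zos.putNextEntry(new ZipEntry(ze.getName()));
            try (InputStream is = source.getInputStream(ze)) {
                is.transferTo(zos);   // Java 9+; writes the entry body without closing zos
            }
            zos.closeEntry();
        }
    }
}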

From source file:net.amigocraft.mpt.util.MiscUtil.java

public static boolean unzip(ZipFile zip, File dest, List<String> files) throws MPTException {
    boolean returnValue = true;
    try {
        List<String> existingDirs = new ArrayList<>();
        Enumeration<? extends ZipEntry> en = zip.entries();
        entryLoop: while (en.hasMoreElements()) {
            ZipEntry entry = en.nextElement();
            String name = entry.getName().startsWith("./")
                    ? entry.getName().substring(2, entry.getName().length())
                    : entry.getName();
            File file = new File(dest, name);
            if (entry.isDirectory()) {
                if (file.exists()) {
                    if (DISALLOW_MERGE) {
                        existingDirs.add(name);
                        if (VERBOSE)
                            Main.log.warning("Refusing to extract directory " + name + ": already exists");
                    }
                }
            } else {
                files.add(name);
                for (String dir : DISALLOWED_DIRECTORIES) {
                    if (file.getPath().startsWith(dir)) {
                        if (VERBOSE)
                            Main.log.warning("Refusing to extract " + name + " from " + zip.getName()
                                    + ": parent directory \"" + dir + "\" is not allowed");
                        continue entryLoop;
                    }
                }
                if (DISALLOW_MERGE) {
                    for (String dir : existingDirs) {
                        if (file.getPath().substring(2, file.getPath().length()).replace(File.separator, "/")
                                .startsWith(dir)) {
                            continue entryLoop;
                        }
                    }
                }
                if (!DISALLOW_OVERWRITE || !file.exists()) {
                    file.getParentFile().mkdirs();
                    for (String ext : DISALLOWED_EXTENSIONS) {
                        if (file.getName().endsWith(ext)) {
                            if (VERBOSE)
                                Main.log.warning("Refusing to extract " + name + " from " + zip.getName()
                                        + ": extension \"" + ext + "\" is not allowed");
                            returnValue = false;
                            continue entryLoop;
                        }
                    }
                    BufferedInputStream bIs = new BufferedInputStream(zip.getInputStream(entry));
                    int b;
                    byte[] buffer = new byte[1024];
                    FileOutputStream fOs = new FileOutputStream(file);
                    BufferedOutputStream bOs = new BufferedOutputStream(fOs, 1024);
                    while ((b = bIs.read(buffer, 0, 1024)) != -1)
                        bOs.write(buffer, 0, b);
                    bOs.flush();
                    bOs.close();
                    bIs.close();
                } else {
                    if (VERBOSE)
                        Main.log.warning(
                                "Refusing to extract " + name + " from " + zip.getName() + ": already exists");
                    returnValue = false;
                }
            }
        }
    } catch (Exception ex) {
        ex.printStackTrace(); //TODO
        throw new MPTException(ERROR_COLOR + "Failed to extract archive!");
    }
    return returnValue;
}
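
When extracting to disk it is also worth normalizing each entry path against the destination directory, so entries containing ".." cannot escape it. A defensive sketch along those lines (not part of the MPT code above):

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Enumeration;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

public class SafeUnzip {
    static void extract(ZipFile zip, Path dest) throws IOException {
        Path base = dest.toAbsolutePath().normalize();
        for (Enumeration<? extends ZipEntry> en = zip.entries(); en.hasMoreElements();) {
            ZipEntry entry = en.nextElement();
            Path target = base.resolve(entry.getName()).normalize();
            if (!target.startsWith(base)) {
                // "zip slip" guard: refuse entries that would resolve outside dest
                throw new IOException("Blocked entry outside destination: " + entry.getName());
            }
            if (entry.isDirectory()) {
                Files.createDirectories(target);
            } else {
                Files.createDirectories(target.getParent());
                try (InputStream in = zip.getInputStream(entry)) {
                    Files.copy(in, target, StandardCopyOption.REPLACE_EXISTING);
                }
            }
        }
    }
}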

From source file:com.flexive.core.storage.GenericDivisionImporter.java

/**
 * Get division export information from an exported archive
 *
 * @param zip zip file containing the export
 * @return FxDivisionExportInfo
 * @throws FxApplicationException on errors
 */
public FxDivisionExportInfo getDivisionExportInfo(ZipFile zip) throws FxApplicationException {
    ZipEntry ze = getZipEntry(zip, FILE_BUILD_INFOS);
    FxDivisionExportInfo exportInfo;
    try {
        DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        Document document = builder.parse(zip.getInputStream(ze));

        XPath xPath = XPathFactory.newInstance().newXPath();

        String[] drops;
        String dropsRaw = xPath.evaluate("/flexive/drops", document);
        if (dropsRaw == null || !dropsRaw.startsWith("["))
            drops = new String[0];
        else {
            dropsRaw = dropsRaw.substring(1, dropsRaw.length() - 1);
            drops = dropsRaw.split(", ");
        }
        exportInfo = new FxDivisionExportInfo(Integer.parseInt(xPath.evaluate("/flexive/division", document)),
                Integer.parseInt(xPath.evaluate("/flexive/schema", document)),
                Integer.parseInt(xPath.evaluate("/flexive/build", document)),
                xPath.evaluate("/flexive/verbose", document), xPath.evaluate("/flexive/appserver", document),
                xPath.evaluate("/flexive/database", document), xPath.evaluate("/flexive/dbdriver", document),
                xPath.evaluate("/flexive/domain", document), Arrays.asList(drops),
                xPath.evaluate("/flexive/user", document),
                FxFormatUtils.getDateTimeFormat().parse(xPath.evaluate("/flexive/date", document)));
    } catch (Exception e) {
        throw new FxApplicationException(e, "ex.import.parseInfoFailed", e.getMessage());
    }
    return exportInfo;
}
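
A stripped-down version of the parse step, with the entry name passed in as a parameter (the value of FILE_BUILD_INFOS is not shown in the excerpt, so it is not assumed here):

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;

public class ZipXmlParser {
    // Parses an XML entry of the archive into a DOM Document; the Document is fully
    // built before the archive is closed, so returning it afterwards is safe.
    static Document parseEntry(File archive, String entryName)
            throws IOException, SAXException, ParserConfigurationException {
        try (ZipFile zip = new ZipFile(archive)) {
            ZipEntry entry = zip.getEntry(entryName);
            if (entry == null) {
                throw new IOException("Missing entry: " + entryName);
            }
            try (InputStream in = zip.getInputStream(entry)) {
                return DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in);
            }
        }
    }
}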

From source file:org.deeplearning4j.models.embeddings.loader.WordVectorSerializer.java

/**
 * This method restores a previously saved w2v model. The file can be in one of the following formats:
 * 1) Binary model, either compressed or not. Like well-known Google Model
 * 2) Popular CSV word2vec text format
 * 3) DL4j compressed format
 *
 * Please note: if extended data isn't available, only weights will be loaded instead.
 *
 * @param file
 * @param extendedModel if TRUE, we'll try to load HS states & Huffman tree info, if FALSE, only weights will be loaded
 * @return
 */
public static Word2Vec readWord2VecModel(@NonNull File file, boolean extendedModel) {
    InMemoryLookupTable<VocabWord> lookupTable = new InMemoryLookupTable<>();
    AbstractCache<VocabWord> vocabCache = new AbstractCache<>();
    Word2Vec vec;
    INDArray syn0 = null;
    VectorsConfiguration configuration = new VectorsConfiguration();

    if (!file.exists() || !file.isFile())
        throw new ND4JIllegalStateException("File [" + file.getAbsolutePath() + "] doesn't exist");

    int originalFreq = Nd4j.getMemoryManager().getOccasionalGcFrequency();
    boolean originalPeriodic = Nd4j.getMemoryManager().isPeriodicGcActive();

    if (originalPeriodic)
        Nd4j.getMemoryManager().togglePeriodicGc(false);

    Nd4j.getMemoryManager().setOccasionalGcFrequency(50000);

    // try to load zip format
    try {
        if (extendedModel) {
            log.debug("Trying full model restoration...");
            // this method just loads full compressed model

            if (originalPeriodic)
                Nd4j.getMemoryManager().togglePeriodicGc(true);

            Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);

            return readWord2Vec(file);
        } else {
            log.debug("Trying simplified model restoration...");

            File tmpFileSyn0 = File.createTempFile("word2vec", "syn");
            File tmpFileConfig = File.createTempFile("word2vec", "config");
            // we don't need full model, so we go directly to syn0 file

            ZipFile zipFile = new ZipFile(file);
            ZipEntry syn = zipFile.getEntry("syn0.txt");
            InputStream stream = zipFile.getInputStream(syn);

            Files.copy(stream, Paths.get(tmpFileSyn0.getAbsolutePath()), StandardCopyOption.REPLACE_EXISTING);

            // now we're restoring configuration saved earlier
            ZipEntry config = zipFile.getEntry("config.json");
            if (config != null) {
                stream = zipFile.getInputStream(config);

                StringBuilder builder = new StringBuilder();
                try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
                    String line;
                    while ((line = reader.readLine()) != null) {
                        builder.append(line);
                    }
                }

                configuration = VectorsConfiguration.fromJson(builder.toString().trim());
            }

            ZipEntry ve = zipFile.getEntry("frequencies.txt");
            if (ve != null) {
                stream = zipFile.getInputStream(ve);
                AtomicInteger cnt = new AtomicInteger(0);
                try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
                    String line;
                    while ((line = reader.readLine()) != null) {
                        String[] split = line.split(" ");
                        VocabWord word = new VocabWord(Double.valueOf(split[1]), decodeB64(split[0]));
                        word.setIndex(cnt.getAndIncrement());
                        word.incrementSequencesCount(Long.valueOf(split[2]));

                        vocabCache.addToken(word);
                        vocabCache.addWordToIndex(word.getIndex(), word.getLabel());

                        Nd4j.getMemoryManager().invokeGcOccasionally();
                    }
                }
            }

            List<INDArray> rows = new ArrayList<>();
            // basically read everything, call vstack and then return the model
            try (Reader reader = new CSVReader(tmpFileSyn0)) {
                AtomicInteger cnt = new AtomicInteger(0);
                while (reader.hasNext()) {
                    Pair<VocabWord, float[]> pair = reader.next();
                    VocabWord word = pair.getFirst();
                    INDArray vector = Nd4j.create(pair.getSecond());

                    if (ve != null) {
                        if (syn0 == null)
                            syn0 = Nd4j.create(vocabCache.numWords(), vector.length());

                        syn0.getRow(cnt.getAndIncrement()).assign(vector);
                    } else {
                        rows.add(vector);

                        vocabCache.addToken(word);
                        vocabCache.addWordToIndex(word.getIndex(), word.getLabel());
                    }

                    Nd4j.getMemoryManager().invokeGcOccasionally();
                }
            } catch (Exception e) {
                throw new RuntimeException(e);
            } finally {
                if (originalPeriodic)
                    Nd4j.getMemoryManager().togglePeriodicGc(true);

                Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
            }

            if (syn0 == null && vocabCache.numWords() > 0)
                syn0 = Nd4j.vstack(rows);

            if (syn0 == null) {
                log.error("Can't build syn0 table");
                throw new DL4JInvalidInputException("Can't build syn0 table");
            }

            lookupTable = new InMemoryLookupTable.Builder<VocabWord>().cache(vocabCache)
                    .vectorLength(syn0.columns()).useHierarchicSoftmax(false).useAdaGrad(false).build();

            lookupTable.setSyn0(syn0);

            try {
                tmpFileSyn0.delete();
                tmpFileConfig.delete();
            } catch (Exception e) {
                //
            }
        }
    } catch (Exception e) {
        // let's try to load this file as csv file
        try {
            log.debug("Trying CSV model restoration...");

            Pair<InMemoryLookupTable, VocabCache> pair = loadTxt(file);
            lookupTable = pair.getFirst();
            vocabCache = (AbstractCache<VocabWord>) pair.getSecond();
        } catch (Exception ex) {
            // we fallback to trying binary model instead
            try {
                log.debug("Trying binary model restoration...");

                if (originalPeriodic)
                    Nd4j.getMemoryManager().togglePeriodicGc(true);

                Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);

                vec = loadGoogleModel(file, true, true);
                return vec;
            } catch (Exception ey) {
                // try to load without linebreaks
                try {
                    if (originalPeriodic)
                        Nd4j.getMemoryManager().togglePeriodicGc(true);

                    Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);

                    vec = loadGoogleModel(file, true, false);
                    return vec;
                } catch (Exception ez) {
                    throw new RuntimeException(
                            "Unable to guess input file format. Please use corresponding loader directly");
                }
            }
        }
    }

    Word2Vec.Builder builder = new Word2Vec.Builder(configuration).lookupTable(lookupTable).useAdaGrad(false)
            .vocabCache(vocabCache).layerSize(lookupTable.layerSize())

            // we don't use hs here, because model is incomplete
            .useHierarchicSoftmax(false).resetModel(false);

    /*
    Trying to restore TokenizerFactory & TokenPreProcessor
     */

    TokenizerFactory factory = getTokenizerFactory(configuration);
    if (factory != null)
        builder.tokenizerFactory(factory);

    vec = builder.build();

    return vec;
}

From source file:com.edgenius.wiki.service.impl.BackupServiceImpl.java

public String getFileComment(File zipFile) {
    String comment = "";
    ZipFile zip = null;
    try {
        zip = new ZipFile(zipFile);
        ZipEntry entry = zip.getEntry(COMMENT_FILE_NAME);
        if (entry != null) {
            comment = IOUtils.toString(zip.getInputStream(entry));
        }
    } catch (Exception e) {
        log.info("backup/restore file comment not available:" + zipFile.getAbsolutePath());
    } finally {
        if (zip != null)
            try {
                zip.close();
            } catch (Exception e) {
            }
    }

    return comment;
}
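
On Java 9+ the same lookup can be done without Commons IO by reading the entry stream directly; a sketch, with the comment entry name passed in rather than assuming the value of COMMENT_FILE_NAME:

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

public class ZipCommentReader {
    static String readComment(File zipFile, String commentEntryName) {
        try (ZipFile zip = new ZipFile(zipFile)) {
            ZipEntry entry = zip.getEntry(commentEntryName);
            if (entry == null) {
                return "";
            }
            try (InputStream in = zip.getInputStream(entry)) {
                return new String(in.readAllBytes(), StandardCharsets.UTF_8); // Java 9+
            }
        } catch (IOException e) {
            // mirror the example above: treat a missing or unreadable comment as empty
            return "";
        }
    }
}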

From source file:org.hammurapi.TaskBase.java

protected File processArchive() {
    if (archive == null) {
        return null;
    }

    String tmpDirProperty = System.getProperty("java.io.tmpdir");
    File tmpDir = tmpDirProperty == null ? new File(".") : new File(tmpDirProperty);
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
    String prefix = "har_" + sdf.format(new Date());
    File workDir = unpackDir == null ? new File(tmpDir, prefix) : unpackDir;

    for (int i = 0; unpackDir == null && workDir.exists(); i++) {
        workDir = new File(tmpDir, prefix + "_" + Integer.toString(i, Character.MAX_RADIX));
    }

    if (workDir.exists() || workDir.mkdir()) {
        try {
            ZipFile zipFile = new ZipFile(archive);
            Enumeration entries = zipFile.entries();
            while (entries.hasMoreElements()) {
                ZipEntry entry = (ZipEntry) entries.nextElement();
                if (!entry.getName().endsWith("/")) {
                    File outFile = new File(workDir, entry.getName().replace('/', File.separatorChar));
                    if (!outFile.getParentFile().exists() && !outFile.getParentFile().mkdirs()) {
                        throw new BuildException("Directory does not exist and cannot be created: "
                                + outFile.getParentFile().getAbsolutePath());
                    }

                    log("Archive entry " + entry.getName() + " unpacked to " + outFile.getAbsolutePath(),
                            Project.MSG_DEBUG);

                    byte[] buf = new byte[4096];
                    int l;
                    InputStream in = zipFile.getInputStream(entry);
                    FileOutputStream fos = new FileOutputStream(outFile);
                    while ((l = in.read(buf)) != -1) {
                        fos.write(buf, 0, l);
                    }
                    in.close();
                    fos.close();
                }
            }
            zipFile.close();

            File configFile = new File(workDir, "config.xml");
            if (configFile.exists() && configFile.isFile()) {
                Document configDoc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                        .parse(configFile);
                processConfig(workDir, configDoc.getDocumentElement());
            } else {
                throw new BuildException("Archive configuration file does not exist or is not a file");
            }
        } catch (ZipException e) {
            throw new BuildException(e.toString(), e);
        } catch (IOException e) {
            throw new BuildException(e.toString(), e);
        } catch (SAXException e) {
            throw new BuildException(e.toString(), e);
        } catch (ParserConfigurationException e) {
            throw new BuildException(e.toString(), e);
        } catch (FactoryConfigurationError e) {
            throw new BuildException(e.toString(), e);
        }
    } else {
        throw new BuildException("Could not create directory " + workDir.getAbsolutePath());
    }
    return unpackDir == null ? workDir : null;
}

From source file:com.mirth.connect.server.controllers.DefaultExtensionController.java

@Override
public InstallationResult extractExtension(InputStream inputStream) {
    Throwable cause = null;
    Set<MetaData> metaDataSet = new HashSet<MetaData>();

    File installTempDir = new File(ExtensionController.getExtensionsPath(), "install_temp");

    if (!installTempDir.exists()) {
        installTempDir.mkdir();
    }

    File tempFile = null;
    FileOutputStream tempFileOutputStream = null;
    ZipFile zipFile = null;

    try {
        /*
         * create a new temp file (in the install temp dir) to store the zip file contents
         */
        tempFile = File.createTempFile(ServerUUIDGenerator.getUUID(), ".zip", installTempDir);
        // write the contents of the multipart fileitem to the temp file
        try {
            tempFileOutputStream = new FileOutputStream(tempFile);
            IOUtils.copy(inputStream, tempFileOutputStream);
        } finally {
            IOUtils.closeQuietly(tempFileOutputStream);
        }

        // create a new zip file from the temp file
        zipFile = new ZipFile(tempFile);
        // get a list of all of the entries in the zip file
        Enumeration<? extends ZipEntry> entries = zipFile.entries();

        while (entries.hasMoreElements()) {
            ZipEntry entry = entries.nextElement();
            String entryName = entry.getName();

            if (entryName.endsWith("plugin.xml") || entryName.endsWith("destination.xml")
                    || entryName.endsWith("source.xml")) {
                // parse the extension metadata xml file
                MetaData extensionMetaData = serializer
                        .deserialize(IOUtils.toString(zipFile.getInputStream(entry)), MetaData.class);
                metaDataSet.add(extensionMetaData);

                if (!extensionLoader.isExtensionCompatible(extensionMetaData)) {
                    if (cause == null) {
                        cause = new VersionMismatchException("Extension \"" + entry.getName()
                                + "\" is not compatible with this version of Mirth Connect.");
                    }
                }
            }
        }

        if (cause == null) {
            // reset the entries and extract
            entries = zipFile.entries();

            while (entries.hasMoreElements()) {
                ZipEntry entry = entries.nextElement();

                if (entry.isDirectory()) {
                    /*
                     * assume directories are stored parents first then children.
                     * 
                     * TODO: this is not robust, just for demonstration purposes.
                     */
                    File directory = new File(installTempDir, entry.getName());
                    directory.mkdir();
                } else {
                    // otherwise, write the file out to the install temp dir
                    InputStream zipInputStream = zipFile.getInputStream(entry);
                    OutputStream outputStream = new BufferedOutputStream(
                            new FileOutputStream(new File(installTempDir, entry.getName())));
                    IOUtils.copy(zipInputStream, outputStream);
                    IOUtils.closeQuietly(zipInputStream);
                    IOUtils.closeQuietly(outputStream);
                }
            }
        }
    } catch (Throwable t) {
        cause = new ControllerException("Error extracting extension. " + t.toString(), t);
    } finally {
        if (zipFile != null) {
            try {
                zipFile.close();
            } catch (Exception e) {
                cause = new ControllerException(e);
            }
        }

        // delete the temp file since it is no longer needed
        FileUtils.deleteQuietly(tempFile);
    }

    return new InstallationResult(cause, metaDataSet);
}