Example usage for java.lang.String.replace

List of usage examples for java.lang.String.replace

Introduction

On this page you can find example usage for java.lang.String.replace(CharSequence, CharSequence).

Prototype

public String replace(CharSequence target, CharSequence replacement) 

Document

Replaces each substring of this string that matches the literal target sequence with the specified literal replacement sequence.
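
Before the usage examples from real projects, here is a minimal stand-alone sketch (not taken from any of the sources below) illustrating the behavior: both arguments are treated as literal character sequences rather than regular expressions, and every occurrence of the target is replaced.

public class ReplaceDemo {
    public static void main(String[] args) {
        // Every literal occurrence of the target is replaced
        System.out.println("com/example/Main".replace("/", ".")); // com.example.Main

        // The target is literal, so regex metacharacters need no escaping
        System.out.println("1+1=2".replace("+", " plus "));       // 1 plus 1=2
    }
}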

Usage

From source file:com.aistor.generate.Generate.java

public static void main(String[] args) throws Exception {

    // ========== Code generation configuration ====================

    // Generated file layout:
    // {packageName}/{moduleName}/{dao,entity,service,web}/{subModuleName}/{className}

    // packageName must match the base-package / packagesToScan settings in applicationContext.xml and spring-mvc.xml
    String packageName = "com.aistor.modules";
    String packageName = "com.aistor.modules";

    String moduleName = "factory"; // module name, e.g. sys
    String subModuleName = ""; // sub-module name (may be empty)
    String className = "product"; // class name, e.g. user
    String classAuthor = "Zaric"; // author name
    String functionName = "?"; // function description

    // Generation switch
    Boolean isEnable = false;

    // ========== End of configuration ====================

    if (!isEnable) {
        logger.error("????isEnable = true");
        return;
    }

    if (StringUtils.isBlank(packageName) || StringUtils.isBlank(moduleName) || StringUtils.isBlank(className)
            || StringUtils.isBlank(functionName)) {
        logger.error("packageName, moduleName, className and functionName must not be blank");
        return;
    }

    // Resolve file paths
    String separator = File.separator;
    String classPath = new DefaultResourceLoader().getResource("").getFile().getPath();
    String templatePath = classPath.replace(
            separator + "webapp" + separator + "WEB-INF" + separator + "classes",
            separator + "java" + separator + "com" + separator + "aistor" + separator + "modules");
    String javaPath = classPath.replace(separator + "webapp" + separator + "WEB-INF" + separator + "classes",
            separator + "java" + separator + (StringUtils.lowerCase(packageName)).replace(".", separator));
    String viewPath = classPath.replace(separator + "classes", separator + "views");

    // Template configuration
    Configuration cfg = new Configuration();
    cfg.setDirectoryForTemplateLoading(
            new File(templatePath.replace("modules", "generate" + separator + "template")));

    // Template data model
    Map<String, String> model = Maps.newHashMap();
    model.put("packageName", StringUtils.lowerCase(packageName));
    model.put("moduleName", StringUtils.lowerCase(moduleName));
    model.put("subModuleName",
            StringUtils.isNotBlank(subModuleName) ? "." + StringUtils.lowerCase(subModuleName) : "");
    model.put("className", StringUtils.uncapitalize(className));
    model.put("ClassName", StringUtils.capitalize(className));
    model.put("classAuthor", StringUtils.isNotBlank(classAuthor) ? classAuthor : "Generate Tools");
    model.put("classVersion", DateUtils.getDate());
    model.put("functionName", functionName);
    model.put("tableName",
            model.get("moduleName")
                    + (StringUtils.isNotBlank(subModuleName) ? "_" + StringUtils.lowerCase(subModuleName) : "")
                    + "_" + model.get("className"));
    model.put("urlPrefix",
            model.get("moduleName")
                    + (StringUtils.isNotBlank(subModuleName) ? "/" + StringUtils.lowerCase(subModuleName) : "")
                    + "/" + model.get("className"));
    model.put("viewPrefix",
            StringUtils.substringAfterLast(model.get("packageName"), ".") + "/" + model.get("urlPrefix"));
    model.put("permissionPrefix",
            model.get("moduleName")
                    + (StringUtils.isNotBlank(subModuleName) ? ":" + StringUtils.lowerCase(subModuleName) : "")
                    + ":" + model.get("className"));

    // Generate Entity
    Template template = cfg.getTemplate("entity.ftl");
    String content = FreeMarkers.renderTemplate(template, model);
    String filePath = javaPath + separator + model.get("moduleName") + separator + "entity" + separator
            + StringUtils.lowerCase(subModuleName) + separator + model.get("ClassName") + ".java";
    writeFile(content, filePath);
    logger.info(filePath);

    // Generate Dao
    template = cfg.getTemplate("dao.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = javaPath + separator + model.get("moduleName") + separator + "dao" + separator
            + StringUtils.lowerCase(subModuleName) + separator + model.get("ClassName") + "Dao.java";
    writeFile(content, filePath);
    logger.info(filePath);

    // Generate Service
    template = cfg.getTemplate("service.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = javaPath + separator + model.get("moduleName") + separator + "service" + separator
            + StringUtils.lowerCase(subModuleName) + separator + model.get("ClassName") + "Service.java";
    writeFile(content, filePath);
    logger.info(filePath);

    // Generate Controller
    template = cfg.getTemplate("controller.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = javaPath + separator + model.get("moduleName") + separator + "web" + separator
            + StringUtils.lowerCase(subModuleName) + separator + model.get("ClassName") + "Controller.java";
    writeFile(content, filePath);
    logger.info(filePath);

    // Generate ViewForm
    template = cfg.getTemplate("viewForm.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = viewPath + separator + StringUtils.substringAfterLast(model.get("packageName"), ".") + separator
            + model.get("moduleName") + separator + StringUtils.lowerCase(subModuleName) + separator
            + model.get("className") + "Form.jsp";
    writeFile(content, filePath);
    logger.info(filePath);

    // Generate ViewList
    template = cfg.getTemplate("viewList.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = viewPath + separator + StringUtils.substringAfterLast(model.get("packageName"), ".") + separator
            + model.get("moduleName") + separator + StringUtils.lowerCase(subModuleName) + separator
            + model.get("className") + "List.jsp";
    writeFile(content, filePath);
    logger.info(filePath);

    logger.info("????");
}

From source file:net.minecraftforge.fml.common.patcher.GenDiffSet.java

public static void main(String[] args) throws IOException {
    String sourceJar = args[0]; //Clean Vanilla jar minecraft.jar or minecraft_server.jar
    String targetDir = args[1]; //Directory containing obfed output classes, typically mcp/reobf/minecraft
    String deobfData = args[2]; //Path to FML's deobfuscation_data.lzma
    String outputDir = args[3]; //Path to place generated .binpatch
    String killTarget = args[4]; //"true" if we should destroy the target file if it generated a successful .binpatch

    LogManager.getLogger("GENDIFF").log(Level.INFO,
            String.format("Creating patches at %s for %s from %s", outputDir, sourceJar, targetDir));
    Delta delta = new Delta();
    FMLDeobfuscatingRemapper remapper = FMLDeobfuscatingRemapper.INSTANCE;
    remapper.setupLoadOnly(deobfData, false);
    JarFile sourceZip = new JarFile(sourceJar);
    boolean kill = killTarget.equalsIgnoreCase("true");

    File f = new File(outputDir);
    f.mkdirs();

    for (String name : remapper.getObfedClasses()) {
        //            Logger.getLogger("GENDIFF").info(String.format("Evaluating path for data :%s",name));
        String fileName = name;
        String jarName = name;
        if (RESERVED_NAMES.contains(name.toUpperCase(Locale.ENGLISH))) {
            fileName = "_" + name;
        }
        File targetFile = new File(targetDir, fileName.replace('/', File.separatorChar) + ".class");
        jarName = jarName + ".class";
        if (targetFile.exists()) {
            String sourceClassName = name.replace('/', '.');
            String targetClassName = remapper.map(name).replace('/', '.');
            JarEntry entry = sourceZip.getJarEntry(jarName);
            byte[] vanillaBytes = toByteArray(sourceZip, entry);
            byte[] patchedBytes = Files.toByteArray(targetFile);

            byte[] diff = delta.compute(vanillaBytes, patchedBytes);

            ByteArrayDataOutput diffOut = ByteStreams.newDataOutput(diff.length + 50);
            // Original name
            diffOut.writeUTF(name);
            // Source name
            diffOut.writeUTF(sourceClassName);
            // Target name
            diffOut.writeUTF(targetClassName);
            // exists at original
            diffOut.writeBoolean(entry != null);
            if (entry != null) {
                diffOut.writeInt(Hashing.adler32().hashBytes(vanillaBytes).asInt());
            }
            // length of patch
            diffOut.writeInt(diff.length);
            // patch
            diffOut.write(diff);

            File target = new File(outputDir, targetClassName + ".binpatch");
            target.getParentFile().mkdirs();
            Files.write(diffOut.toByteArray(), target);
            Logger.getLogger("GENDIFF").info(String.format("Wrote patch for %s (%s) at %s", name,
                    targetClassName, target.getAbsolutePath()));
            if (kill) {
                targetFile.delete();
                Logger.getLogger("GENDIFF").info(String.format("  Deleted target: %s", targetFile.toString()));
            }
        }
    }
    sourceZip.close();
}

From source file:Satellite.java

/** Program entry point.
 * @param args program arguments (unused here)
 */
public static void main(String[] args) {
    try {

        // configure Orekit
        AutoconfigurationCustom.configureOrekit();

        //  Initial state definition : date, orbit
        AbsoluteDate targetDate = new AbsoluteDate(2015, 12, 15, 2, 54, 27.000, TimeScalesFactory.getUTC());
        //*******/            double mu =  3.986004415e+14; // gravitation coefficient
        //*******/            Frame inertialFrame = FramesFactory.getEME2000(); // inertial frame for orbit definition
        //*******/            Vector3D position  = new Vector3D(-6142438.668, 3492467.560, -25767.25680);
        //*******/            Vector3D velocity  = new Vector3D(505.8479685, 942.7809215, 7435.922231);
        //*******/            PVCoordinates pvCoordinates = new PVCoordinates(position, velocity);
        //*******/            Orbit initialOrbit = new KeplerianOrbit(pvCoordinates, inertialFrame, initialDate, mu);
        String Line1 = "1 25544U 98067A   15348.82280235  .00015563  00000-0  23610-3 0  9996";
        String Line2 = "2 25544  51.6445 262.5935 0007865 276.8969 187.2494 15.54770155976144";
        TLE TLEdata = new TLE(Line1, Line2);

        // Propagator : consider a simple keplerian motion (could be more elaborate)
        Propagator TLEProp = TLEPropagator.selectExtrapolator(TLEdata);

        //            // Earth and frame
        //            Frame earthFrame = FramesFactory.getITRF(IERSConventions.IERS_2010, true);
        //            BodyShape earth = new OneAxisEllipsoid(Constants.WGS84_EARTH_EQUATORIAL_RADIUS,
        //                                                   Constants.WGS84_EARTH_FLATTENING,
        //                                                   earthFrame);
        //
        //            // Station
        //            final double longitude = FastMath.toRadians(89.);
        //            final double latitude  = FastMath.toRadians(-8);
        //            final double altitude  = 0.;
        //            final GeodeticPoint station1 = new GeodeticPoint(latitude, longitude, altitude);
        //            final TopocentricFrame sta1Frame = new TopocentricFrame(earth, station1, "station1");
        //
        //            // Event definition
        //            final double maxcheck  = 60.0;
        //            final double threshold =  0.001;
        //            final double elevation = FastMath.toRadians(5.0);
        //            final EventDetector sta1Visi =
        //                    new ElevationDetector(maxcheck, threshold, sta1Frame).
        //                    withConstantElevation(elevation).
        //                    withHandler(new VisibilityHandler());

        // Add event to be detected
        //kepler.addEventDetector(sta1Visi);

        //Propagate from the initial date to the first raising or for the fixed duration
        //SpacecraftState finalState = kepler.propagate(initialDate.shiftedBy(1500.));

        //System.out.println(" Final state : " + finalState.getDate().durationFrom(initialDate));

        String stateVector = TLEProp.propagate(targetDate)
                .getPVCoordinates(FramesFactory.getITRF(IERSConventions.IERS_2010, true)).toString();

        stateVector = stateVector.replace("{", "").replace("}", ""); //Removes brackets {}
        stateVector = stateVector.replace("(", "").replace(")", ""); //Removes parentheses ()
        stateVector = stateVector.replace("P", "").replace("V", "").replace("A", ""); //Removes P, V, A
        stateVector = stateVector.replace(" ", ""); //Removes spaces
        String[] lineData = stateVector.split(",");

        String timeStamp = new String(lineData[0]);
        double[] position = new double[] { Double.parseDouble(lineData[1]), Double.parseDouble(lineData[2]),
                Double.parseDouble(lineData[3]) };
        double[] velocity = new double[] { Double.parseDouble(lineData[4]), Double.parseDouble(lineData[5]),
                Double.parseDouble(lineData[6]) };
        double[] acceleration = new double[] { Double.parseDouble(lineData[7]), Double.parseDouble(lineData[8]),
                Double.parseDouble(lineData[9]) };

        position = Convert_To_Lat_Long(position);

        System.out.format("Latitude %.8f N%n", position[0]);
        System.out.format("Longitude %.8f E%n", position[1]);
        System.out.format("Altitude %.0f m %n", position[2]);

    } catch (OrekitException oe) {
        System.err.println(oe.getMessage());
    }
}

From source file:de.pniehus.odal.App.java

public static void main(String[] args) throws IOException {
    List<Filter> filters = new ArrayList<Filter>();
    filters.add(new RegexFilter());
    filters.add(new FileTypeFilter());
    filters.add(new KeywordFilter());
    filters.add(new BlacklistFilter());
    Profile p = parseArgs(args, filters);

    String fileName = "log-" + new Date().toString().replace(":", "-") + ".txt";
    fileName = fileName.replace(" ", "-");
    File logPath = new File(p.getLogDirectory() + fileName);

    if (!logPath.getParentFile().isDirectory() && !logPath.getParentFile().mkdirs()) {
        logPath = new File(fileName);
    }

    if (logPath.getParentFile().canWrite() || logPath.getParentFile().setWritable(true)) {
        SimpleLoggingSetup.configureRootLogger(logPath.getAbsolutePath(), p.getLogLevel(), !p.isSilent());
    } else {
        Logger root = Logger.getLogger("");

        for (Handler h : root.getHandlers()) { // Removing default console handlers
            if (h instanceof ConsoleHandler) {
                root.removeHandler(h);
            }
        }

        ConsolePrintLogHandler cplh = new ConsolePrintLogHandler();
        cplh.setFormatter(new ScribblerLogFormat(SimpleLoggingSetup.DEFAULT_DATE_FORMAT));
        root.addHandler(cplh);

        System.out.println("Unable to create log: insufficient permissions!");

    }

    Logger.getLogger("").setLevel(p.getLogLevel());
    mainLogger = Logger.getLogger(App.class.getCanonicalName());
    untrustedSSLSetup();
    mainLogger.info("Successfully initialized ODAL");
    if (!p.isLogging())
        mainLogger.setLevel(Level.OFF);
    if (p.isWindowsConsoleMode() && !p.isLogging()) {
        Logger root = Logger.getLogger("");
        for (Handler h : root.getHandlers()) {
            if (h instanceof FileHandler) {
                root.removeHandler(h); // Removes FileHandler to allow console output through logging
            }
        }
    }
    OdalGui ogui = new OdalGui(p, filters);
}

From source file:evalita.q4faq.baseline.Index.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    try {
        if (args.length > 1) {
            Reader in = new FileReader(args[0]);
            IndexWriterConfig config = new IndexWriterConfig(Version.LATEST, new ItalianAnalyzer());
            IndexWriter writer = new IndexWriter(FSDirectory.open(new File(args[1])), config);
            Iterable<CSVRecord> records = CSVFormat.EXCEL.withHeader().withDelimiter(';').parse(in);
            for (CSVRecord record : records) {
                int id = Integer.parseInt(record.get("id"));
                String question = record.get("question");
                String answer = record.get("answer");
                String tag = record.get("tag");
                Document doc = new Document();
                doc.add(new StringField("id", String.valueOf(id), Field.Store.YES));
                doc.add(new TextField("question", question, Field.Store.NO));
                doc.add(new TextField("answer", answer, Field.Store.NO));
                doc.add(new TextField("tag", tag.replace(",", " "), Field.Store.NO));
                writer.addDocument(doc);
            }
            writer.close();
        } else {
            throw new IllegalArgumentException("Number of arguments not valid");
        }
    } catch (IOException | IllegalArgumentException ex) {
        Logger.getLogger(Index.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:com.ikanow.aleph2.analytics.spark.assets.SparkScalaInterpreterTopology.java

public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    final SetOnce<IBucketLogger> logger = new SetOnce<>();

    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        logger.set(context.getLogger(context.getBucket()));

        // Optional: make really really sure it exists after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        //INFO:
        System.out.println("Starting SparkScalaInterpreterTopology logging=" + logger.optional().isPresent());

        logger.optional().ifPresent(l -> {
            l.inefficientLog(Level.INFO,
                    ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology", "main",
                            "Starting SparkScalaInterpreterTopology.{0}",
                            Optionals.of(() -> context.getJob().get().name()).orElse("no_name")));
        });

        final SparkTopologyConfigBean job_config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();
        final String scala_script = Optional.ofNullable(job_config.script()).orElse("");

        final String wrapper_script = IOUtils.toString(
                SparkScalaInterpreterTopology.class.getClassLoader().getResourceAsStream("ScriptRunner.scala"),
                "UTF-8");
        final String to_compile = wrapper_script.replace("USER_SCRIPT", scala_script);
        final SparkCompilerService scs = new SparkCompilerService();
        final Tuple2<ClassLoader, Object> o = scs.buildClass(to_compile, "ScriptRunner", logger.optional());

        Thread.currentThread().setContextClassLoader(o._1());

        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));

        SparkConf spark_context = new SparkConf().setAppName("SparkPassthroughTopology");

        final long streaming_batch_interval = (long) spark_context
                .getInt(SparkTopologyConfigBean.STREAMING_BATCH_INTERVAL, 10);

        // MAIN PROCESSING

        final Method m = o._2().getClass().getMethod("runScript", SparkScriptEngine.class);

        //DEBUG
        //final boolean test_mode = test_spec.isPresent(); // (serializable thing i can pass into the map)

        boolean is_streaming = context.getJob().map(j -> j.analytic_type())
                .map(t -> MasterEnrichmentType.streaming == t).orElse(false);
        final Either<JavaSparkContext, JavaStreamingContext> jsc = Lambdas.get(() -> {
            return is_streaming
                    ? Either.<JavaSparkContext, JavaStreamingContext>right(new JavaStreamingContext(
                            spark_context, Durations.seconds(streaming_batch_interval)))
                    : Either.<JavaSparkContext, JavaStreamingContext>left(new JavaSparkContext(spark_context));
        });
        try {
            final JavaSparkContext jsc_batch = jsc.either(l -> l, r -> r.sparkContext());

            final Multimap<String, JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> inputs = SparkTechnologyUtils
                    .buildBatchSparkInputs(context, test_spec, jsc_batch, Collections.emptySet());

            final Multimap<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>> streaming_inputs = jsc
                    .<Multimap<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>>>either(
                            l -> HashMultimap
                                    .<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>>create(),
                            r -> SparkTechnologyUtils.buildStreamingSparkInputs(context, test_spec, r,
                                    Collections.emptySet()));

            final SparkScriptEngine script_engine_bridge = new SparkScriptEngine(context, inputs,
                    streaming_inputs, test_spec, jsc_batch, jsc.either(l -> null, r -> r), job_config);

            // Add driver and generated JARs to path:
            jsc_batch.addJar(LiveInjector.findPathJar(o._2().getClass()));

            m.invoke(o._2(), script_engine_bridge);

            jsc.either(l -> {
                l.stop();
                return null;
            }, r -> {
                r.stop();
                return null;
            });

            logger.optional().ifPresent(l -> {
                l.inefficientLog(Level.INFO,
                        ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology", "main",
                                "Stopping SparkScalaInterpreterTopology.{0}",
                                Optionals.of(() -> context.getJob().get().name()).orElse("no_name")));
            });

            //INFO:
            System.out.println("Finished interpreter");
        } finally {
            jsc.either(l -> {
                l.close();
                return null;
            }, r -> {
                r.close();
                return null;
            });
        }
        logger.optional().ifPresent(Lambdas.wrap_consumer_u(l -> l.flush().get(10, TimeUnit.SECONDS)));
    } catch (Throwable t) {
        logger.optional().ifPresent(l -> {
            l.inefficientLog(Level.ERROR, ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology",
                    "main",
                    ErrorUtils.getLongForm("Error executing SparkScalaInterpreterTopology.unknown: {0}", t)));
        });

        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));
        logger.optional().ifPresent(Lambdas.wrap_consumer_u(l -> l.flush().get(10, TimeUnit.SECONDS)));
        System.exit(-1);
    }
}

From source file:com.jiangyifen.ec2.globaldata.license.LicenseManagerMain.java

/**
 * Generates a license string.
 * 
 * @param args
 */
public static void main(String[] args) {
    // MAC address
    // String mac="00-50-56-C0-00-08"; // jrh 160
    String mac = "C8:1F:66:B9:26:96";
    mac = "00-50-56-C0-00-08";
    mac = "90-B1-1C-82-D7-1F";
    mac = mac.replace(":", "-");

    // Expiry date
    String date = "2015-08-25 00:00:00";
    // Licensed user count
    String count = "35";
    System.out.println(generateLicense(mac, date, count));
    System.out.println();
    System.out.println();

    jrh_license_test();

}

From source file:com.alexoree.jenkins.Main.java

public static void main(String[] args) throws Exception {
    // create Options object
    Options options = new Options();

    options.addOption("t", false, "throttle the downloads, waits 5 seconds in between each d/l");

    // automatically generate the help statement
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("jenkins-sync", options);

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = parser.parse(options, args);
    boolean throttle = cmd.hasOption("t");

    String plugins = "https://updates.jenkins-ci.org/latest/";
    List<String> ps = new ArrayList<String>();
    Document doc = Jsoup.connect(plugins).get();
    for (Element file : doc.select("td a")) {
        //System.out.println(file.attr("href"));
        if (file.attr("href").endsWith(".hpi") || file.attr("href").endsWith(".war")) {
            ps.add(file.attr("href"));
        }
    }

    File root = new File(".");
    //https://updates.jenkins-ci.org/latest/AdaptivePlugin.hpi
    new File("./latest").mkdirs();

    //output zip file
    String zipFile = "jenkinsSync.zip";
    // create byte buffer
    byte[] buffer = new byte[1024];
    FileOutputStream fos = new FileOutputStream(zipFile);
    ZipOutputStream zos = new ZipOutputStream(fos);

    //download the plugins
    for (int i = 0; i < ps.size(); i++) {
        System.out.println("[" + i + "/" + ps.size() + "] downloading " + plugins + ps.get(i));
        String outputFile = download(root.getAbsolutePath() + "/latest/" + ps.get(i), plugins + ps.get(i));

        FileInputStream fis = new FileInputStream(outputFile);
        // begin writing a new ZIP entry, positions the stream to the start of the entry data
        zos.putNextEntry(new ZipEntry(outputFile.replace(root.getAbsolutePath(), "")
                .replace("updates.jenkins-ci.org/", "").replace("https:/", "")));
        int length;
        while ((length = fis.read(buffer)) > 0) {
            zos.write(buffer, 0, length);
        }
        zos.closeEntry();
        fis.close();
        if (throttle)
            Thread.sleep(WAIT);
        new File(root.getAbsolutePath() + "/latest/" + ps.get(i)).deleteOnExit();
    }

    //download the json metadata
    plugins = "https://updates.jenkins-ci.org/";
    ps = new ArrayList<String>();
    doc = Jsoup.connect(plugins).get();
    for (Element file : doc.select("td a")) {
        //System.out.println(file.attr("href"));
        if (file.attr("href").endsWith(".json")) {
            ps.add(file.attr("href"));
        }
    }
    for (int i = 0; i < ps.size(); i++) {
        download(root.getAbsolutePath() + "/" + ps.get(i), plugins + ps.get(i));

        FileInputStream fis = new FileInputStream(root.getAbsolutePath() + "/" + ps.get(i));
        // begin writing a new ZIP entry, positions the stream to the start of the entry data
        zos.putNextEntry(new ZipEntry(plugins + ps.get(i)));
        int length;
        while ((length = fis.read(buffer)) > 0) {
            zos.write(buffer, 0, length);
        }
        zos.closeEntry();
        fis.close();
        new File(root.getAbsolutePath() + "/" + ps.get(i)).deleteOnExit();
        if (throttle)
            Thread.sleep(WAIT);
    }

    // close the ZipOutputStream
    zos.close();
}

From source file:com.almende.eve.deploy.Boot.java

/**
 * The default agent booter. It takes an EVE yaml file and creates all
 * agents mentioned in the "agents" section.
 *
 * @param args
 *            Single argument: args[0] -> Eve yaml
 */
public static void main(final String[] args) {
    if (args.length == 0) {
        LOG.warning("Missing argument pointing to yaml file:");
        LOG.warning("Usage: java -jar <jarfile> eve.yaml");
        return;
    }
    final ClassLoader cl = new ClassLoader() {
        @Override
        protected Class<?> findClass(final String name) throws ClassNotFoundException {
            Class<?> result = null;
            try {
                result = super.findClass(name);
            } catch (ClassNotFoundException cne) {
            }
            if (result == null) {
                FileInputStream fi = null;
                try {

                    String path = name.replace('.', '/');
                    fi = new FileInputStream(System.getProperty("user.dir") + "/" + path + ".class");
                    byte[] classBytes = new byte[fi.available()];
                    fi.read(classBytes);
                    fi.close();
                    return defineClass(name, classBytes, 0, classBytes.length);
                } catch (Exception e) {
                    LOG.log(Level.WARNING, "Failed to load class:", e);
                }
            }
            if (result == null) {
                throw new ClassNotFoundException(name);
            }
            return result;
        }
    };
    String configFileName = args[0];
    try {
        InputStream is = new FileInputStream(new File(configFileName));
        boot(is, cl);

    } catch (FileNotFoundException e) {
        LOG.log(Level.WARNING, "Couldn't find configfile:" + configFileName, e);
        return;
    }

}

From source file:Pathway2RDFv2.java

public static void main(String[] args) throws ParserConfigurationException, SAXException, IOException,
        ServiceException, ClassNotFoundException, IDMapperException, ParseException {

    int softwareVersion = 0;
    int schemaVersion = 0;
    int latestRevision = 0;

    BioDataSource.init();
    Class.forName("org.bridgedb.rdb.IDMapperRdb");
    File dir = new File("/Users/andra/Downloads/bridge");
    File[] bridgeDbFiles = dir.listFiles();
    IDMapperStack mapper = new IDMapperStack();
    for (File bridgeDbFile : bridgeDbFiles) {
        System.out.println(bridgeDbFile.getAbsolutePath());
        mapper.addIDMapper("idmapper-pgdb:" + bridgeDbFile.getAbsolutePath());
    }

    Model bridgeDbmodel = ModelFactory.createDefaultModel();
    InputStream in = new FileInputStream("/tmp/BioDataSource.ttl");
    bridgeDbmodel.read(in, "", "TURTLE");

    WikiPathwaysClient client = new WikiPathwaysClient(
            new URL("http://www.wikipathways.org/wpi/webservice/webservice.php"));

    basicCalls.printMemoryStatus();

    //Map wikipathway organisms to NCBI organisms
    HashMap<String, String> organismTaxonomy = wpRelatedCalls.getOrganismsTaxonomyMapping();
    //HashMap<String, String> miriamSources = new HashMap<String, String>();
    //      HashMap<String, Str ing> miriamLinks = basicCalls.getMiriamUriBridgeDb();

    //Document wikiPathwaysDom = basicCalls.openXmlFile(args[0]);
    Document wikiPathwaysDom = basicCalls.openXmlFile("/tmp/WpGPML.xml");

    //initiate the Jena model to be populated
    Model model = ModelFactory.createDefaultModel();
    Model voidModel = ModelFactory.createDefaultModel();

    voidModel.setNsPrefix("xsd", XSD.getURI());
    voidModel.setNsPrefix("void", Void.getURI());
    voidModel.setNsPrefix("wprdf", "http://rdf.wikipathways.org/");
    voidModel.setNsPrefix("pav", Pav.getURI());
    voidModel.setNsPrefix("prov", Prov.getURI());
    voidModel.setNsPrefix("dcterms", DCTerms.getURI());
    voidModel.setNsPrefix("biopax", Biopax_level3.getURI());
    voidModel.setNsPrefix("gpml", Gpml.getURI());
    voidModel.setNsPrefix("wp", Wp.getURI());
    voidModel.setNsPrefix("foaf", FOAF.getURI());
    voidModel.setNsPrefix("hmdb", "http://identifiers.org/hmdb/");
    voidModel.setNsPrefix("freq", Freq.getURI());
    voidModel.setNsPrefix("dc", DC.getURI());
    setModelPrefix(model);

    //Populate void.ttl
    Calendar now = Calendar.getInstance();
    Literal nowLiteral = voidModel.createTypedLiteral(now);
    Literal titleLiteral = voidModel.createLiteral("WikiPathways-RDF VoID Description", "en");
    Literal descriptionLiteral = voidModel
            .createLiteral("This is the VoID description for a WikiPathwyas-RDF dataset.", "en");
    Resource voidBase = voidModel.createResource("http://rdf.wikipathways.org/");
    Resource identifiersOrg = voidModel.createResource("http://identifiers.org");
    Resource wpHomeBase = voidModel.createResource("http://www.wikipathways.org/");
    Resource authorResource = voidModel
            .createResource("http://semantics.bigcat.unimaas.nl/figshare/search_author.php?author=waagmeester");
    Resource apiResource = voidModel
            .createResource("http://www.wikipathways.org/wpi/webservice/webservice.php");
    Resource mainDatadump = voidModel.createResource("http://rdf.wikipathways.org/wpContent.ttl.gz");
    Resource license = voidModel.createResource("http://creativecommons.org/licenses/by/3.0/");
    Resource instituteResource = voidModel.createResource("http://dbpedia.org/page/Maastricht_University");
    voidBase.addProperty(RDF.type, Void.Dataset);
    voidBase.addProperty(DCTerms.title, titleLiteral);
    voidBase.addProperty(DCTerms.description, descriptionLiteral);
    voidBase.addProperty(FOAF.homepage, wpHomeBase);
    voidBase.addProperty(DCTerms.license, license);
    voidBase.addProperty(Void.uriSpace, voidBase);
    voidBase.addProperty(Void.uriSpace, identifiersOrg);
    voidBase.addProperty(Pav.importedBy, authorResource);
    voidBase.addProperty(Pav.importedFrom, apiResource);
    voidBase.addProperty(Pav.importedOn, nowLiteral);
    voidBase.addProperty(Void.dataDump, mainDatadump);
    voidBase.addProperty(Voag.frequencyOfChange, Freq.Irregular);
    voidBase.addProperty(Pav.createdBy, authorResource);
    voidBase.addProperty(Pav.createdAt, instituteResource);
    voidBase.addLiteral(Pav.createdOn, nowLiteral);
    voidBase.addProperty(DCTerms.subject, Biopax_level3.Pathway);
    voidBase.addProperty(Void.exampleResource,
            voidModel.createResource("http://identifiers.org/ncbigene/2678"));
    voidBase.addProperty(Void.exampleResource,
            voidModel.createResource("http://identifiers.org/pubmed/15215856"));
    voidBase.addProperty(Void.exampleResource,
            voidModel.createResource("http://identifiers.org/hmdb/HMDB02005"));
    voidBase.addProperty(Void.exampleResource, voidModel.createResource("http://rdf.wikipathways.org/WP15"));
    voidBase.addProperty(Void.exampleResource,
            voidModel.createResource("http://identifiers.org/obo.chebi/17242"));

    for (String organism : organismTaxonomy.values()) {
        voidBase.addProperty(DCTerms.subject,
                voidModel.createResource("http://dbpedia.org/page/" + organism.replace(" ", "_")));
    }
    voidBase.addProperty(Void.vocabulary, Biopax_level3.NAMESPACE);
    voidBase.addProperty(Void.vocabulary, voidModel.createResource(Wp.getURI()));
    voidBase.addProperty(Void.vocabulary, voidModel.createResource(Gpml.getURI()));
    voidBase.addProperty(Void.vocabulary, FOAF.NAMESPACE);
    voidBase.addProperty(Void.vocabulary, Pav.NAMESPACE);
    //Custom Properties
    String baseUri = "http://rdf.wikipathways.org/";
    NodeList pathwayElements = wikiPathwaysDom.getElementsByTagName("Pathway");

    //BioDataSource.init();
    for (int i = 0; i < pathwayElements.getLength(); i++) {
        Model pathwayModel = createPathwayModel();
        String wpId = pathwayElements.item(i).getAttributes().getNamedItem("identifier").getTextContent();
        String revision = pathwayElements.item(i).getAttributes().getNamedItem("revision").getTextContent();
        String pathwayOrganism = "";
        if (pathwayElements.item(i).getAttributes().getNamedItem("Organism") != null)
            pathwayOrganism = pathwayElements.item(i).getAttributes().getNamedItem("Organism").getTextContent()
                    .trim();
        if (Integer.valueOf(revision) > latestRevision) {
            latestRevision = Integer.valueOf(revision);
        }
        File f = new File("/tmp/" + args[0] + "/" + wpId + "_r" + revision + ".ttl");
        System.out.println(f.getName());
        if (!f.exists()) {

            Resource voidPwResource = wpRelatedCalls.addVoidTriples(voidModel, voidBase,
                    pathwayElements.item(i), client);
            Resource pwResource = wpRelatedCalls.addPathwayLevelTriple(pathwayModel, pathwayElements.item(i),
                    organismTaxonomy);

            // Get the comments
            NodeList commentElements = ((Element) pathwayElements.item(i)).getElementsByTagName("Comment");
            wpRelatedCalls.addCommentTriples(pathwayModel, pwResource, commentElements, wpId, revision);
            // Get the Groups
            NodeList groupElements = ((Element) pathwayElements.item(i)).getElementsByTagName("Group");
            for (int n = 0; n < groupElements.getLength(); n++) {
                wpRelatedCalls.addGroupTriples(pathwayModel, pwResource, groupElements.item(n), wpId, revision);
            }
            // Get all the Datanodes
            NodeList dataNodesElement = ((Element) pathwayElements.item(i)).getElementsByTagName("DataNode");
            for (int j = 0; j < dataNodesElement.getLength(); j++) {
                wpRelatedCalls.addDataNodeTriples(pathwayModel, pwResource, dataNodesElement.item(j), wpId,
                        revision, bridgeDbmodel, mapper);
            }
            // Get all the lines
            NodeList linesElement = ((Element) pathwayElements.item(i)).getElementsByTagName("Line");
            for (int k = 0; k < linesElement.getLength(); k++) {
                wpRelatedCalls.addLineTriples(pathwayModel, pwResource, linesElement.item(k), wpId, revision);
            }
            //Get all the labels
            NodeList labelsElement = ((Element) pathwayElements.item(i)).getElementsByTagName("Label");
            for (int l = 0; l < labelsElement.getLength(); l++) {
                wpRelatedCalls.addLabelTriples(pathwayModel, pwResource, labelsElement.item(l), wpId, revision);
            }
            NodeList referenceElements = ((Element) pathwayElements.item(i))
                    .getElementsByTagName("bp:PublicationXref");
            for (int m = 0; m < referenceElements.getLength(); m++) {
                wpRelatedCalls.addReferenceTriples(pathwayModel, pwResource, referenceElements.item(m), wpId,
                        revision);
            }
            NodeList referenceElements2 = ((Element) pathwayElements.item(i))
                    .getElementsByTagName("bp:publicationXref");
            for (int m = 0; m < referenceElements2.getLength(); m++) {
                wpRelatedCalls.addReferenceTriples(pathwayModel, pwResource, referenceElements2.item(m), wpId,
                        revision);
            }
            NodeList referenceElements3 = ((Element) pathwayElements.item(i))
                    .getElementsByTagName("bp:PublicationXRef");
            for (int m = 0; m < referenceElements3.getLength(); m++) {
                wpRelatedCalls.addReferenceTriples(pathwayModel, pwResource, referenceElements3.item(m), wpId,
                        revision);
            }

            NodeList ontologyElements = ((Element) pathwayElements.item(i))
                    .getElementsByTagName("bp:openControlledVocabulary");
            for (int n = 0; n < ontologyElements.getLength(); n++) {
                wpRelatedCalls.addPathwayOntologyTriples(pathwayModel, pwResource, ontologyElements.item(n));
            }
            System.out.println(wpId);
            basicCalls.saveRDF2File(pathwayModel, "/tmp/" + args[0] + "/" + wpId + "_r" + revision + ".ttl",
                    "TURTLE");

            model.add(pathwayModel);
            pathwayModel.removeAll();
        }
    }
    Date myDate = new Date();
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
    String myDateString = sdf.format(myDate);
    FileUtils.writeStringToFile(new File("latestVersion.txt"),
            "v" + schemaVersion + "." + softwareVersion + "." + latestRevision + "_" + myDateString);
    basicCalls.saveRDF2File(model, "/tmp/wpContent_v" + schemaVersion + "." + softwareVersion + "."
            + latestRevision + "_" + myDateString + ".ttl", "TURTLE");
    basicCalls.saveRDF2File(voidModel, "/tmp/void.ttl", "TURTLE");
}