Example usage for java.util.regex Pattern.quote

List of usage examples for java.util.regex Pattern.quote

Introduction

On this page you can find example usage for java.util.regex Pattern.quote.

Prototype

public static String quote(String s) 

Document

Returns a literal pattern String for the specified String.
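
A minimal sketch (not taken from the examples below; the class name is hypothetical) of what the returned literal pattern does: quote(s) wraps s in \Q...\E, so regex metacharacters such as '.' or '+' are matched literally.

import java.util.regex.Pattern;

public class PatternQuoteDemo {
    public static void main(String[] args) {
        String version = "1.2.3";                         // '.' is a regex metacharacter
        System.out.println(Pattern.quote(version));       // prints \Q1.2.3\E

        // Without quoting, '.' matches any character
        System.out.println("1x2y3".matches(version));                // true
        // With quoting, only the literal string matches
        System.out.println("1x2y3".matches(Pattern.quote(version))); // false
        System.out.println("1.2.3".matches(Pattern.quote(version))); // true

        // Typical use: split on a literal separator
        String[] parts = "a.b.c".split(Pattern.quote("."));
        System.out.println(parts.length);                            // 3
    }
}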

Usage

From source file: Load_RSS_p.java

public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header,
        final serverObjects post, final serverSwitch env) {

    final serverObjects prop = new serverObjects();
    final Switchboard sb = (Switchboard) env;

    final String collection = post == null ? "user"
            : CommonPattern.SPACE.matcher(post.get("collection", "user").trim()).replaceAll("");
    Map<String, Pattern> collections = CrawlProfile.collectionParser(collection);
    boolean collectionEnabled = sb.index.fulltext().getDefaultConfiguration().isEmpty()
            || sb.index.fulltext().getDefaultConfiguration().contains(CollectionSchema.collection_sxt);
    prop.put("showload_collectionEnabled", collectionEnabled ? 1 : 0);
    prop.put("showload_collection", collection);
    prop.put("showload", 0);
    prop.put("showitems", 0);
    prop.put("shownewfeeds", 0);
    prop.put("showscheduledfeeds", 0);
    prop.put("url", "");
    prop.put("showerrmsg", 0);

    if (post != null && post.containsKey("removeSelectedFeedsNewList")) {
        for (final Map.Entry<String, String> entry : post.entrySet()) {
            if (entry.getValue().startsWith(CHECKBOX_ITEM_PREFIX))
                try {
                    sb.tables.delete("rss",
                            entry.getValue().substring(CHECKBOX_ITEM_PREFIX.length()).getBytes());
                } catch (final IOException e) {
                    ConcurrentLog.logException(e);
                }
        }
    }

    if (post != null && post.containsKey("removeAllFeedsNewList"))
        try {
            final Iterator<Row> plainIterator = sb.tables.iterator("rss");
            Row row;
            String messageurl;
            final List<byte[]> d = new ArrayList<byte[]>();
            while (plainIterator.hasNext()) {
                row = plainIterator.next();
                if (row == null)
                    continue;
                messageurl = row.get("url", "");
                if (messageurl.isEmpty())
                    continue;
                final byte[] api_pk = row.get("api_pk");
                final Row r = api_pk == null ? null : sb.tables.select("api", api_pk);
                if (r == null || !r.get("comment", "").matches(".*" + Pattern.quote(messageurl) + ".*")) {
                    d.add(row.getPK());
                }
            }
            for (final byte[] pk : d) {
                sb.tables.delete("rss", pk);
            }
        } catch (final IOException e) {
            ConcurrentLog.logException(e);
        } catch (final SpaceExceededException e) {
            ConcurrentLog.logException(e);
        }

    if (post != null && post.containsKey("removeSelectedFeedsScheduler")) {
        for (final Map.Entry<String, String> entry : post.entrySet()) {
            if (entry.getValue().startsWith(CHECKBOX_ITEM_PREFIX))
                try {
                    final byte[] pk = entry.getValue().substring(CHECKBOX_ITEM_PREFIX.length()).getBytes();
                    final Row rssRow = sb.tables.select("rss", pk);
                    final byte[] schedulerPK = rssRow.get("api_pk", (byte[]) null);
                    if (schedulerPK != null)
                        sb.tables.delete("api", schedulerPK);
                    rssRow.remove("api_pk");
                    sb.tables.insert("rss", pk, rssRow);
                } catch (final IOException e) {
                    ConcurrentLog.logException(e);
                } catch (final SpaceExceededException e) {
                    ConcurrentLog.logException(e);
                }
        }
    }

    if (post != null && post.containsKey("removeAllFeedsScheduler"))
        try {
            final Iterator<Row> plainIterator = sb.tables.iterator("rss");
            Row row;
            String messageurl;
            final List<byte[]> d = new ArrayList<byte[]>();
            while (plainIterator.hasNext()) {
                row = plainIterator.next();
                if (row == null)
                    continue;
                messageurl = row.get("url", "");
                if (messageurl.isEmpty())
                    continue;
                final byte[] api_pk = row.get("api_pk");
                final Row r = api_pk == null ? null : sb.tables.select("api", api_pk);
                if (r != null && r.get("comment", "").matches(".*" + Pattern.quote(messageurl) + ".*")) {
                    d.add(row.getPK());
                }
            }
            for (final byte[] pk : d) {
                final Row rssRow = sb.tables.select("rss", pk);
                final byte[] schedulerPK = rssRow.get("api_pk", (byte[]) null);
                if (schedulerPK != null)
                    sb.tables.delete("api", schedulerPK);
                rssRow.remove("api_pk");
                sb.tables.insert("rss", pk, rssRow);
            }
        } catch (final IOException e) {
            ConcurrentLog.logException(e);
        } catch (final SpaceExceededException e) {
            ConcurrentLog.logException(e);
        }

    if (post != null && post.containsKey("addSelectedFeedScheduler")) {
        ClientIdentification.Agent agent = ClientIdentification
                .getAgent(post.get("agentName", ClientIdentification.yacyInternetCrawlerAgentName));
        for (final Map.Entry<String, String> entry : post.entrySet()) {
            if (entry.getValue().startsWith(CHECKBOX_ITEM_PREFIX)) {
                Row row;
                try {
                    final byte[] pk = entry.getValue().substring(CHECKBOX_ITEM_PREFIX.length()).getBytes();
                    row = sb.tables.select("rss", pk);
                } catch (final IOException e) {
                    ConcurrentLog.logException(e);
                    continue;
                } catch (final SpaceExceededException e) {
                    ConcurrentLog.logException(e);
                    continue;
                }
                DigestURL url = null;
                try {
                    url = new DigestURL(row.get("url", ""));
                } catch (final MalformedURLException e) {
                    ConcurrentLog.warn("Load_RSS",
                            "malformed url '" + row.get("url", "") + "': " + e.getMessage());
                    continue;
                }
                // load feeds concurrently to get better responsiveness in the web interface
                new RSSLoader(sb, url, collections, agent).start();
            }
        }
    }

    if (post == null || (post != null && (post.containsKey("addSelectedFeedScheduler")
            || post.containsKey("removeSelectedFeedsNewList") || post.containsKey("removeAllFeedsNewList")
            || post.containsKey("removeSelectedFeedsScheduler")
            || post.containsKey("removeAllFeedsScheduler")))) {
        try {
            // get list of primary keys from the api table with scheduled feed loading requests
            Tables.Row row;
            String messageurl;

            // check feeds
            int newc = 0, apic = 0;
            final Iterator<Row> plainIterator = sb.tables.iterator("rss");
            while (plainIterator.hasNext()) {
                row = plainIterator.next();
                if (row == null)
                    continue;
                messageurl = row.get("url", "");
                if (messageurl.isEmpty())
                    continue;
                // get referrer
                final DigestURL referrer = sb.getURL(row.get("referrer", "").getBytes());
                // check if feed is registered in scheduler
                final byte[] api_pk = row.get("api_pk");
                final Row r = api_pk == null ? null : sb.tables.select("api", api_pk);
                if (r != null && r.get("comment", "").matches(".*" + Pattern.quote(messageurl) + ".*")) {
                    // this is a recorded entry
                    final Date date_next_exec = r.get(WorkTables.TABLE_API_COL_DATE_NEXT_EXEC, (Date) null);
                    prop.put("showscheduledfeeds_list_" + apic + "_pk", UTF8.String(row.getPK()));
                    prop.put("showscheduledfeeds_list_" + apic + "_count", apic);
                    prop.put("showscheduledfeeds_list_" + apic + "_rss",
                            MultiProtocolURL.escape(messageurl).toString());
                    prop.putXML("showscheduledfeeds_list_" + apic + "_title", row.get("title", ""));
                    prop.putXML("showscheduledfeeds_list_" + apic + "_referrer",
                            referrer == null ? "#" : referrer.toNormalform(true));
                    prop.put("showscheduledfeeds_list_" + apic + "_recording",
                            DateFormat.getDateTimeInstance().format(row.get("recording_date", new Date())));
                    prop.put("showscheduledfeeds_list_" + apic + "_lastload",
                            DateFormat.getDateTimeInstance().format(row.get("last_load_date", new Date())));
                    prop.put("showscheduledfeeds_list_" + apic + "_nextload", date_next_exec == null ? ""
                            : DateFormat.getDateTimeInstance().format(date_next_exec));
                    prop.put("showscheduledfeeds_list_" + apic + "_lastcount", row.get("last_load_count", 0));
                    prop.put("showscheduledfeeds_list_" + apic + "_allcount", row.get("all_load_count", 0));
                    prop.put("showscheduledfeeds_list_" + apic + "_updperday", row.get("avg_upd_per_day", 0));
                    apic++;
                } else {
                    // this is a new entry
                    prop.put("shownewfeeds_list_" + newc + "_pk", UTF8.String(row.getPK()));
                    prop.put("shownewfeeds_list_" + newc + "_count", newc);
                    prop.putXML("shownewfeeds_list_" + newc + "_rss", messageurl);
                    prop.putXML("shownewfeeds_list_" + newc + "_title", row.get("title", ""));
                    prop.putXML("shownewfeeds_list_" + newc + "_referrer",
                            referrer == null ? "" : referrer.toNormalform(true));
                    prop.put("shownewfeeds_list_" + newc + "_recording",
                            DateFormat.getDateTimeInstance().format(row.get("recording_date", new Date())));
                    newc++;
                }
                if (apic > 1000 || newc > 1000)
                    break;
            }
            prop.put("showscheduledfeeds_list", apic);
            prop.put("showscheduledfeeds_num", apic);
            prop.put("showscheduledfeeds", apic > 0 ? apic : 0);
            prop.put("shownewfeeds_list", newc);
            prop.put("shownewfeeds_num", newc);
            prop.put("shownewfeeds", newc > 0 ? 1 : 0);
        } catch (final IOException e) {
            ConcurrentLog.logException(e);
        } catch (final SpaceExceededException e) {
            ConcurrentLog.logException(e);
        }

        return prop;
    }

    prop.put("url", post.get("url", ""));

    int repeat_time = post.getInt("repeat_time", -1);
    final String repeat_unit = post.get("repeat_unit", "seldays"); // selminutes, selhours, seldays
    if (!"on".equals(post.get("repeat", "off")) && repeat_time > 0)
        repeat_time = -1;

    boolean record_api = false;

    DigestURL url = null;
    try {
        url = post.containsKey("url") ? new DigestURL(post.get("url", "")) : null;
    } catch (final MalformedURLException e) {
        ConcurrentLog.warn("Load_RSS", "url not well-formed: '" + post.get("url", "") + "'");
    }

    ClientIdentification.Agent agent = post == null ? ClientIdentification.yacyInternetCrawlerAgent
            : ClientIdentification
                    .getAgent(post.get("agentName", ClientIdentification.yacyInternetCrawlerAgentName));

    // if we have an url then try to load the rss
    RSSReader rss = null;
    if (url != null)
        try {
            prop.put("url", url.toNormalform(true));
            final Response response = sb.loader.load(sb.loader.request(url, true, false), CacheStrategy.NOCACHE,
                    Integer.MAX_VALUE, BlacklistType.CRAWLER, agent);
            final byte[] resource = response == null ? null : response.getContent();
            rss = resource == null ? null : RSSReader.parse(RSSFeed.DEFAULT_MAXSIZE, resource);
        } catch (final IOException e) {
            ConcurrentLog.warn("Load_RSS", e.getMessage());
            prop.put("showerrmsg", 1);
            prop.put("showerrmsg_msgtxt", "no valid response from given url");
            return prop; // if no response nothing to process further
        }

    // index all selected items: description only
    if (rss != null && post.containsKey("indexSelectedItemContent")) {
        final RSSFeed feed = rss.getFeed();
        final Map<String, DigestURL> hash2UrlMap = new HashMap<String, DigestURL>();
        loop: for (final Map.Entry<String, String> entry : post.entrySet()) {
            if (entry.getValue().startsWith(CHECKBOX_ITEM_PREFIX)) {
                /* Process selected item links */
                final RSSMessage message = feed
                        .getMessage(entry.getValue().substring(CHECKBOX_ITEM_PREFIX.length()));
                if (message == null || StringUtils.isBlank(message.getLink())) {
                    /* Link element is optional in RSS 2.0 and Atom */
                    continue loop;
                }
                DigestURL messageUrl;
                try {
                    messageUrl = new DigestURL(message.getLink());
                } catch (MalformedURLException e) {
                    ConcurrentLog.warn("Load_RSS", "Malformed feed item link URL : " + message.getLink());
                    continue loop;
                }
                if (RSSLoader.indexTriggered.containsKey(messageUrl.hash())) {
                    continue loop;
                }
                hash2UrlMap.put(ASCII.String(messageUrl.hash()), messageUrl);
            } else if (entry.getValue().startsWith(CHECKBOX_MEDIA_ITEM_PREFIX)) {
                /* Process selected item enclosure (media) links */
                final RSSMessage message = feed
                        .getMessage(entry.getValue().substring(CHECKBOX_MEDIA_ITEM_PREFIX.length()));
                if (message == null || StringUtils.isBlank(message.getEnclosure())) {
                    /* Enclosure element is optional */
                    continue loop;
                }
                DigestURL mediaUrl;
                try {
                    mediaUrl = new DigestURL(message.getEnclosure());
                } catch (MalformedURLException e) {
                    ConcurrentLog.warn("Load_RSS",
                            "Malformed feed item enclosure URL : " + message.getEnclosure());
                    continue loop;
                }
                if (RSSLoader.indexTriggered.containsKey(mediaUrl.hash())) {
                    continue loop;
                }
                hash2UrlMap.put(ASCII.String(mediaUrl.hash()), mediaUrl);
            }
        }

        final List<DigestURL> urlsToIndex = new ArrayList<DigestURL>();
        loop: for (final Map.Entry<String, DigestURL> entry : hash2UrlMap.entrySet()) {
            try {
                final DigestURL messageUrl = entry.getValue();
                HarvestProcess harvestProcess = sb.urlExists(ASCII.String(messageUrl.hash()));
                if (harvestProcess != null) {
                    continue loop;
                }
                urlsToIndex.add(messageUrl);
                RSSLoader.indexTriggered.insertIfAbsent(messageUrl.hash(), new Date());
            } catch (final IOException e) {
                ConcurrentLog.logException(e);
            }
        }

        sb.addToIndex(urlsToIndex, null, null, collections, true);
    }

    if (rss != null && post.containsKey("indexAllItemContent")) {
        record_api = true;
        final RSSFeed feed = rss.getFeed();
        RSSLoader.indexAllRssFeed(sb, url, feed, collections);
    }

    if (record_api && rss != null && rss.getFeed() != null && rss.getFeed().getChannel() != null) {
        // record API action
        RSSLoader.recordAPI(sb, post.get(WorkTables.TABLE_API_COL_APICALL_PK, null), url, rss.getFeed(),
                repeat_time, repeat_unit);
    }

    // show items from rss
    if (rss != null) {
        prop.put("showitems", 1);
        final RSSFeed feed = rss.getFeed();
        final RSSMessage channel = feed.getChannel();
        prop.putHTML("showitems_title", channel == null ? "" : channel.getTitle());
        String author = channel == null ? "" : channel.getAuthor();
        if (author == null || author.isEmpty())
            author = channel == null ? "" : channel.getCopyright();
        Date pubDate = channel == null ? null : channel.getPubDate();
        prop.putHTML("showitems_author", author == null ? "" : author);
        prop.putHTML("showitems_description", channel == null ? "" : channel.getDescriptions().toString());
        prop.putHTML("showitems_language", channel == null ? "" : channel.getLanguage());
        prop.putHTML("showitems_date",
                (pubDate == null) ? "" : DateFormat.getDateTimeInstance().format(pubDate));
        prop.putHTML("showitems_ttl", channel == null ? "" : channel.getTTL());
        prop.put("showitems_docs", feed.size()); // number of documents

        int i = 0;
        for (final Hit item : feed) {
            DigestURL link = null;
            final String linkStr = item.getLink();
            if (StringUtils.isNotBlank(linkStr)) {
                /* Link element is optional in RSS 2.0 and Atom */
                try {
                    link = new DigestURL(linkStr);
                } catch (final MalformedURLException e) {
                    ConcurrentLog.warn("Load_RSS", "Malformed feed item link URL : " + linkStr);
                }
            }

            DigestURL enclosure = null;
            final String enclosureStr = item.getEnclosure();
            if (StringUtils.isNotBlank(enclosureStr)) {
                try {
                    enclosure = new DigestURL(enclosureStr);
                } catch (final MalformedURLException e) {
                    ConcurrentLog.warn("Load_RSS", "Malformed feed item enclosure URL : " + enclosureStr);
                }
            }

            if (link == null) {
                /* No link in this feed item : we use the enclosure media URL as the main link */
                link = enclosure;
            }

            author = item.getAuthor();
            if (author == null) {
                author = item.getCopyright();
            }
            pubDate = item.getPubDate();

            HarvestProcess harvestProcess;
            try {
                if (link != null && StringUtils.isNotEmpty(item.getGuid())) {
                    harvestProcess = sb.urlExists(ASCII.String(link.hash()));

                    prop.put("showitems_item_" + i + "_hasLink", true);
                    prop.putHTML("showitems_item_" + i + "_hasLink_link", link.toNormalform(true));
                    final int state = harvestProcess != null ? 2
                            : RSSLoader.indexTriggered.containsKey(link.hash()) ? 1 : 0;
                    prop.put("showitems_item_" + i + "_state", state);
                    prop.put("showitems_item_" + i + "_indexable", state == 0);
                    prop.put("showitems_item_" + i + "_indexable_count", i);
                    prop.putHTML("showitems_item_" + i + "_indexable_inputValue",
                            (link == enclosure ? CHECKBOX_MEDIA_ITEM_PREFIX : CHECKBOX_ITEM_PREFIX)
                                    + item.getGuid());
                } else {
                    prop.put("showitems_item_" + i + "_state", 0);
                    prop.put("showitems_item_" + i + "_indexable", false);
                    prop.put("showitems_item_" + i + "_hasLink", false);
                }
                prop.putHTML("showitems_item_" + i + "_author", author == null ? "" : author);
                prop.putHTML("showitems_item_" + i + "_title", item.getTitle());
                prop.putHTML("showitems_item_" + i + "_description", item.getDescriptions().toString());
                prop.put("showitems_item_" + i + "_defaultMediaDesc", false);
                prop.putHTML("showitems_item_" + i + "_language", item.getLanguage());
                prop.putHTML("showitems_item_" + i + "_date",
                        (pubDate == null) ? "" : DateFormat.getDateTimeInstance().format(pubDate));
                i++;
            } catch (IOException e) {
                ConcurrentLog.logException(e);
            }

            try {
                if (enclosure != null && enclosure != link && StringUtils.isNotEmpty(item.getGuid())) {
                    harvestProcess = sb.urlExists(ASCII.String(enclosure.hash()));

                    prop.put("showitems_item_" + i + "_hasLink", true);
                    prop.putHTML("showitems_item_" + i + "_hasLink_link", enclosure.toNormalform(true));
                    final int state = harvestProcess != null ? 2
                            : RSSLoader.indexTriggered.containsKey(enclosure.hash()) ? 1 : 0;
                    prop.put("showitems_item_" + i + "_state", state);
                    prop.put("showitems_item_" + i + "_indexable", state == 0);
                    prop.put("showitems_item_" + i + "_indexable_count", i);
                    prop.putHTML("showitems_item_" + i + "_indexable_inputValue", "media_" + item.getGuid());
                    prop.putHTML("showitems_item_" + i + "_author", "");
                    prop.putHTML("showitems_item_" + i + "_title", item.getTitle());
                    prop.putHTML("showitems_item_" + i + "_description", "");
                    /* Description is already used for the main item link, use here a default one */
                    prop.put("showitems_item_" + i + "_defaultMediaDesc", true);
                    prop.putHTML("showitems_item_" + i + "_language", "");
                    prop.putHTML("showitems_item_" + i + "_date", "");
                    i++;
                }
            } catch (IOException e) {
                ConcurrentLog.logException(e);
            }
        }
        prop.put("showitems_item", i);
        prop.put("showitems_num", i);
        prop.putHTML("showitems_rss", url.toNormalform(true));
        if (i > 0) {
            prop.put("showload", 1);
            prop.put("showload_rss", url.toNormalform(true));
        }
    }

    return prop;
}

From source file: unalcol.termites.boxplots.HybridGlobalInfoReport.java

private static void createSuperGraph() {
    XYSeriesCollection juegoDatos = null;
    Hashtable<String, XYSeriesCollection> dataCollected = new Hashtable();
    DefaultCategoryDataset defaultcategorydataset = new DefaultCategoryDataset();
    String sDirectorio = experimentsDir;
    File f = new File(sDirectorio);
    String extension;
    File[] files = f.listFiles();
    Hashtable<String, String> Pop = new Hashtable<>();
    PrintWriter escribir;
    Scanner sc = null;
    double sucessfulExp = 0.0;
    for (File file : files) {
        extension = "";
        int i = file.getName().lastIndexOf('.');
        int p = Math.max(file.getName().lastIndexOf('/'), file.getName().lastIndexOf('\\'));
        if (i > p) {
            extension = file.getName().substring(i + 1);
        }
        // System.out.println(file.getName() + "extension" + extension);
        if (file.isFile() && extension.equals("csv") && file.getName().startsWith("dataCollected")
                && file.getName().contains(mazeMode)) {
            System.out.println(file.getName());
            System.out.println("get: " + file.getName());
            String[] filenamep = file.getName().split(Pattern.quote("+"));
            XYSeries globalInfo;

            System.out.println("file" + filenamep[9]);

            int popsize = Integer.valueOf(filenamep[3]);
            double pf = Double.valueOf(filenamep[5]);
            String mode = filenamep[7];

            int maxIter = -1;
            //if (!filenamep[8].isEmpty()) {
            maxIter = Integer.valueOf(filenamep[9]);
            //}
            int n = 0;
            int rho = 0;
            double evapRate = 0.0;
            //14, 16, 16
            if (mode.equals("hybrid")) {
                n = Integer.valueOf(filenamep[15]);
                rho = Integer.valueOf(filenamep[17]);
                String[] tmp = filenamep[19].split(Pattern.quote("."));

                System.out.println("history size:" + n);
                System.out.println("rho:" + rho);
                String sEvap = tmp[0] + "." + tmp[1];
                evapRate = Double.valueOf(sEvap);
            }

            System.out.println("psize:" + popsize);
            System.out.println("pf:" + pf);
            System.out.println("mode:" + mode);
            System.out.println("maxIter:" + maxIter);

            //String[] aMode = {"random", "levywalk", "sandc", "sandclw"};
            //String[] aMode = {"lwphclwevap", "lwsandc2", "lwsandc", "lwphevap2", "lwphevap"};
            //String[] aMode = {"hybrid", "lwphevap", "levywalk"};
            //String[] aMode = {"levywalk", "lwphevap", "hybrid", "hybrid3", "hybrid4", "sequential"};
            String key;
            if (mode.equals("hybrid")) {
                key = getTechniqueName(mode) + "+" + pf + "+" + rho + "+" + n + "+" + popsize + "+" + evapRate;
            } else {
                key = getTechniqueName(mode) + "+" + pf + "+" + popsize + "+" + evapRate;
            }

            System.out.println("key" + key);
            if (isInMode(aMode, mode)) {
                final List list = new ArrayList();
                try {
                    sc = new Scanner(file);

                } catch (FileNotFoundException ex) {
                    Logger.getLogger(DataCollectedLatexConsolidatorSASOMessagesSend1.class.getName())
                            .log(Level.SEVERE, null, ex);
                }
                int roundNumber = 0;
                double globalInfoCollected = 0.0;

                //ArrayList<Double> acSt = new ArrayList<>();
                //ArrayList<Double> avgExp = new ArrayList<>();
                String[] data = null;
                globalInfo = new XYSeries("");
                while (sc.hasNext()) {
                    String line = sc.nextLine();
                    //System.out.println("line:" + line);
                    data = line.split(",");
                    roundNumber = Integer.valueOf(data[0]);
                    globalInfoCollected = Double.valueOf(data[4]);

                    if (globalInfoCollected > 100) {
                        System.out.println("more than 100:" + file.getName());
                    } else {
                        globalInfo.add(roundNumber, globalInfoCollected);
                    }
                    //System.out.println("r" + roundNumber + "dc:" + globalInfoCollected);
                    //Add Data and generate statistics 
                    //acSt.add((double) agentsCorrect);
                    //avgExp.add(averageExplored);
                }

                if (!dataCollected.containsKey(key)) {
                    juegoDatos = new XYSeriesCollection();
                    juegoDatos.addSeries(globalInfo);
                    dataCollected.put(key, juegoDatos);
                } else {
                    ((XYSeriesCollection) dataCollected.get(key)).addSeries(globalInfo);
                }
                //if (Pf.contains(pf)) {
                /*if (mode.equals("hybrid")) {
                String nameSeries = n + "-" + rho + "-" + evapRate;
                defaultcategorydataset.addValue(((double) sucessfulExp) / acSt.size() * 100.0, "" + popsize, nameSeries);
                } else {
                defaultcategorydataset.addValue(((double) sucessfulExp) / acSt.size() * 100.0, "" + popsize, getTechniqueName(mode) + "\nPf:" + pf);
                }*/
                /*pf == 1.0E-4 || pf == 3.0E-4*/
                //}
            }
        }
    }
    createChart(dataCollected);
}

From source file: de.tudarmstadt.ukp.dkpro.core.textnormalizer.ReplacementFileNormalizer.java

@Override
protected Map<Integer, List<SofaChangeAnnotation>> createSofaChangesMap(JCas jcas) {
    Map<Integer, List<SofaChangeAnnotation>> changesMap = new TreeMap<Integer, List<SofaChangeAnnotation>>();
    int mapKey = 1;

    String coveredText = jcas.getDocumentText().toLowerCase();

    List<SofaChangeAnnotation> scaChangesList = new ArrayList<SofaChangeAnnotation>();
    for (Map.Entry<String, String> entry : replacementMap.entrySet()) {
        String replacementKey = entry.getKey().toLowerCase();
        String replacementValue = targetSurroundings + entry.getValue() + targetSurroundings;

        String regex = srcSurroundingsStart + "(" + Pattern.quote(replacementKey) + ")" + srcSurroundingsEnd;
        Pattern pattern = Pattern.compile(regex);
        Matcher matcher = pattern.matcher(coveredText);

        int groupNumberOfKey = (matcher.groupCount() == 1) ? 1 : 2;

        while (matcher.find()) {
            int start = matcher.start(groupNumberOfKey);
            int end = matcher.end(groupNumberOfKey);

            SofaChangeAnnotation sca = new SofaChangeAnnotation(jcas);
            sca.setBegin(start);
            sca.setEnd(end);
            sca.setOperation(OP_REPLACE);
            sca.setValue(replacementValue);
            scaChangesList.add(sca);

            System.out.println(matcher.group(0));
        }

    }
    changesMap.put(mapKey++, scaChangesList);

    return changesMap;
}

From source file: fr.dudie.acrachilisync.tools.upgrade.IssueDescriptionReaderV1.java

/**
 * Extracts the list of bug occurrences from the description.
 *
 * @param pDescription
 *            the issue description
 * @param pStacktraceMD5
 *            the stacktrace MD5 hash the issue is related to
 * @return the ACRA bug occurrences listed in the description
 * @throws IssueParseException
 *             malformed issue description
 */
private Map<String, Date> parseAcraOccurrencesTable(final String pDescription, final String pStacktraceMD5)
        throws IssueParseException {

    final Map<String, Date> occur = new HashMap<String, Date>();

    // escape braces { and } to use strings in regexp
    final String header = IssueDescriptionUtilsV1.getOccurrencesTableHeader();
    final String escHeader = Pattern.quote(header);

    // regexp to find occurrences tables
    final Pattern p = Pattern.compile(escHeader + IssueDescriptionUtilsV1.EOL + "(?:" + OCCURR_LINE_PATTERN
            + IssueDescriptionUtilsV1.EOL + "+)+", Pattern.DOTALL | Pattern.CASE_INSENSITIVE);
    final Matcher m = p.matcher(pDescription);

    if (m.find()) {
        // regexp to find occurrences lines
        final Pattern pLine = Pattern.compile(OCCURR_LINE_PATTERN);
        final Matcher mLine = pLine.matcher(m.group());
        while (mLine.find()) {
            final StringTokenizer line = new StringTokenizer(mLine.group(), "|");
            final String acraReportId = line.nextToken();
            final String acraUserCrashDate = line.nextToken();
            try {
                occur.put(acraReportId, IssueDescriptionUtilsV1.parseDate(acraUserCrashDate));
            } catch (final ParseException e) {
                throw new IssueParseException("Unable to parse user crash date of ACRA report " + acraReportId,
                        e);
            }
        }
    } else {
        throw new IssueParseException("No crash occurrence table found in the description");
    }

    if (m.find()) {
        throw new IssueParseException("More than 1 occurrence table found in the description");
    }

    if (MapUtils.isEmpty(occur)) {
        throw new IssueParseException("0 user crash occurrence found in the description");
    }

    return occur;
}

From source file: com.commander4j.util.JUtility.java

public static String decodeControlChars(String input) {
    String result = input;

    result = result.replaceAll(Pattern.quote("\u0000"), "<NUL>");
    result = result.replaceAll(Pattern.quote("\u0001"), "<SOH>");
    result = result.replaceAll(Pattern.quote("\u0002"), "<STX>");
    result = result.replaceAll(Pattern.quote("\u0003"), "<ETX>");
    result = result.replaceAll(Pattern.quote("\u0004"), "<EOT>");
    result = result.replaceAll(Pattern.quote("\u0005"), "<ENQ>");
    result = result.replaceAll(Pattern.quote("\u0006"), "<ACK>");
    result = result.replaceAll(Pattern.quote("\u0007"), "<BEL>");
    result = result.replaceAll(Pattern.quote("\u0008"), "<BS>");
    result = result.replaceAll(Pattern.quote("\t"), "<HT>");
    result = result.replaceAll(Pattern.quote("\n"), "<LF>");
    result = result.replaceAll(Pattern.quote("\u000B"), "<VT>");
    result = result.replaceAll(Pattern.quote("\u000C"), "<FF>");
    result = result.replaceAll(Pattern.quote("\r"), "<CR>");
    result = result.replaceAll(Pattern.quote("\u000E"), "<SO>");
    result = result.replaceAll(Pattern.quote("\u000F"), "<SI>");
    result = result.replaceAll(Pattern.quote("\u0010"), "<DLE>");
    result = result.replaceAll(Pattern.quote("\u0011"), "<DC1>");
    result = result.replaceAll(Pattern.quote("\u0012"), "<DC2>");
    result = result.replaceAll(Pattern.quote("\u0013"), "<DC3>");
    result = result.replaceAll(Pattern.quote("\u0014"), "<DC4>");
    result = result.replaceAll(Pattern.quote("\u0015"), "<NAK>");
    result = result.replaceAll(Pattern.quote("\u0016"), "<SYN>");
    result = result.replaceAll(Pattern.quote("\u0017"), "<ETB>");
    result = result.replaceAll(Pattern.quote("\u0018"), "<CAN>");
    result = result.replaceAll(Pattern.quote("\u0019"), "<EM>");
    result = result.replaceAll(Pattern.quote("\u001A"), "<SUB>");
    result = result.replaceAll(Pattern.quote("\u001B"), "<ESC>");
    result = result.replaceAll(Pattern.quote("\u001C"), "<FS>");
    result = result.replaceAll(Pattern.quote("\u001D"), "<GS>");
    result = result.replaceAll(Pattern.quote("\u001E"), "<RS>");
    result = result.replaceAll(Pattern.quote("\u001F"), "<US>");

    return result;
}

From source file: dk.netarkivet.common.utils.batch.FileBatchJob.java

/** Mark the job to process only the specified files.  This will
 * override any previous setting of which files to process.
 *
 * @param specifiedFilenames A list of filenames to process (without
 * paths). If null, all files will be processed.
 */
public void processOnlyFilesNamed(List<String> specifiedFilenames) {
    if (specifiedFilenames != null) {
        List<String> quoted = new ArrayList<String>();
        for (String name : specifiedFilenames) {
            quoted.add(Pattern.quote(name));
        }
        processOnlyFilesMatching(quoted);
    } else {
        processOnlyFilesMatching(EVERYTHING_REGEXP);
    }
}

From source file: de.hybris.platform.atddengine.ant.tasks.GenerateProxies.java

private void generateJUnitProxy(final File packageDir, final File testSuiteFile) throws IOException {
    try {
        final RobotTestSuite robotTestSuite = getRobotTestSuiteFactory().parseTestSuite(testSuiteFile);

        final String testSuiteName = robotTestSuite.getName().replaceAll(Pattern.quote(" "), "_");

        final Map<String, Object> binding = new HashMap<String, Object>();

        binding.put("packageName",
                rootPackagePath.replaceAll(Pattern.quote("/"), ".") + "." + packageDir.getName());
        binding.put("testSuiteName", testSuiteName);
        binding.put("testSuitePath", testSuiteFile.getPath().replaceAll(Pattern.quote(File.separator), "/"));
        binding.put("projectName", packageDir.getName());

        final List<String> testNames = new ArrayList<String>();

        for (final RobotTest robotTest : robotTestSuite.getRobotTests()) {
            testNames.add(robotTest.getName());
        }

        binding.put("testNames", testNames);

        final File targetFile = new File(packageDir, testSuiteName + ".java");

        final TemplateProcessor templateProcessor = getTemplateProcessorFactory().createTemplateProcessor();

        final Writer writer = new FileWriter(targetFile);
        templateProcessor.processTemplate(writer, templateFile.getName(), binding);
        writer.close();
    } catch (final PyException e) {
        LOG.warn(String.format("Test suite file [%s] is malformed and will be ignored.",
                testSuiteFile.getName()));
    }
}

From source file: com.stratio.qa.specs.CommonG.java

/**
 * Checks if a given string matches a regular expression or contains a string
 *
 * @param expectedMessage message used for comparing
 * @return boolean
 */
public static Pattern matchesOrContains(String expectedMessage) {
    Pattern pattern;
    if (expectedMessage.startsWith("regex:")) {
        String regex = expectedMessage.substring(expectedMessage.indexOf("regex:") + 6,
                expectedMessage.length());
        pattern = Pattern.compile(regex);
    } else {
        pattern = Pattern.compile(Pattern.quote(expectedMessage));
    }
    return pattern;
}

From source file: de.uzk.hki.da.format.PublishImageConversionStrategy.java

/**
 */
@Override
public List<Event> convertFile(ConversionInstruction ci) throws FileNotFoundException {
    if (cliConnector == null)
        throw new IllegalStateException("cliConnector not set");
    if (ci.getConversion_routine() == null)
        throw new IllegalStateException("conversionRoutine not set");
    if (ci.getConversion_routine().getTarget_suffix() == null
            || ci.getConversion_routine().getTarget_suffix().isEmpty())
        throw new IllegalStateException("target suffix in conversionRoutine not set");

    List<Event> results = new ArrayList<Event>();

    // connect dafile to package

    String input = ci.getSource_file().toRegularFile().getAbsolutePath();

    // Convert 
    ArrayList<String> commandAsList = null;
    for (String audience : audiences) {

        Path.makeFile(object.getDataPath(), pips, audience.toLowerCase(), ci.getTarget_folder()).mkdirs();

        commandAsList = new ArrayList<String>();
        commandAsList.add("convert");
        commandAsList.add(ci.getSource_file().toRegularFile().getAbsolutePath());
        logger.debug(commandAsList.toString());
        commandAsList = assembleResizeDimensionsCommand(commandAsList, audience);
        commandAsList = assembleWatermarkCommand(commandAsList, audience);
        commandAsList = assembleFooterTextCommand(commandAsList, audience,
                ci.getSource_file().toRegularFile().getAbsolutePath());

        DAFile target = new DAFile(pkg, pips + "/" + audience.toLowerCase(),
                Utilities.slashize(ci.getTarget_folder()) + FilenameUtils.getBaseName(input) + "."
                        + ci.getConversion_routine().getTarget_suffix());
        commandAsList.add(target.toRegularFile().getAbsolutePath());

        logger.debug(commandAsList.toString());
        String[] commandAsArray = new String[commandAsList.size()];
        commandAsArray = commandAsList.toArray(commandAsArray);
        if (!cliConnector.execute(commandAsArray))
            throw new RuntimeException("convert did not succeed: " + Arrays.toString(commandAsArray));

        // In order to support multipage tiffs, we check for files by wildcard expression
        String extension = FilenameUtils.getExtension(target.toRegularFile().getAbsolutePath());
        List<File> wild = findFilesWithRegex(
                new File(FilenameUtils.getFullPath(target.toRegularFile().getAbsolutePath())),
                Pattern.quote(FilenameUtils.getBaseName(target.getRelative_path())) + "-\\d+\\." + extension);
        if (!target.toRegularFile().exists() && !wild.isEmpty()) {
            for (File f : wild) {
                DAFile multipageTarget = new DAFile(pkg, pips + "/" + audience.toLowerCase(),
                        Utilities.slashize(ci.getTarget_folder()) + f.getName());

                Event e = new Event();
                e.setDetail(Utilities.createString(commandAsList));
                e.setSource_file(ci.getSource_file());
                e.setTarget_file(multipageTarget);
                e.setType("CONVERT");
                e.setDate(new Date());
                results.add(e);
            }
        } else {
            Event e = new Event();
            e.setDetail(Utilities.createString(commandAsList));
            e.setSource_file(ci.getSource_file());
            e.setTarget_file(target);
            e.setType("CONVERT");
            e.setDate(new Date());
            results.add(e);
        }
    }

    return results;
}

From source file: com.meltmedia.cadmium.servlets.ApiEndpointAccessFilter.java

@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
        throws IOException, ServletException {
    try {
        String pathRequested = ((HttpServletRequest) request).getRequestURI();
        if (pathRequested.startsWith(prefix)) {
            pathRequested = pathRequested.replaceFirst(Pattern.quote(prefix), "").replaceFirst("/$", "");
            for (String disabled : controller.getDisabled()) {
                if (disabled.equals(pathRequested)) {
                    ((HttpServletResponse) response).sendError(HttpStatus.SC_NOT_FOUND);
                    return;
                }
            }
        }
        chain.doFilter(request, response);
    } catch (IOException ioe) {
        log.trace("Failed in api endpoint filter.", ioe);
        throw ioe;
    } catch (ServletException se) {
        log.trace("Failed in api endpoint filter.", se);
        throw se;
    } catch (Throwable t) {
        log.trace("Failed in api endpoint filter.", t);
        throw new ServletException(t);
    }
}