List of usage examples for java.io.FileWriter.close()
public void close() throws IOException
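close() flushes any characters still buffered by the writer and releases the underlying file handle; once a FileWriter is closed, further write() or flush() calls throw an IOException. The real-world examples below mostly call close() explicitly near the end of a method. As a minimal, self-contained sketch (the file name out.txt and the written text are arbitrary and not taken from the examples below), the explicit close-in-finally pattern and the Java 7+ try-with-resources form, which calls close() automatically even when an exception is thrown, look like this:

import java.io.FileWriter;
import java.io.IOException;

public class FileWriterCloseDemo {
    public static void main(String[] args) throws IOException {
        // Explicit close() guarded by a finally block, so the handle is
        // released even if write() fails.
        FileWriter writer = new FileWriter("out.txt");
        try {
            writer.write("hello\n");
        } finally {
            writer.close(); // flushes buffered characters, then closes the file
        }

        // try-with-resources (Java 7+) invokes close() automatically at the
        // end of the block; the second constructor argument opens in append mode.
        try (FileWriter appender = new FileWriter("out.txt", true)) {
            appender.write("appended line\n");
        }
    }
}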
From source file:com.npower.dm.util.ConvertMailProfile.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    File outputFile = new File("c:/temp/mail.xml");
    FileWriter writer = new FileWriter(outputFile);
    File csvFile = new File("c:/temp/mail.csv");
    BufferedReader reader = new BufferedReader(new FileReader(csvFile));
    // The first line is read before the loop and never processed (presumably a
    // header row); each remaining line is split into columns and written as XML.
    String line = reader.readLine();
    while (line != null) {
        line = reader.readLine();
        if (StringUtils.isEmpty(line)) {
            continue;
        }
        String[] cols = StringUtils.split(line, ',');
        Map<String, String> values = new HashMap<String, String>();
        values.put("name", cols[0]);
        values.put("smtp.host", cols[1]);
        values.put("pop.host", cols[2]);
        writeXML(writer, values);
    }
    writer.close();
    reader.close();
}
From source file:eu.smartfp7.foursquare.AttendanceCrawler.java
/** * The main takes an undefined number of cities as arguments, then initializes * the specific crawling of all the trending venues of these cities. * The trending venues must have been previously identified using the `DownloadPages` * program.//w ww . j a v a 2 s . c o m * * Current valid cities are: london, amsterdam, goldcoast, sanfrancisco. * */ public static void main(String[] args) throws Exception { Settings settings = Settings.getInstance(); String folder = settings.getFolder(); // We keep info and error logs, so that we know what happened in case // of incoherence in the time series. Map<String, FileWriter> info_logs = new HashMap<String, FileWriter>(); Map<String, FileWriter> error_logs = new HashMap<String, FileWriter>(); // For each city we monitor, we store the venue IDs that we got from // a previous crawl. Map<String, Collection<String>> city_venues = new HashMap<String, Collection<String>>(); // Contains the epoch time when the last API call has been made for each // venue. Ensures that we get data only once each hour. Map<String, Long> venue_last_call = new HashMap<String, Long>(); // Contains the epoch time when we last checked if time series were broken // for each city. // We do these checks once every day before the batch forecasting begins. Map<String, Long> sanity_checks = new HashMap<String, Long>(); // We also keep in memory the number of checkins for the last hour for // each venue. Map<String, Integer> venue_last_checkin = new HashMap<String, Integer>(); Map<Long, Integer> APICallsCount = new HashMap<Long, Integer>(); DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); int total_venues = 0; long total_calls = 0; long time_spent_on_API = 0; for (String c : args) { settings.checkFileHierarchy(c); city_venues.put(c, loadVenues(c)); total_venues += city_venues.get(c).size(); info_logs.put(c, new FileWriter(folder + c + File.separator + "log" + File.separator + "info.log", true)); error_logs.put(c, new FileWriter(folder + c + File.separator + "log" + File.separator + "error.log", true)); Calendar cal = Calendar.getInstance(); info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Crawler initialization for " + c + ". " + city_venues.get(c).size() + " venues loaded.\n"); info_logs.get(c).flush(); // If we interrupted the program for some reason, we can get back // the in-memory data. // Important: the program must not be interrupted for more than one // hour, or we will lose time series data. for (String venue_id : city_venues.get(c)) { String ts_file = folder + c + File.separator + "attendances_crawl" + File.separator + venue_id + ".ts"; if (new File(ts_file).exists()) { BufferedReader buffer = new BufferedReader(new FileReader(ts_file)); String mem = null, line = null; for (; (line = buffer.readLine()) != null; mem = line) ; buffer.close(); if (mem == null) continue; String[] tmp = mem.split(","); venue_last_call.put(venue_id, df.parse(tmp[0]).getTime()); venue_last_checkin.put(venue_id, Integer.parseInt(tmp[3])); VenueUtil.fixBrokenTimeSeriesVenue(new File(ts_file)); } // if } // for sanity_checks.put(c, cal.getTimeInMillis()); } // for if (total_venues > 5000) { System.out.println( "Too much venues for a single API account (max 5000).\nPlease create a new Foursquare API account and use these credentials.\nExiting now."); return; } while (true) { for (String c : args) { // We create a FIFO queue and pop venue IDs one at a time. 
LinkedList<String> city_venues_buffer = new LinkedList<String>(city_venues.get(c)); String venue_id = null; // Artificial wait to avoid processors looping at 100% of their capacity // when there is no more venues to crawl for the current hour. Thread.sleep(3000); while ((venue_id = city_venues_buffer.pollFirst()) != null) { // We get the current time according to the city's time zone Calendar cal = Calendar.getInstance(); cal.add(Calendar.MILLISECOND, TimeZone.getTimeZone(settings.getCityTimezone(c)).getOffset(cal.getTime().getTime()) - Calendar.getInstance().getTimeZone().getOffset(cal.getTime().getTime())); //TimeZone.getTimeZone("Europe/London").getOffset(cal.getTime().getTime())); long current_time = DateUtils.truncate(cal.getTime(), Calendar.HOUR).getTime(); // We query Foursquare only once per hour per venue. if (venue_last_call.get(venue_id) != null && current_time < venue_last_call.get(venue_id) + 3600000) continue; intelligentWait(total_venues, cal.getTime().getTime(), (total_calls == 0 ? 0 : Math.round(time_spent_on_API / total_calls))); Venue venue = null; try { long beforeCall = System.currentTimeMillis(); venue = new Venue(getFoursquareVenueById(venue_id, c)); // If there is no last call, this is the beginning of the time series // for this venue. We get the number of people "here now" to initialize // the series. if (venue_last_call.get(venue_id) == null) { /** TODO: by doing this, we keep a representation of the venue dating from the beginning * of the specific crawl. we might want to change this and update this file once * in a while. */ FileWriter info = new FileWriter(folder + c + File.separator + "foursquare_venues" + File.separator + venue_id + ".info"); info.write(venue.getFoursquareJson()); info.close(); FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl" + File.separator + venue_id + ".ts"); out.write("Date,here_now,hour_checkins,total_checkins\n"); out.write(df.format(current_time) + "," + venue.getHereNow() + "," + venue.getHereNow() + "," + venue.getCheckincount() + "\n"); out.close(); } else { FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl" + File.separator + venue_id + ".ts", true); int checks = venue.getCheckincount() - venue_last_checkin.get(venue_id); out.write(df.format(current_time) + "," + venue.getHereNow() + "," + Integer.toString(checks) + "," + venue.getCheckincount() + "\n"); out.close(); } if (APICallsCount.get(current_time) == null) APICallsCount.put(current_time, 1); else APICallsCount.put(current_time, APICallsCount.get(current_time) + 1); total_calls++; venue_last_call.put(venue_id, current_time); venue_last_checkin.put(venue_id, venue.getCheckincount()); time_spent_on_API += System.currentTimeMillis() - beforeCall; } catch (Exception e) { // If something bad happens (crawler not available, IO error, ...), we put the // venue_id in the FIFO queue so that it gets reevaluated later. //e.printStackTrace(); error_logs.get(c) .write("[" + df.format(cal.getTime().getTime()) + "] Error with venue " + venue_id + " (" + e.getMessage() + "). 
" + APICallsCount.get(current_time) + " API calls so far this hour, " + city_venues_buffer.size() + " venues remaining in the buffer.\n"); error_logs.get(c).flush(); System.out.println("[" + df.format(cal.getTime().getTime()) + "] " + c + " -- " + APICallsCount.get(current_time) + " API calls // " + city_venues_buffer.size() + " venues remaining " + " (" + e.getMessage() + ")"); if (e instanceof FoursquareAPIException) if (((FoursquareAPIException) e).getHttp_code().equals("400") && ((FoursquareAPIException) e).getError_detail() .equals("Venue " + venue_id + " has been deleted")) { city_venues.get(c).remove(venue_id); removeVenue(venue_id, c); } else city_venues_buffer.add(venue_id); continue; } } // while // Every day between 0am and 2am, we repair all the broken time series (if there // is something to repair). Calendar cal = Calendar.getInstance(); if (city_venues_buffer.peekFirst() == null && (cal.getTimeInMillis() - sanity_checks.get(c)) >= 86400000 && cal.get(Calendar.HOUR_OF_DAY) < 2) { VenueUtil.fixBrokenTimeSeriesCity(c, folder); sanity_checks.put(c, cal.getTimeInMillis()); info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Sanity check OK.\n"); info_logs.get(c).flush(); } } // for } // while }
From source file:br.edu.ufcg.lsd.oursim.ui.CLI.java
/** * Exemplo:// w ww . java2 s . c o m * * <pre> * java -jar oursim.jar -w resources/trace_filtrado_primeiros_1000_jobs.txt -m resources/hostinfo_sdsc.dat -synthetic_av -o oursim_trace.txt * -w resources/trace_filtrado_primeiros_1000_jobs.txt -s persistent -nr 20 -md resources/hostinfo_sdsc.dat -av resources/disponibilidade.txt -o oursim_trace.txt * -w resources/new_iosup_workload.txt -s persistent -pd resources/iosup_site_description.txt -wt iosup -nr 1 -synthetic_av -o oursim_trace.txt * -w resources/new_workload.txt -s persistent -pd resources/marcus_site_description.txt -wt marcus -nr 20 -d -o oursim_trace.txt * 1 ms + 1 dia = 2678400 segundos * </pre> * * @param args * @throws FileNotFoundException */ public static void main(String[] args) throws IOException { StopWatch stopWatch = new StopWatch(); stopWatch.start(); List<Closeable> closeables = new ArrayList<Closeable>(); CommandLine cmd = parseCommandLine(args, prepareOptions(), HELP, USAGE, EXECUTION_LINE); File outputFile = (File) cmd.getOptionObject(OUTPUT); PrintOutput printOutput = new PrintOutput(outputFile, false); JobEventDispatcher.getInstance().addListener(printOutput); closeables.add(printOutput); if (cmd.hasOption(EXTRACT_REMOTE_WORKLOAD)) { File remoteWorkloadFile = (File) cmd.getOptionObject(EXTRACT_REMOTE_WORKLOAD); Output remoteWorkloadExtractor = new RemoteTasksExtractorOutput(remoteWorkloadFile); closeables.add(remoteWorkloadExtractor); JobEventDispatcher.getInstance().addListener(remoteWorkloadExtractor); } Grid grid = prepareGrid(cmd); ComputingElementEventCounter computingElementEventCounter = prepareOutputAccounting(cmd, cmd.hasOption(VERBOSE)); Input<? extends AvailabilityRecord> availability = defineAvailability(cmd, grid.getMapOfPeers()); prepareOptionalOutputFiles(cmd, grid, (SyntheticAvailabilityCharacterizationAbstract) availability, closeables); long timeOfFirstSubmission = cmd.getOptionValue(WORKLOAD_TYPE).equals("gwa") ? GWAFormat.extractSubmissionTimeFromFirstJob(cmd.getOptionValue(WORKLOAD)) : 0; Workload workload = defineWorkloadType(cmd, cmd.getOptionValue(WORKLOAD), grid.getMapOfPeers(), timeOfFirstSubmission); JobSchedulerPolicy jobScheduler = defineScheduler(cmd, grid.getListOfPeers()); OurSim oursim = new OurSim(EventQueue.getInstance(), grid, jobScheduler, workload, availability); oursim.setActiveEntity(new ActiveEntityImp()); if (cmd.hasOption(HALT_SIMULATION)) { oursim.addHaltEvent(((Number) cmd.getOptionObject(HALT_SIMULATION)).longValue()); } oursim.start(); for (Closeable c : closeables) { c.close(); } EventQueue.getInstance().clear(); // adiciona mtricas-resumo ao fim do arquivo FileWriter fw = new FileWriter(cmd.getOptionValue(OUTPUT), true); closeables.add(fw); stopWatch.stop(); fw.write("# Simulation duration:" + stopWatch + ".\n"); double utilization = grid.getUtilization(); double realUtilization = grid.getTrueUtilization(); int numberOfResourcesByPeer = Integer.parseInt(cmd.getOptionValue(NUM_RESOURCES_BY_PEER, "0")); fw.write(formatSummaryStatistics(computingElementEventCounter, "NA", "NA", false, grid.getPeers().size(), numberOfResourcesByPeer, utilization, realUtilization, stopWatch.getTime()) + "\n"); fw.close(); System.out.println( getSummaryStatistics(computingElementEventCounter, "NA", "NA", false, grid.getPeers().size(), numberOfResourcesByPeer, utilization, realUtilization, stopWatch.getTime())); }
From source file:de.tudarmstadt.ukp.experiments.dip.wp1.documents.Step8GoldDataAggregator.java
public static void main(String[] args) throws Exception { String inputDir = args[0] + "/"; // output dir File outputDir = new File(args[1]); File turkersConfidence = new File(args[2]); if (outputDir.exists()) { outputDir.delete();//from www . j a v a2 s .c o m } outputDir.mkdir(); List<String> annotatorsIDs = new ArrayList<>(); // for (File f : FileUtils.listFiles(new File(inputDir), new String[] { "xml" }, false)) { // QueryResultContainer queryResultContainer = QueryResultContainer // .fromXML(FileUtils.readFileToString(f, "utf-8")); // for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) { // for (QueryResultContainer.MTurkRelevanceVote relevanceVote : rankedResults.mTurkRelevanceVotes) { // if (!annotatorsIDs.contains(relevanceVote.turkID)) // annotatorsIDs.add(relevanceVote.turkID); // } // } // } HashMap<String, Integer> countVotesForATurker = new HashMap<>(); // creates temporary file with format for mace // Hashmap annotations: key is the id of a document and a sentence // Value is an array votes[] of turkers decisions: true or false (relevant or not) // the length of this array equals the number of annotators in List<String> annotatorsIDs. // If an annotator worked on the task his decision is written in the array otherwise the value is NULL // key: queryID + clueWebID + sentenceID // value: true and false annotations TreeMap<String, Annotations> annotations = new TreeMap<>(); for (File f : FileUtils.listFiles(new File(inputDir), new String[] { "xml" }, false)) { QueryResultContainer queryResultContainer = QueryResultContainer .fromXML(FileUtils.readFileToString(f, "utf-8")); System.out.println("Reading " + f.getName()); for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) { String documentID = rankedResults.clueWebID; for (QueryResultContainer.MTurkRelevanceVote relevanceVote : rankedResults.mTurkRelevanceVotes) { Integer turkerID; if (!annotatorsIDs.contains(relevanceVote.turkID)) { annotatorsIDs.add(relevanceVote.turkID); turkerID = annotatorsIDs.size() - 1; } else { turkerID = annotatorsIDs.indexOf(relevanceVote.turkID); } Integer count = countVotesForATurker.get(relevanceVote.turkID); if (count == null) { count = 0; } count++; countVotesForATurker.put(relevanceVote.turkID, count); String id; List<Integer> trueVotes; List<Integer> falseVotes; for (QueryResultContainer.SingleSentenceRelevanceVote singleSentenceRelevanceVote : relevanceVote.singleSentenceRelevanceVotes) if (!"".equals(singleSentenceRelevanceVote.sentenceID)) { id = f.getName() + "_" + documentID + "_" + singleSentenceRelevanceVote.sentenceID; Annotations turkerVotes = annotations.get(id); if (turkerVotes == null) { trueVotes = new ArrayList<>(); falseVotes = new ArrayList<>(); turkerVotes = new Annotations(trueVotes, falseVotes); } trueVotes = turkerVotes.trueAnnotations; falseVotes = turkerVotes.falseAnnotations; if ("true".equals(singleSentenceRelevanceVote.relevant)) { // votes[turkerID] = true; trueVotes.add(turkerID); } else if ("false".equals(singleSentenceRelevanceVote.relevant)) { // votes[turkerID] = false; falseVotes.add(turkerID); } else { throw new IllegalStateException("Annotation value of sentence " + singleSentenceRelevanceVote.sentenceID + " in " + rankedResults.clueWebID + " equals " + singleSentenceRelevanceVote.relevant); } try { int allVotesCount = trueVotes.size() + falseVotes.size(); if (allVotesCount > 5) { System.err.println(id + " doesn't have 5 annotators: true: " + trueVotes.size() + " false: " + 
falseVotes.size()); // nasty hack, we're gonna strip some data; true votes first /* we can't do that, it breaks something down the line int toRemove = allVotesCount - 5; if (trueVotes.size() >= toRemove) { trueVotes = trueVotes .subList(0, trueVotes.size() - toRemove); } else if ( falseVotes.size() >= toRemove) { falseVotes = falseVotes .subList(0, trueVotes.size() - toRemove); } */ System.err.println("Adjusted: " + id + " doesn't have 5 annotators: true: " + trueVotes.size() + " false: " + falseVotes.size()); } } catch (IllegalStateException e) { e.printStackTrace(); } turkerVotes.trueAnnotations = trueVotes; turkerVotes.falseAnnotations = falseVotes; annotations.put(id, turkerVotes); } else { throw new IllegalStateException( "Empty Sentence ID in " + f.getName() + " for turker " + turkerID); } } } } File tmp = printHashMap(annotations, annotatorsIDs.size()); String file = TEMP_DIR + "/" + tmp.getName(); MACE.main(new String[] { "--prefix", file }); //gets the keys of the documents and sentences ArrayList<String> lines = (ArrayList<String>) FileUtils.readLines(new File(file + ".prediction")); int i = 0; TreeMap<String, TreeMap<String, ArrayList<HashMap<String, String>>>> ids = new TreeMap<>(); ArrayList<HashMap<String, String>> sentences; if (lines.size() != annotations.size()) { throw new IllegalStateException( "The size of prediction file is " + lines.size() + "but expected " + annotations.size()); } for (Map.Entry entry : annotations.entrySet()) { //1001.xml_clueweb12-1905wb-13-07360_8783 String key = (String) entry.getKey(); String[] IDs = key.split("_"); if (IDs.length > 2) { String queryID = IDs[0]; String clueWebID = IDs[1]; String sentenceID = IDs[2]; TreeMap<String, ArrayList<HashMap<String, String>>> clueWebIDs = ids.get(queryID); if (clueWebIDs == null) { clueWebIDs = new TreeMap<>(); } sentences = clueWebIDs.get(clueWebID); if (sentences == null) { sentences = new ArrayList<>(); } HashMap<String, String> sentence = new HashMap<>(); sentence.put(sentenceID, lines.get(i)); sentences.add(sentence); clueWebIDs.put(clueWebID, sentences); ids.put(queryID, clueWebIDs); } else { throw new IllegalStateException("Wrong ID " + key); } i++; } for (Map.Entry entry : ids.entrySet()) { TreeMap<Integer, String> value = (TreeMap<Integer, String>) entry.getValue(); String queryID = (String) entry.getKey(); QueryResultContainer queryResultContainer = QueryResultContainer .fromXML(FileUtils.readFileToString(new File(inputDir, queryID), "utf-8")); for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) { for (Map.Entry val : value.entrySet()) { String clueWebID = (String) val.getKey(); if (clueWebID.equals(rankedResults.clueWebID)) { List<QueryResultContainer.SingleSentenceRelevanceVote> goldEstimatedLabels = new ArrayList<>(); List<QueryResultContainer.SingleSentenceRelevanceVote> turkersVotes = new ArrayList<>(); int size = 0; int hitSize = 0; String hitID = ""; for (QueryResultContainer.MTurkRelevanceVote vote : rankedResults.mTurkRelevanceVotes) { if (!hitID.equals(vote.hitID)) { hitID = vote.hitID; hitSize = vote.singleSentenceRelevanceVotes.size(); size = size + hitSize; turkersVotes.addAll(vote.singleSentenceRelevanceVotes); } else { if (vote.singleSentenceRelevanceVotes.size() != hitSize) { hitSize = vote.singleSentenceRelevanceVotes.size(); size = size + hitSize; turkersVotes.addAll(vote.singleSentenceRelevanceVotes); } } } ArrayList<HashMap<String, String>> sentenceList = (ArrayList<HashMap<String, String>>) val .getValue(); if 
(sentenceList.size() != turkersVotes.size()) { try { throw new IllegalStateException("Expected size of annotations is " + turkersVotes.size() + "but found " + sentenceList.size() + " for document " + rankedResults.clueWebID + " in " + queryID); } catch (IllegalStateException ex) { ex.printStackTrace(); } } for (QueryResultContainer.SingleSentenceRelevanceVote s : turkersVotes) { String valSentence = null; for (HashMap<String, String> anno : sentenceList) { if (anno.keySet().contains(s.sentenceID)) { valSentence = anno.get(s.sentenceID); } } QueryResultContainer.SingleSentenceRelevanceVote singleSentenceVote = new QueryResultContainer.SingleSentenceRelevanceVote(); singleSentenceVote.sentenceID = s.sentenceID; if (("false").equals(valSentence)) { singleSentenceVote.relevant = "false"; } else if (("true").equals(valSentence)) { singleSentenceVote.relevant = "true"; } else { throw new IllegalStateException("Annotation value of sentence " + singleSentenceVote.sentenceID + " equals " + val.getValue()); } goldEstimatedLabels.add(singleSentenceVote); } rankedResults.goldEstimatedLabels = goldEstimatedLabels; } } } File outputFile = new File(outputDir, queryID); FileUtils.writeStringToFile(outputFile, queryResultContainer.toXML(), "utf-8"); System.out.println("Finished " + outputFile); } ArrayList<String> annotators = (ArrayList<String>) FileUtils.readLines(new File(file + ".competence")); FileWriter fileWriter; StringBuilder sb = new StringBuilder(); for (int j = 0; j < annotatorsIDs.size(); j++) { String[] s = annotators.get(0).split("\t"); Float score = Float.parseFloat(s[j]); String turkerID = annotatorsIDs.get(j); System.out.println(turkerID + " " + score + " " + countVotesForATurker.get(turkerID)); sb.append(turkerID).append(" ").append(score).append(" ").append(countVotesForATurker.get(turkerID)) .append("\n"); } fileWriter = new FileWriter(turkersConfidence); fileWriter.append(sb.toString()); fileWriter.close(); }
From source file:com.waku.mmdataextract.ComprehensiveSearch.java
@SuppressWarnings("unchecked") public static void main(String[] args) { FileWriter fw = null; try {/*from ww w. ja v a 2 s. c o m*/ fw = new FileWriter(new File("output/ComprehensiveSearch.csv")); fw.write( ",??,?,?,,?,??,,?,1,2,3,\n"); } catch (IOException e) { e.printStackTrace(); } Document firstPage = MyHttpClient.getAsDom4jDoc(START_ACTION); // System.out.println(doc.asXML()); List<Element> brandOptions = firstPage.selectNodes("//select[@name='brandId']/option"); for (Element brandOption : brandOptions) { String brandId = brandOption.attributeValue("value"); if (!brandId.equalsIgnoreCase("0")) { for (int i = 1; true; i++) { logger.info("Get brandId/page -> " + brandId + "/" + i); if (searchDone(fw, brandId, i)) { break; } } } } try { fw.close(); } catch (IOException e) { e.printStackTrace(); } logger.info("----> Done!"); logger.info("----> Start to compare production search ... "); CompareProductions.start(prodIdList, 0); }
From source file:com.joliciel.frenchTreebank.FrenchTreebank.java
/** * @param args/*from w ww. jav a 2 s. co m*/ */ public static void main(String[] args) throws Exception { String command = args[0]; String outFilePath = ""; String outDirPath = ""; String treebankPath = ""; String ftbFileName = ""; String rawTextDir = ""; String queryPath = ""; String sentenceNumber = null; boolean firstArg = true; for (String arg : args) { if (firstArg) { firstArg = false; continue; } int equalsPos = arg.indexOf('='); String argName = arg.substring(0, equalsPos); String argValue = arg.substring(equalsPos + 1); if (argName.equals("outfile")) outFilePath = argValue; else if (argName.equals("outdir")) outDirPath = argValue; else if (argName.equals("ftbFileName")) ftbFileName = argValue; else if (argName.equals("treebank")) treebankPath = argValue; else if (argName.equals("sentence")) sentenceNumber = argValue; else if (argName.equals("query")) queryPath = argValue; else if (argName.equals("rawTextDir")) rawTextDir = argValue; else throw new RuntimeException("Unknown argument: " + argName); } TalismaneServiceLocator talismaneServiceLocator = TalismaneServiceLocator.getInstance(); TreebankServiceLocator locator = TreebankServiceLocator.getInstance(talismaneServiceLocator); if (treebankPath.length() == 0) locator.setDataSourcePropertiesFile("jdbc-live.properties"); if (command.equals("search")) { final SearchService searchService = locator.getSearchService(); final XmlPatternSearch search = searchService.newXmlPatternSearch(); search.setXmlPatternFile(queryPath); List<SearchResult> searchResults = search.perform(); FileWriter fileWriter = new FileWriter(outFilePath); for (SearchResult searchResult : searchResults) { String lineToWrite = ""; Sentence sentence = searchResult.getSentence(); Phrase phrase = searchResult.getPhrase(); lineToWrite += sentence.getFile().getFileName() + "|"; lineToWrite += sentence.getSentenceNumber() + "|"; List<PhraseUnit> phraseUnits = searchResult.getPhraseUnits(); LOG.debug("Phrase: " + phrase.getId()); for (PhraseUnit phraseUnit : phraseUnits) lineToWrite += phraseUnit.getLemma().getText() + "|"; lineToWrite += phrase.getText(); fileWriter.write(lineToWrite + "\n"); } fileWriter.flush(); fileWriter.close(); } else if (command.equals("load")) { final TreebankService treebankService = locator.getTreebankService(); final TreebankSAXParser parser = new TreebankSAXParser(); parser.setTreebankService(treebankService); parser.parseDocument(treebankPath); } else if (command.equals("loadAll")) { final TreebankService treebankService = locator.getTreebankService(); File dir = new File(treebankPath); String firstFile = null; if (args.length > 2) firstFile = args[2]; String[] files = dir.list(); if (files == null) { throw new RuntimeException("Not a directory or no children: " + treebankPath); } else { boolean startProcessing = true; if (firstFile != null) startProcessing = false; for (int i = 0; i < files.length; i++) { if (!startProcessing && files[i].equals(firstFile)) startProcessing = true; if (startProcessing) { String filePath = args[1] + "/" + files[i]; LOG.debug(filePath); final TreebankSAXParser parser = new TreebankSAXParser(); parser.setTreebankService(treebankService); parser.parseDocument(filePath); } } } } else if (command.equals("loadRawText")) { final TreebankService treebankService = locator.getTreebankService(); final TreebankRawTextAssigner assigner = new TreebankRawTextAssigner(); assigner.setTreebankService(treebankService); assigner.setRawTextDirectory(rawTextDir); assigner.loadRawText(); } else if (command.equals("tokenize")) { 
Writer csvFileWriter = null; if (outFilePath != null && outFilePath.length() > 0) { if (outFilePath.lastIndexOf("/") > 0) { String outputDirPath = outFilePath.substring(0, outFilePath.lastIndexOf("/")); File outputDir = new File(outputDirPath); outputDir.mkdirs(); } File csvFile = new File(outFilePath); csvFile.delete(); csvFile.createNewFile(); csvFileWriter = new BufferedWriter( new OutputStreamWriter(new FileOutputStream(csvFile, false), "UTF8")); } try { final TreebankService treebankService = locator.getTreebankService(); TreebankExportService treebankExportService = locator.getTreebankExportServiceLocator() .getTreebankExportService(); TreebankUploadService treebankUploadService = locator.getTreebankUploadServiceLocator() .getTreebankUploadService(); TreebankReader treebankReader = null; if (treebankPath.length() > 0) { File treebankFile = new File(treebankPath); if (sentenceNumber != null) treebankReader = treebankUploadService.getXmlReader(treebankFile, sentenceNumber); else treebankReader = treebankUploadService.getXmlReader(treebankFile); } else { treebankReader = treebankService.getDatabaseReader(TreebankSubSet.ALL, 0); } TokeniserAnnotatedCorpusReader reader = treebankExportService .getTokeniserAnnotatedCorpusReader(treebankReader, csvFileWriter); while (reader.hasNextTokenSequence()) { TokenSequence tokenSequence = reader.nextTokenSequence(); List<Integer> tokenSplits = tokenSequence.getTokenSplits(); String sentence = tokenSequence.getText(); LOG.debug(sentence); int currentPos = 0; StringBuilder sb = new StringBuilder(); for (int split : tokenSplits) { if (split == 0) continue; String token = sentence.substring(currentPos, split); sb.append('|'); sb.append(token); currentPos = split; } LOG.debug(sb.toString()); } } finally { csvFileWriter.flush(); csvFileWriter.close(); } } else if (command.equals("export")) { if (outDirPath.length() == 0) throw new RuntimeException("Parameter required: outdir"); File outDir = new File(outDirPath); outDir.mkdirs(); final TreebankService treebankService = locator.getTreebankService(); FrenchTreebankXmlWriter xmlWriter = new FrenchTreebankXmlWriter(); xmlWriter.setTreebankService(treebankService); if (ftbFileName.length() == 0) { xmlWriter.write(outDir); } else { TreebankFile ftbFile = treebankService.loadTreebankFile(ftbFileName); String fileName = ftbFileName.substring(ftbFileName.lastIndexOf('/') + 1); File xmlFile = new File(outDir, fileName); xmlFile.delete(); xmlFile.createNewFile(); Writer xmlFileWriter = new BufferedWriter( new OutputStreamWriter(new FileOutputStream(xmlFile, false), "UTF8")); xmlWriter.write(xmlFileWriter, ftbFile); xmlFileWriter.flush(); xmlFileWriter.close(); } } else { throw new RuntimeException("Unknown command: " + command); } LOG.debug("========== END ============"); }
From source file:org.apache.cxf.cwiki.SiteExporter.java
public static void main(String[] args) throws Exception {
    Authenticator.setDefault(new Authenticator() {
        protected PasswordAuthentication getPasswordAuthentication() {
            return new PasswordAuthentication(userName, password.toCharArray());
        }
    });

    ListIterator<String> it = Arrays.asList(args).listIterator();
    List<String> files = new ArrayList<String>();
    boolean forceAll = false;
    int maxThreads = -1;
    while (it.hasNext()) {
        String s = it.next();
        if ("-debug".equals(s)) {
            debug = true;
        } else if ("-user".equals(s)) {
            userName = it.next();
        } else if ("-password".equals(s)) {
            password = it.next();
        } else if ("-d".equals(s)) {
            rootOutputDir = new File(it.next());
        } else if ("-force".equals(s)) {
            forceAll = true;
        } else if ("-svn".equals(s)) {
            svn = true;
        } else if ("-commit".equals(s)) {
            commit = true;
        } else if ("-maxThreads".equals(s)) {
            maxThreads = Integer.parseInt(it.next());
        } else if (s != null && s.length() > 0) {
            files.add(s);
        }
    }

    List<SiteExporter> exporters = new ArrayList<SiteExporter>();
    for (String file : files) {
        exporters.add(new SiteExporter(file, forceAll));
    }
    List<SiteExporter> modified = new ArrayList<SiteExporter>();
    for (SiteExporter exporter : exporters) {
        if (exporter.initialize()) {
            modified.add(exporter);
        }
    }

    // render stuff only if needed
    if (!modified.isEmpty()) {
        setSiteExporters(exporters);
        if (maxThreads <= 0) {
            maxThreads = modified.size();
        }
        ExecutorService executor = Executors.newFixedThreadPool(maxThreads, new ThreadFactory() {
            public Thread newThread(Runnable r) {
                Thread t = new Thread(r);
                t.setDaemon(true);
                return t;
            }
        });
        List<Future<?>> futures = new ArrayList<Future<?>>(modified.size());
        for (SiteExporter exporter : modified) {
            futures.add(executor.submit(exporter));
        }
        for (Future<?> t : futures) {
            t.get();
        }
    }

    if (commit) {
        File file = FileUtils.createTempFile("svncommit", "txt");
        FileWriter writer = new FileWriter(file);
        writer.write(svnCommitMessage.toString());
        writer.close();
        callSvn(rootOutputDir, "commit", "-F", file.getAbsolutePath(), rootOutputDir.getAbsolutePath());
        svnCommitMessage.setLength(0);
    }
}
From source file:com.icesoft.faces.webapp.parser.TagToComponentMap.java
/**
 * Main method for when this class is run to build the serialized data from
 * a set of TLDs.
 *
 * @param args The runtime arguments.
 */
public static void main(String args[]) {
    /* args[0] is "new" to create serialized data or "old" to read serialized data;
       args[1] is the filename for the serialized data;
       args[2...] are the TLDs to process. */
    FileInputStream tldFile = null;
    TagToComponentMap map = new TagToComponentMap();
    if (args[0].equals("new")) {
        // Build new component map from tlds and serialize it;
        for (int i = 2; i < args.length; i++) {
            try {
                tldFile = new FileInputStream(args[i]);
                map.addTagAttrib((InputStream) tldFile);
            } catch (IOException e) {
                e.printStackTrace();
                return;
            }
        }
        try {
            FileOutputStream fos = new FileOutputStream(args[1]);
            ObjectOutputStream oos = new ObjectOutputStream(fos);
            oos.writeObject(map);
            oos.flush();
            oos.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    } else if (args[0].equals("old")) {
        // Build component map from serialized data;
        try {
            FileInputStream fis = new FileInputStream(args[1]);
            ObjectInputStream ois = new ObjectInputStream(fis);
            map = (TagToComponentMap) ois.readObject();
        } catch (Exception e) {
            e.printStackTrace();
        }
    } else if (args[0].equals("facelets")) {
        // Build new component map from the tld, and use that to
        // generate a Facelets taglib.xml
        // args[0] is the command
        // args[1] is the output taglib.xml
        // args[2] is the input tld
        try {
            FileWriter faceletsTaglibXmlWriter = new FileWriter(args[1]);
            String preamble = "<?xml version=\"1.0\"?>\n"
                    + "<facelet-taglib xmlns=\"http://java.sun.com/xml/ns/javaee\"\n"
                    + "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n"
                    + "xsi:schemaLocation=\"http://java.sun.com/xml/ns/javaee "
                    + "http://java.sun.com/xml/ns/javaee/web-facelettaglibrary_2_0.xsd\"\n"
                    + "version=\"2.0\">\n";
            String trailer = "</facelet-taglib>\n";
            faceletsTaglibXmlWriter.write(preamble);
            map.setFaceletsTaglibXmlWriter(faceletsTaglibXmlWriter);
            tldFile = new FileInputStream(args[2]);
            map.addTagAttrib((InputStream) tldFile);
            faceletsTaglibXmlWriter.write(trailer);
            faceletsTaglibXmlWriter.flush();
            faceletsTaglibXmlWriter.close();
        } catch (IOException e) {
            e.printStackTrace();
            return;
        }
    }
}
From source file:ch.kostceco.tools.kostsimy.KOSTSimy.java
/** Die Eingabe besteht aus 2 Parameter: [0] Original-Ordner [1] Replica-Ordner * //ww w. j a va 2s.c o m * @param args * @throws IOException */ public static void main(String[] args) throws IOException { ApplicationContext context = new ClassPathXmlApplicationContext("classpath:config/applicationContext.xml"); // Zeitstempel Start java.util.Date nowStart = new java.util.Date(); java.text.SimpleDateFormat sdfStart = new java.text.SimpleDateFormat("dd.MM.yyyy HH:mm:ss"); String ausgabeStart = sdfStart.format(nowStart); KOSTSimy kostsimy = (KOSTSimy) context.getBean("kostsimy"); File configFile = new File("configuration" + File.separator + "kostsimy.conf.xml"); // Ueberprfung des Parameters (Log-Verzeichnis) String pathToLogfile = kostsimy.getConfigurationService().getPathToLogfile(); File directoryOfLogfile = new File(pathToLogfile); if (!directoryOfLogfile.exists()) { directoryOfLogfile.mkdir(); } // Im Logverzeichnis besteht kein Schreibrecht if (!directoryOfLogfile.canWrite()) { System.out.println( kostsimy.getTextResourceService().getText(ERROR_LOGDIRECTORY_NOTWRITABLE, directoryOfLogfile)); System.exit(1); } if (!directoryOfLogfile.isDirectory()) { System.out.println(kostsimy.getTextResourceService().getText(ERROR_LOGDIRECTORY_NODIRECTORY)); System.exit(1); } // Ist die Anzahl Parameter (2) korrekt? if (args.length > 3) { System.out.println(kostsimy.getTextResourceService().getText(ERROR_PARAMETER_USAGE)); System.exit(1); } File origDir = new File(args[0]); File repDir = new File(args[1]); File logDatei = null; logDatei = origDir; // Informationen zum Arbeitsverzeichnis holen String pathToWorkDir = kostsimy.getConfigurationService().getPathToWorkDir(); /* Nicht vergessen in "src/main/resources/config/applicationContext-services.xml" beim * entsprechenden Modul die property anzugeben: <property name="configurationService" * ref="configurationService" /> */ // Konfiguration des Loggings, ein File Logger wird zustzlich erstellt LogConfigurator logConfigurator = (LogConfigurator) context.getBean("logconfigurator"); String logFileName = logConfigurator.configure(directoryOfLogfile.getAbsolutePath(), logDatei.getName()); File logFile = new File(logFileName); // Ab hier kann ins log geschrieben werden... 
LOGGER.logError(kostsimy.getTextResourceService().getText(MESSAGE_XML_HEADER)); LOGGER.logError(kostsimy.getTextResourceService().getText(MESSAGE_XML_START, ausgabeStart)); LOGGER.logError(kostsimy.getTextResourceService().getText(MESSAGE_XML_END)); LOGGER.logError(kostsimy.getTextResourceService().getText(MESSAGE_XML_INFO)); System.out.println("KOST-Simy"); System.out.println(""); if (!origDir.exists()) { // Das Original-Verzeichnis existiert nicht LOGGER.logError(kostsimy.getTextResourceService().getText(ERROR_IOE, kostsimy.getTextResourceService().getText(ERROR_NOORIGDIR, origDir.getAbsolutePath()))); System.out .println(kostsimy.getTextResourceService().getText(ERROR_NOORIGDIR, origDir.getAbsolutePath())); System.exit(1); } if (!repDir.exists()) { // Das Replica-Verzeichnis existiert nicht LOGGER.logError(kostsimy.getTextResourceService().getText(ERROR_IOE, kostsimy.getTextResourceService().getText(ERROR_NOREPDIR1, repDir.getAbsolutePath()))); System.out .println(kostsimy.getTextResourceService().getText(ERROR_NOREPDIR1, repDir.getAbsolutePath())); System.exit(1); } File xslOrig = new File("resources" + File.separator + "kost-simy.xsl"); File xslCopy = new File(directoryOfLogfile.getAbsolutePath() + File.separator + "kost-simy.xsl"); if (!xslCopy.exists()) { Util.copyFile(xslOrig, xslCopy); } // Informationen zur prozentualen Stichprobe holen String randomTest = kostsimy.getConfigurationService().getRandomTest(); /* Nicht vergessen in "src/main/resources/config/applicationContext-services.xml" beim * entsprechenden Modul die property anzugeben: <property name="configurationService" * ref="configurationService" /> */ int iRandomTest = 100; try { iRandomTest = Integer.parseInt(randomTest); } catch (Exception ex) { // unzulaessige Eingabe --> 50 wird gesetzt iRandomTest = 50; } if (iRandomTest > 100 || iRandomTest < 1) { // unzulaessige Eingabe --> 50 wird gesetzt iRandomTest = 50; } File tmpDir = new File(pathToWorkDir); /* bestehendes Workverzeichnis Abbruch wenn nicht leer, da am Schluss das Workverzeichnis * gelscht wird und entsprechend bestehende Dateien gelscht werden knnen */ if (tmpDir.exists()) { if (tmpDir.isDirectory()) { // Get list of file in the directory. When its length is not zero the folder is not empty. 
String[] files = tmpDir.list(); if (files.length > 0) { LOGGER.logError(kostsimy.getTextResourceService().getText(ERROR_IOE, kostsimy.getTextResourceService().getText(ERROR_WORKDIRECTORY_EXISTS, pathToWorkDir))); System.out.println( kostsimy.getTextResourceService().getText(ERROR_WORKDIRECTORY_EXISTS, pathToWorkDir)); System.exit(1); } } } // Im Pfad keine Sonderzeichen Programme knnen evtl abstrzen String patternStr = "[^!#\\$%\\(\\)\\+,\\-_\\.=@\\[\\]\\{\\}\\~:\\\\a-zA-Z0-9 ]"; Pattern pattern = Pattern.compile(patternStr); String name = tmpDir.getAbsolutePath(); String[] pathElements = name.split("/"); for (int i = 0; i < pathElements.length; i++) { String element = pathElements[i]; Matcher matcher = pattern.matcher(element); boolean matchFound = matcher.find(); if (matchFound) { LOGGER.logError(kostsimy.getTextResourceService().getText(ERROR_IOE, kostsimy.getTextResourceService().getText(ERROR_SPECIAL_CHARACTER, name))); System.out.println(kostsimy.getTextResourceService().getText(ERROR_SPECIAL_CHARACTER, name)); System.exit(1); } } // die Anwendung muss mindestens unter Java 6 laufen String javaRuntimeVersion = System.getProperty("java.vm.version"); if (javaRuntimeVersion.compareTo("1.6.0") < 0) { LOGGER.logError(kostsimy.getTextResourceService().getText(ERROR_IOE, kostsimy.getTextResourceService().getText(ERROR_WRONG_JRE))); System.out.println(kostsimy.getTextResourceService().getText(ERROR_WRONG_JRE)); System.exit(1); } // bestehendes Workverzeichnis wieder anlegen if (!tmpDir.exists()) { tmpDir.mkdir(); File origDirTmp = new File(tmpDir.getAbsolutePath() + File.separator + "orig"); File repDirTmp = new File(tmpDir.getAbsolutePath() + File.separator + "rep"); origDirTmp.mkdir(); repDirTmp.mkdir(); } // Im workverzeichnis besteht kein Schreibrecht if (!tmpDir.canWrite()) { LOGGER.logError(kostsimy.getTextResourceService().getText(ERROR_IOE, kostsimy.getTextResourceService().getText(ERROR_WORKDIRECTORY_NOTWRITABLE, tmpDir))); System.out.println(kostsimy.getTextResourceService().getText(ERROR_WORKDIRECTORY_NOTWRITABLE, tmpDir)); System.exit(1); } // Im Pfad keine Sonderzeichen --> Absturzgefahr name = origDir.getAbsolutePath(); pathElements = name.split("/"); for (int i = 0; i < pathElements.length; i++) { String element = pathElements[i]; Matcher matcher = pattern.matcher(element); boolean matchFound = matcher.find(); if (matchFound) { LOGGER.logError(kostsimy.getTextResourceService().getText(ERROR_IOE, kostsimy.getTextResourceService().getText(ERROR_SPECIAL_CHARACTER, name))); System.out.println(kostsimy.getTextResourceService().getText(ERROR_SPECIAL_CHARACTER, name)); System.exit(1); } } name = repDir.getAbsolutePath(); pathElements = name.split("/"); for (int i = 0; i < pathElements.length; i++) { String element = pathElements[i]; Matcher matcher = pattern.matcher(element); boolean matchFound = matcher.find(); if (matchFound) { LOGGER.logError(kostsimy.getTextResourceService().getText(ERROR_IOE, kostsimy.getTextResourceService().getText(ERROR_SPECIAL_CHARACTER, name))); System.out.println(kostsimy.getTextResourceService().getText(ERROR_SPECIAL_CHARACTER, name)); System.exit(1); } } LOGGER.logError(kostsimy.getTextResourceService().getText(MESSAGE_XML_IMAGE1)); float count = 0; int countNio = 0; int countIo = 0; float countVal = 0; int countNotVal = 0; float percentage = (float) 0.0; if (!origDir.isDirectory()) { // TODO: Bildervergleich zweier Dateien --> erledigt --> nur Marker if (repDir.isDirectory()) { // Das Replica-ist ein Verzeichnis, aber Original eine Datei 
LOGGER.logError(kostsimy.getTextResourceService().getText(ERROR_IOE, kostsimy.getTextResourceService().getText(ERROR_NOREPDIR2, repDir.getAbsolutePath()))); System.out.println( kostsimy.getTextResourceService().getText(ERROR_NOREPDIR2, repDir.getAbsolutePath())); System.exit(1); } boolean compFile = compFile(origDir, logFileName, directoryOfLogfile, repDir, tmpDir); float statIo = 0; int statNio = 0; float statUn = 0; if (compFile) { statIo = 100; } else { statNio = 100; } LOGGER.logError(kostsimy.getTextResourceService().getText(MESSAGE_XML_IMAGE2)); LOGGER.logError( kostsimy.getTextResourceService().getText(MESSAGE_XML_STATISTICS, statIo, statNio, statUn)); LOGGER.logError(kostsimy.getTextResourceService().getText(MESSAGE_XML_LOGEND)); // Zeitstempel End java.util.Date nowEnd = new java.util.Date(); java.text.SimpleDateFormat sdfEnd = new java.text.SimpleDateFormat("dd.MM.yyyy HH:mm:ss"); String ausgabeEnd = sdfEnd.format(nowEnd); ausgabeEnd = "<End>" + ausgabeEnd + "</End>"; Util.valEnd(ausgabeEnd, logFile); Util.amp(logFile); // Die Konfiguration hereinkopieren try { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setValidating(false); factory.setExpandEntityReferences(false); Document docConfig = factory.newDocumentBuilder().parse(configFile); NodeList list = docConfig.getElementsByTagName("configuration"); Element element = (Element) list.item(0); Document docLog = factory.newDocumentBuilder().parse(logFile); Node dup = docLog.importNode(element, true); docLog.getDocumentElement().appendChild(dup); FileWriter writer = new FileWriter(logFile); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ElementToStream(docLog.getDocumentElement(), baos); String stringDoc2 = new String(baos.toByteArray()); writer.write(stringDoc2); writer.close(); // Der Header wird dabei leider verschossen, wieder zurck ndern String newstring = kostsimy.getTextResourceService().getText(MESSAGE_XML_HEADER); String oldstring = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><KOSTSimyLog>"; Util.oldnewstring(oldstring, newstring, logFile); } catch (Exception e) { LOGGER.logError( "<Error>" + kostsimy.getTextResourceService().getText(ERROR_XML_UNKNOWN, e.getMessage())); System.out.println("Exception: " + e.getMessage()); } if (compFile) { // Lschen des Arbeitsverzeichnisses, falls eines angelegt wurde if (tmpDir.exists()) { Util.deleteDir(tmpDir); } // Validierte Datei valide System.exit(0); } else { // Lschen des Arbeitsverzeichnisses, falls eines angelegt wurde if (tmpDir.exists()) { Util.deleteDir(tmpDir); } // Fehler in Validierte Datei --> invalide System.exit(2); } } else { // TODO: Bildervergleich zweier Verzeichnisse --> in Arbeit --> nur Marker if (!repDir.isDirectory()) { // Das Replica-ist eine Datei, aber Original ein Ordner LOGGER.logError(kostsimy.getTextResourceService().getText(ERROR_IOE, kostsimy.getTextResourceService().getText(ERROR_NOREPDIR3, repDir.getAbsolutePath()))); System.out.println( kostsimy.getTextResourceService().getText(ERROR_NOREPDIR3, repDir.getAbsolutePath())); System.exit(1); } Map<String, File> fileMap = Util.getFileMap(origDir, false); Set<String> fileMapKeys = fileMap.keySet(); boolean other = false; for (Iterator<String> iterator = fileMapKeys.iterator(); iterator.hasNext();) { String entryName = iterator.next(); File newFile = fileMap.get(entryName); if (!newFile.isDirectory()) { origDir = newFile; count = count + 1; if ((origDir.getAbsolutePath().toLowerCase().endsWith(".pdf") || origDir.getAbsolutePath().toLowerCase().endsWith(".pdfa") || 
origDir.getAbsolutePath().toLowerCase().endsWith(".tif") || origDir.getAbsolutePath().toLowerCase().endsWith(".tiff") || origDir.getAbsolutePath().toLowerCase().endsWith(".jpeg") || origDir.getAbsolutePath().toLowerCase().endsWith(".jpg") || origDir.getAbsolutePath().toLowerCase().endsWith(".jpe") || origDir.getAbsolutePath().toLowerCase().endsWith(".jp2") || origDir.getAbsolutePath().toLowerCase().endsWith(".gif") || origDir.getAbsolutePath().toLowerCase().endsWith(".png") || origDir.getAbsolutePath().toLowerCase().endsWith(".bmp"))) { percentage = 100 / count * countVal; if (percentage < iRandomTest) { // if ( 100 / count * (countVal + 1) <= iRandomTest ) { countVal = countVal + 1; String origWithOutExt = FilenameUtils.removeExtension(origDir.getName()); File repFile = new File( repDir.getAbsolutePath() + File.separator + origWithOutExt + ".pdf"); if (!repFile.exists()) { repFile = new File( repDir.getAbsolutePath() + File.separator + origWithOutExt + ".pdfa"); if (!repFile.exists()) { repFile = new File( repDir.getAbsolutePath() + File.separator + origWithOutExt + ".tif"); if (!repFile.exists()) { repFile = new File(repDir.getAbsolutePath() + File.separator + origWithOutExt + ".tiff"); if (!repFile.exists()) { repFile = new File(repDir.getAbsolutePath() + File.separator + origWithOutExt + ".jpeg"); if (!repFile.exists()) { repFile = new File(repDir.getAbsolutePath() + File.separator + origWithOutExt + ".jpg"); if (!repFile.exists()) { repFile = new File(repDir.getAbsolutePath() + File.separator + origWithOutExt + ".jpe"); if (!repFile.exists()) { repFile = new File(repDir.getAbsolutePath() + File.separator + origWithOutExt + ".jp2"); if (!repFile.exists()) { repFile = new File(repDir.getAbsolutePath() + File.separator + origWithOutExt + ".gif"); if (!repFile.exists()) { repFile = new File(repDir.getAbsolutePath() + File.separator + origWithOutExt + ".png"); if (!repFile.exists()) { repFile = new File(repDir.getAbsolutePath() + File.separator + origWithOutExt + ".bmp"); if (!repFile.exists()) { other = true; LOGGER.logError(kostsimy .getTextResourceService() .getText(MESSAGE_XML_VALERGEBNIS)); LOGGER.logError(kostsimy .getTextResourceService() .getText(MESSAGE_XML_COMPFILE, origDir)); LOGGER.logError(kostsimy .getTextResourceService().getText( MESSAGE_XML_VALERGEBNIS_NOTVALIDATED)); LOGGER.logError( kostsimy.getTextResourceService() .getText(ERROR_NOREP, origDir.getName())); LOGGER.logError(kostsimy .getTextResourceService().getText( MESSAGE_XML_VALERGEBNIS_CLOSE)); } } } } } } } } } } } if (!other) { boolean compFile = compFile(origDir, logFileName, directoryOfLogfile, repFile, tmpDir); if (compFile) { // Vergleich bestanden countIo = countIo + 1; } else { // Vergleich nicht bestanden countNio = countNio + 1; } } } else { countNotVal = countNotVal + 1; LOGGER.logError(kostsimy.getTextResourceService().getText(MESSAGE_XML_VALERGEBNIS)); LOGGER.logError( kostsimy.getTextResourceService().getText(MESSAGE_XML_COMPFILE, origDir)); LOGGER.logError(kostsimy.getTextResourceService() .getText(MESSAGE_XML_VALERGEBNIS_NOTVALIDATED)); LOGGER.logError( kostsimy.getTextResourceService().getText(MESSAGE_XML_VALERGEBNIS_CLOSE)); } } else { countNotVal = countNotVal + 1; LOGGER.logError(kostsimy.getTextResourceService().getText(MESSAGE_XML_VALERGEBNIS)); LOGGER.logError(kostsimy.getTextResourceService().getText(MESSAGE_XML_COMPFILE, origDir)); LOGGER.logError( kostsimy.getTextResourceService().getText(MESSAGE_XML_VALERGEBNIS_NOTVALIDATED)); LOGGER.logError( 
kostsimy.getTextResourceService().getText(ERROR_INCORRECTFILEENDING, origDir)); LOGGER.logError(kostsimy.getTextResourceService().getText(MESSAGE_XML_VALERGEBNIS_CLOSE)); } } } if (countNio == 0 && countIo == 0) { // keine Dateien verglichen LOGGER.logError(kostsimy.getTextResourceService().getText(ERROR_INCORRECTFILEENDINGS)); System.out.println(kostsimy.getTextResourceService().getText(ERROR_INCORRECTFILEENDINGS)); } float statIo = 100 / (float) count * (float) countIo; float statNio = 100 / (float) count * (float) countNio; float statUn = 100 - statIo - statNio; LOGGER.logError(kostsimy.getTextResourceService().getText(MESSAGE_XML_IMAGE2)); LOGGER.logError( kostsimy.getTextResourceService().getText(MESSAGE_XML_STATISTICS, statIo, statNio, statUn)); LOGGER.logError(kostsimy.getTextResourceService().getText(MESSAGE_XML_LOGEND)); // Zeitstempel End java.util.Date nowEnd = new java.util.Date(); java.text.SimpleDateFormat sdfEnd = new java.text.SimpleDateFormat("dd.MM.yyyy HH:mm:ss"); String ausgabeEnd = sdfEnd.format(nowEnd); ausgabeEnd = "<End>" + ausgabeEnd + "</End>"; Util.valEnd(ausgabeEnd, logFile); Util.amp(logFile); // Die Konfiguration hereinkopieren try { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setValidating(false); factory.setExpandEntityReferences(false); Document docConfig = factory.newDocumentBuilder().parse(configFile); NodeList list = docConfig.getElementsByTagName("configuration"); Element element = (Element) list.item(0); Document docLog = factory.newDocumentBuilder().parse(logFile); Node dup = docLog.importNode(element, true); docLog.getDocumentElement().appendChild(dup); FileWriter writer = new FileWriter(logFile); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ElementToStream(docLog.getDocumentElement(), baos); String stringDoc2 = new String(baos.toByteArray()); writer.write(stringDoc2); writer.close(); // Der Header wird dabei leider verschossen, wieder zurck ndern String newstring = kostsimy.getTextResourceService().getText(MESSAGE_XML_HEADER); String oldstring = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><KOSTSimyLog>"; Util.oldnewstring(oldstring, newstring, logFile); } catch (Exception e) { LOGGER.logError( "<Error>" + kostsimy.getTextResourceService().getText(ERROR_XML_UNKNOWN, e.getMessage())); System.out.println("Exception: " + e.getMessage()); } if (countNio == 0 && countIo == 0) { // keine Dateien verglichen bestehendes Workverzeichnis ggf. lschen if (tmpDir.exists()) { Util.deleteDir(tmpDir); } System.exit(1); } else if (countNio == 0) { // bestehendes Workverzeichnis ggf. lschen if (tmpDir.exists()) { Util.deleteDir(tmpDir); } // alle Validierten Dateien valide System.exit(0); } else { // bestehendes Workverzeichnis ggf. lschen if (tmpDir.exists()) { Util.deleteDir(tmpDir); } // Fehler in Validierten Dateien --> invalide System.exit(2); } if (tmpDir.exists()) { Util.deleteDir(tmpDir); tmpDir.deleteOnExit(); } } }
From source file:com.nextdoor.bender.S3SnsNotifier.java
public static void main(String[] args) throws ParseException, InterruptedException, IOException {
    formatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'").withZoneUTC();

    /*
     * Parse cli arguments
     */
    Options options = new Options();
    options.addOption(Option.builder().longOpt("bucket").hasArg().required()
            .desc("Name of S3 bucket to list s3 objects from").build());
    options.addOption(Option.builder().longOpt("key-file").hasArg().required()
            .desc("Local file of S3 keys to process").build());
    options.addOption(
            Option.builder().longOpt("sns-arn").hasArg().required().desc("SNS arn to publish to").build());
    options.addOption(Option.builder().longOpt("throttle-ms").hasArg()
            .desc("Amount of ms to wait between publishing to SNS").build());
    options.addOption(Option.builder().longOpt("processed-file").hasArg()
            .desc("Local file to use to store processed S3 object names").build());
    options.addOption(Option.builder().longOpt("skip-processed").hasArg(false)
            .desc("Whether to skip S3 objects that have been processed").build());
    options.addOption(
            Option.builder().longOpt("dry-run").hasArg(false).desc("If set do not publish to SNS").build());

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = parser.parse(options, args);

    String bucket = cmd.getOptionValue("bucket");
    String keyFile = cmd.getOptionValue("key-file");
    String snsArn = cmd.getOptionValue("sns-arn");
    String processedFile = cmd.getOptionValue("processed-file", null);
    boolean skipProcessed = cmd.hasOption("skip-processed");
    dryRun = cmd.hasOption("dry-run");
    long throttle = Long.parseLong(cmd.getOptionValue("throttle-ms", "-1"));

    if (processedFile != null) {
        File file = new File(processedFile);
        if (!file.exists()) {
            logger.debug("creating local file to store processed s3 object names: " + processedFile);
            file.createNewFile();
        }
    }

    /*
     * Import S3 keys that have been processed
     */
    if (skipProcessed && processedFile != null) {
        try (BufferedReader br = new BufferedReader(new FileReader(processedFile))) {
            String line;
            while ((line = br.readLine()) != null) {
                alreadyPublished.add(line.trim());
            }
        }
    }

    /*
     * Setup writer for file containing processed S3 keys
     */
    FileWriter fw = null;
    BufferedWriter bw = null;
    if (processedFile != null) {
        fw = new FileWriter(processedFile, true);
        bw = new BufferedWriter(fw);
    }

    /*
     * Create clients
     */
    AmazonS3Client s3Client = new AmazonS3Client();
    AmazonSNSClient snsClient = new AmazonSNSClient();

    /*
     * Get S3 object list
     */
    try (BufferedReader br = new BufferedReader(new FileReader(keyFile))) {
        String line;
        while ((line = br.readLine()) != null) {
            String key = line.trim();

            if (alreadyPublished.contains(key)) {
                logger.info("skipping " + key);
                continue; // skip keys that have already been published
            }

            ObjectMetadata om = s3Client.getObjectMetadata(bucket, key);
            S3EventNotification s3Notification = getS3Notification(key, bucket, om.getContentLength());
            String json = s3Notification.toJson();

            /*
             * Publish to SNS
             */
            if (publish(snsArn, json, snsClient, key) && processedFile != null) {
                bw.write(key + "\n");
                bw.flush();
            }

            if (throttle != -1) {
                Thread.sleep(throttle);
            }
        }
    }

    if (processedFile != null) {
        bw.close();
        fw.close();
    }
}