List of usage examples for java.util.SortedSet.contains(Object)
boolean contains(Object o);
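Before the per-project listings below, here is a minimal standalone sketch of the contains(Object) contract (the class name SortedSetContainsDemo is made up for illustration). One caveat that applies to every example on this page: a TreeSet locates elements via compareTo() or its comparator, not equals(), so a comparator that is inconsistent with equals changes what contains() reports.

import java.util.SortedSet;
import java.util.TreeSet;

public class SortedSetContainsDemo {
    public static void main(String[] args) {
        SortedSet<String> names = new TreeSet<>();
        names.add("alpha");
        names.add("beta");

        // contains() returns true iff an equal element is present.
        System.out.println(names.contains("alpha")); // true
        System.out.println(names.contains("gamma")); // false

        // A TreeSet looks elements up with its comparator, not equals(),
        // so a comparator inconsistent with equals changes the result.
        SortedSet<String> ci = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        ci.add("Alpha");
        System.out.println(ci.contains("ALPHA")); // true: comparator-based lookup
    }
}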
From source file:org.zanata.client.commands.push.RawPushCommand.java
@Override
public void run() throws IOException {
    PushCommand.logOptions(log, getOpts());
    consoleInteractor.printfln(DisplayMode.Warning,
            "Using EXPERIMENTAL project type 'file'.");
    List<FileTypeInfo> serverAcceptedTypes = fileTypeInfoList(client);

    if (getOpts().getListFileTypes()) {
        printFileTypes(serverAcceptedTypes);
        return;
    }

    if (!pushSource() && !pushTrans()) {
        throw new RuntimeException("Invalid option for push type");
    }

    // only supporting single module for now
    File sourceDir = getOpts().getSrcDir();
    if (!sourceDir.exists()) {
        boolean enableModules = getOpts().getEnableModules();
        // TODO(files) remove warning when modules supported
        if (enableModules) {
            consoleInteractor.printfln(DisplayMode.Warning,
                    "enableModules=true but multi-modules not yet supported for this command. Using single module push.");
        }
        throw new RuntimeException("directory '" + sourceDir + "' does not exist - check "
                + getOpts().getSrcDirParameterName() + " option");
    }

    RawPushStrategy strat = new RawPushStrategy();
    strat.setPushOptions(getOpts());

    ImmutableList<FileTypeInfo> actualFileTypes =
            getActualFileTypes(serverAcceptedTypes, getOpts().getFileTypes());
    if (actualFileTypes.isEmpty()) {
        log.info("no valid types specified; nothing to do");
        return;
    }

    ImmutableList.Builder<String> sourceFileExtensionsBuilder = ImmutableList.builder();
    actualFileTypes.forEach(fileTypeInfo ->
            sourceFileExtensionsBuilder.addAll(fileTypeInfo.getSourceExtensions()));
    ImmutableList<String> sourceFileExtensions = sourceFileExtensionsBuilder.build();

    String[] srcFiles = strat.getSrcFiles(sourceDir, getOpts().getIncludes(),
            getOpts().getExcludes(), sourceFileExtensions, true, getOpts().getCaseSensitive());

    SortedSet<String> localDocNames = new TreeSet<String>(Arrays.asList(srcFiles));

    // TODO(files) handle obsolete document deletion
    consoleInteractor.printfln(DisplayMode.Warning,
            "Obsolete document removal is not yet implemented, no documents will be removed from the server.");

    SortedSet<String> docsToPush = localDocNames;
    if (getOpts().getFromDoc() != null) {
        if (!localDocNames.contains(getOpts().getFromDoc())) {
            log.error("Document with id {} not found, unable to start push from unknown document. Aborting.",
                    getOpts().getFromDoc());
            // FIXME should this be throwing an exception to properly abort?
            // need to see behaviour with modules
            return;
        }
        docsToPush = localDocNames.tailSet(getOpts().getFromDoc());
        int numSkippedDocs = localDocNames.size() - docsToPush.size();
        log.info("Skipping {} document(s) before {}.", numSkippedDocs, getOpts().getFromDoc());
    }

    if (docsToPush.isEmpty()) {
        log.info("no documents in module: {}; nothing to do", getOpts().getCurrentModule());
        return;
    } else {
        consoleInteractor.printfln("Found source documents:");
        for (String docName : localDocNames) {
            if (docsToPush.contains(docName)) {
                FileTypeName fileType = getFileTypeNameBySourceExtension(actualFileTypes,
                        FilenameUtils.getExtension(docName));
                consoleInteractor.printfln(" "
                        + Messages.format("push.info.documentToPush", docName, fileType.getName()));
            } else {
                consoleInteractor.printfln(Messages.format("push.info.skipDocument", docName));
            }
        }
    }

    if (pushTrans()) {
        if (getOpts().getLocaleMapList() == null)
            throw new ConfigException("pushType set to '" + getOpts().getPushType()
                    + "', but project has no locales configured");
        consoleInteractor.printfln(DisplayMode.Warning,
                Messages.format("push.warn.overrideTranslations", getOpts().getPushType()));

        if (getOpts().getPushType() == PushPullType.Both) {
            confirmWithUser("This will overwrite existing documents AND TRANSLATIONS on the server.\n");
            // , and delete obsolete documents.\n");
        } else if (getOpts().getPushType() == PushPullType.Trans) {
            confirmWithUser("This will overwrite existing TRANSLATIONS on the server.\n");
        }
    } else {
        // confirmWithUser("This will overwrite existing documents on the server, and delete obsolete documents.\n");
        confirmWithUser("This will overwrite existing documents on the server.\n");
    }

    boolean hasErrors = false;
    for (final String localDocName : docsToPush) {
        try {
            final String srcExtension = FilenameUtils.getExtension(localDocName);
            final FileTypeInfo fileType = getFileType(actualFileTypes, srcExtension);
            final String qualifiedDocName = qualifiedDocName(localDocName);
            if (pushSource()) {
                if (!getOpts().isDryRun()) {
                    boolean sourcePushed = pushSourceDocumentToServer(sourceDir, localDocName,
                            qualifiedDocName, fileType.getType().getName());
                    // ClientUtility.checkResult(putResponse, uri);
                    if (!sourcePushed) {
                        hasErrors = true;
                    }
                } else {
                    log.info("pushing source doc [qualifiedname={}] to server (skipped due to dry run)",
                            qualifiedDocName);
                }
            }

            if (pushTrans()) {
                Optional<String> translationFileExtension =
                        getTranslationFileExtension(fileType, srcExtension);

                strat.visitTranslationFiles(localDocName, new TranslationFilesVisitor() {
                    @Override
                    public void visit(LocaleMapping locale, File translatedDoc) {
                        log.info("pushing {} translation of {}", locale.getLocale(), qualifiedDocName);
                        pushDocumentToServer(qualifiedDocName, fileType.getType().getName(),
                                locale.getLocale(), translatedDoc);
                    }
                }, translationFileExtension);
            }
        } catch (IOException | RuntimeException e) {
            log.error("Operation failed: " + e.getMessage() + "\n\n"
                    + " To retry from the last document, please add the option: {}\n",
                    getOpts().buildFromDocArgument(localDocName));
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    if (hasErrors) {
        throw new RuntimeException("Push completed with errors, see log for details.");
    }
}
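The Zanata example above shows a common pairing: contains() validates a user-supplied resume point before tailSet() takes the remaining documents. A condensed sketch of that pattern, using hypothetical names (ResumeFrom, docsFrom):

import java.util.SortedSet;
import java.util.TreeSet;

public class ResumeFrom {
    // Validate the resume point with contains(), then take the remaining
    // documents with tailSet(); tailSet(from) includes from itself.
    static SortedSet<String> docsFrom(SortedSet<String> docs, String fromDoc) {
        if (fromDoc == null) {
            return docs; // no resume point: process everything
        }
        if (!docs.contains(fromDoc)) {
            throw new IllegalArgumentException("unknown document: " + fromDoc);
        }
        return docs.tailSet(fromDoc);
    }

    public static void main(String[] args) {
        SortedSet<String> docs = new TreeSet<>();
        docs.add("a.txt");
        docs.add("b.txt");
        docs.add("c.txt");
        System.out.println(docsFrom(docs, "b.txt")); // [b.txt, c.txt]
    }
}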
From source file:cerrla.modular.ModularPolicy.java
/**
 * Creates a new modular policy from an existing basic relational policy.
 *
 * @param newPol
 *            The basic policy with rules to transfer to this policy.
 * @param policyGenerator
 *            The generator that created this policy.
 */
public ModularPolicy(RelationalPolicy newPol, LocalCrossEntropyDistribution policyGenerator) {
    this(policyGenerator);
    policySize_ = newPol.size();
    isEvaluated_ = false;

    // Add the rules, creating ModularHoles where appropriate.
    SortedSet<GoalCondition> subGoals = new TreeSet<GoalCondition>(new GoalConditionComparator());
    for (PolicyItem reo : newPol.getRules()) {
        if (reo instanceof RelationalRule) {
            RelationalRule rule = (RelationalRule) reo;
            policyRules_.add(reo);

            // Checking for sub-goals
            // Only have each sub-goal once
            if (ProgramArgument.USE_MODULES.booleanValue()) {
                Collection<SpecificGoalCondition> goalConds = rule.getSpecificSubGoals();
                for (GoalCondition gc : goalConds) {
                    if (!subGoals.contains(gc)) {
                        ModularSubGoal subGoal = new ModularSubGoal(gc, rule);
                        subGoals.add(gc);
                        policyRules_.add(subGoal);
                        childrenPolicies_.put(rule, subGoal);
                    }
                }
            }

            // General sub-goals
            if (ProgramArgument.USE_GENERAL_MODULES.booleanValue()) {
                Collection<GeneralGoalCondition>[] generalisedConds = rule.getGeneralisedConditions();
                // Add all general conditions, and fill in the blanks
                // when necessary.
                for (GoalCondition gc : generalisedConds[0]) {
                    if (!subGoals.contains(gc)) {
                        ModularSubGoal subGoal = new ModularSubGoal(gc, rule);
                        subGoals.add(gc);
                        policyRules_.add(subGoal);
                        childrenPolicies_.put(rule, subGoal);
                    }
                }
                for (GoalCondition gc : generalisedConds[1]) {
                    if (!subGoals.contains(gc)) {
                        ModularSubGoal subGoal = new ModularSubGoal(gc, rule);
                        subGoals.add(gc);
                        policyRules_.add(subGoal);
                        childrenPolicies_.put(rule, subGoal);
                    }
                }
            }
        }
    }
}
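A side note on the contains()-then-add() guard used above: SortedSet.add() already returns false when an equal element is present, so the membership check and the insertion can be collapsed into a single call. A small sketch (the class InsertOnce and the goal strings are made up):

import java.util.SortedSet;
import java.util.TreeSet;

public class InsertOnce {
    public static void main(String[] args) {
        SortedSet<String> goals = new TreeSet<>();
        String gc = "clearRubble";

        // Style used in the example above: two lookups.
        if (!goals.contains(gc)) {
            goals.add(gc);
            System.out.println("registered " + gc);
        }

        // Equivalent single-lookup form: add() returns false on duplicates.
        if (goals.add("stackBlocks")) {
            System.out.println("registered stackBlocks");
        }
    }
}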
From source file:net.sourceforge.fenixedu.domain.Lesson.java
public boolean isDateValidToInsertSummary(YearMonthDay date) {
    YearMonthDay currentDate = new YearMonthDay();
    SortedSet<YearMonthDay> allLessonDatesEvenToday = getAllLessonDatesUntil(currentDate);
    return (allLessonDatesEvenToday.isEmpty() || date == null) ? false
            : allLessonDatesEvenToday.contains(date);
}
From source file:visolate.Visolate.java
public void mouseClicked(double x, double y, int modifiers) {
    SortedSet<Net> clickedNets = new TreeSet<Net>();
    model.getNetsAtPoint(x, y, 1.0 / display.getDPI(), clickedNets);

    if (manualTopology.isSelected()) {
        clearSelection();
        TopologyProcessor.mergeNets(clickedNets);
        return;
    }

    if ((selectedNet != null) && clickedNets.contains(selectedNet)) {
        Iterator<Net> it = (clickedNets.tailSet(selectedNet)).iterator();
        it.next();
        if (it.hasNext()) {
            selectedNet = it.next();
        } else {
            selectedNet = clickedNets.iterator().next();
        }
    } else {
        selectedNet = null;
        if (!clickedNets.isEmpty()) {
            selectedNet = clickedNets.iterator().next();
        }
    }

    Net selectedNetSave = selectedNet;
    if (!((modifiers & MouseEvent.CTRL_DOWN_MASK) != 0))
        clearSelection();
    selectedNet = selectedNetSave;

    if (selectedNet != null) {
        selectedNets.add(selectedNet);
        selectedNet.setHighlighted(true);
    }
}
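The Visolate example uses contains() to check whether the current selection is still among the nets under the mouse, then tailSet() to step to its successor, wrapping to the first element. A distilled sketch of that cycling idiom (CycleSelection and next() are hypothetical names):

import java.util.Iterator;
import java.util.SortedSet;
import java.util.TreeSet;

public class CycleSelection {
    // If the current selection is in the set, advance to the element after
    // it (wrapping to the first element); otherwise start from the front.
    static <T> T next(SortedSet<T> set, T current) {
        if (current != null && set.contains(current)) {
            Iterator<T> it = set.tailSet(current).iterator();
            it.next(); // skip current itself; tailSet(current) includes it
            return it.hasNext() ? it.next() : set.first();
        }
        return set.isEmpty() ? null : set.first();
    }

    public static void main(String[] args) {
        SortedSet<Integer> nets = new TreeSet<>();
        nets.add(1);
        nets.add(2);
        nets.add(3);
        System.out.println(next(nets, 2)); // 3
        System.out.println(next(nets, 3)); // 1 (wraps around)
    }
}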
From source file:net.sourceforge.subsonic.service.SearchService.java
private void cleanMusicFileInfo() {
    // Create sorted set of albums.
    SortedSet<String> albums = new TreeSet<String>();
    for (Line line : cachedAlbums) {
        albums.add(line.file.getPath());
    }

    // Page through music_file_info table.
    int offset = 0;
    int count = 100;
    while (true) {
        List<MusicFileInfo> infos = musicInfoService.getAllMusicFileInfos(offset, count);
        if (infos.isEmpty()) {
            break;
        }
        offset += infos.size();

        for (MusicFileInfo info : infos) {
            // Disable row if album does not exist on disk any more.
            if (info.isEnabled() && !albums.contains(info.getPath())) {
                info.setEnabled(false);
                musicInfoService.updateMusicFileInfo(info);
                LOG.debug("Logically deleting info for album " + info.getPath() + ". Not found on disk.");
            }
            // Enable row if album has reoccurred on disk.
            else if (!info.isEnabled() && albums.contains(info.getPath())) {
                info.setEnabled(true);
                musicInfoService.updateMusicFileInfo(info);
                LOG.debug("Logically undeleting info for album " + info.getPath() + ". Found on disk.");
            }
        }
    }
}
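The Subsonic example is a reconciliation pass: a sorted set of paths found on disk drives enable/disable decisions for database rows via contains(). A compact sketch of the same idea (Reconcile is a made-up class; List.of requires Java 9+):

import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;

public class Reconcile {
    public static void main(String[] args) {
        // Paths that currently exist on disk.
        SortedSet<String> onDisk = new TreeSet<>(List.of("/music/a", "/music/b"));
        // Rows currently in the database.
        List<String> dbRows = List.of("/music/a", "/music/c");

        for (String row : dbRows) {
            // Membership on disk decides whether the row stays enabled.
            boolean enabled = onDisk.contains(row);
            System.out.println(row + " -> " + (enabled ? "enable" : "disable"));
        }
    }
}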
From source file:org.jahia.tools.maven.plugins.LegalArtifactAggregator.java
private void processJarFile(InputStream inputStream, String jarFilePath, JarMetadata contextJarMetadata,
        boolean processMavenPom, int level, boolean lookForNotice, boolean lookForLicense,
        boolean processingSources) throws IOException {
    // if we don't need to find either a license or notice, don't process the jar at all
    if (!lookForLicense && !lookForNotice) {
        return;
    }

    final String indent = getIndent(level);
    output(indent, "Processing JAR " + jarFilePath + "...", false, true);

    // JarFile realJarFile = new JarFile(jarFile);
    JarInputStream jarInputStream = new JarInputStream(inputStream);
    String bundleLicense = null;
    Manifest manifest = jarInputStream.getManifest();
    if (manifest != null && manifest.getMainAttributes() != null) {
        bundleLicense = manifest.getMainAttributes().getValue("Bundle-License");
        if (bundleLicense != null) {
            output(indent, "Found Bundle-License attribute with value:" + bundleLicense);
            KnownLicense knownLicense = getKnowLicenseByName(bundleLicense);
            // this data is not reliable, especially on the ServiceMix repackaged bundles
        }
    }
    String pomFilePath = null;
    byte[] pomByteArray = null;

    final String jarFileName = getJarFileName(jarFilePath);
    if (contextJarMetadata == null) {
        contextJarMetadata = jarDatabase.get(jarFileName);
        if (contextJarMetadata == null) {
            // compute project name
            contextJarMetadata = new JarMetadata(jarFilePath, jarFileName);
        }
        jarDatabase.put(jarFileName, contextJarMetadata);
    }

    Notice notice;
    JarEntry curJarEntry = null;
    while ((curJarEntry = jarInputStream.getNextJarEntry()) != null) {
        if (!curJarEntry.isDirectory()) {
            final String fileName = curJarEntry.getName();
            if (lookForNotice && isNotice(fileName, jarFilePath)) {
                output(indent, "Processing notice found in " + curJarEntry + "...");
                InputStream noticeInputStream = jarInputStream;
                List<String> noticeLines = IOUtils.readLines(noticeInputStream);
                notice = new Notice(noticeLines);

                Map<String, Notice> notices = contextJarMetadata.getNoticeFiles();
                if (notices == null) {
                    notices = new TreeMap<>();
                    notices.put(fileName, notice);
                    output(indent, "Found first notice " + curJarEntry);
                } else if (!notices.containsValue(notice)) {
                    output(indent, "Found additional notice " + curJarEntry);
                    notices.put(fileName, notice);
                } else {
                    output(indent, "Duplicated notice in " + curJarEntry);
                    notices.put(fileName, notice);
                    duplicatedNotices.add(jarFilePath);
                }
                // IOUtils.closeQuietly(noticeInputStream);
            } else if (processMavenPom && fileName.endsWith("pom.xml")) {
                // remember pom file path in case we need it
                pomFilePath = curJarEntry.getName();
                ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                IOUtils.copy(jarInputStream, byteArrayOutputStream);
                pomByteArray = byteArrayOutputStream.toByteArray();
            } else if (lookForLicense && isLicense(fileName, jarFilePath)) {
                output(indent, "Processing license found in " + curJarEntry + "...");
                InputStream licenseInputStream = jarInputStream;
                List<String> licenseLines = IOUtils.readLines(licenseInputStream);

                LicenseFile licenseFile = new LicenseFile(jarFilePath, fileName, jarFilePath, licenseLines);

                resolveKnownLicensesByText(licenseFile);

                if (StringUtils.isNotBlank(licenseFile.getAdditionalLicenseText())
                        && StringUtils.isNotBlank(licenseFile.getAdditionalLicenseText().trim())) {
                    KnownLicense knownLicense = new KnownLicense();
                    knownLicense.setId(FilenameUtils.getBaseName(jarFilePath) + "-additional-terms");
                    knownLicense.setName("Additional license terms from "
                            + FilenameUtils.getBaseName(jarFilePath));
                    List<TextVariant> textVariants = new ArrayList<>();
                    TextVariant textVariant = new TextVariant();
                    textVariant.setId("default");
                    textVariant.setDefaultVariant(true);
                    textVariant.setText(Pattern.quote(licenseFile.getAdditionalLicenseText()));
                    textVariants.add(textVariant);
                    knownLicense.setTextVariants(textVariants);
                    knownLicense.setTextToUse(licenseFile.getAdditionalLicenseText());
                    knownLicense.setViral(licenseFile.getText().toLowerCase().contains("gpl"));
                    knownLicenses.getLicenses().put(knownLicense.getId(), knownLicense);
                    licenseFile.getKnownLicenses().add(knownLicense);
                    licenseFile.getKnownLicenseKeys().add(knownLicense.getId());
                }

                for (KnownLicense knownLicense : licenseFile.getKnownLicenses()) {
                    SortedSet<LicenseFile> licenseFiles = knownLicensesFound.get(knownLicense);
                    if (licenseFiles != null) {
                        if (!licenseFiles.contains(licenseFile)) {
                            licenseFiles.add(licenseFile);
                        }
                        knownLicensesFound.put(knownLicense, licenseFiles);
                    } else {
                        licenseFiles = new TreeSet<>();
                        licenseFiles.add(licenseFile);
                        knownLicensesFound.put(knownLicense, licenseFiles);
                    }
                }

                Map<String, LicenseFile> licenseFiles = contextJarMetadata.getLicenseFiles();
                if (licenseFiles == null) {
                    licenseFiles = new TreeMap<>();
                }
                if (licenseFiles.containsKey(fileName)) {
                    // warning we already have a license file here, what should we do ?
                    output(indent, "License file already exists for " + jarFilePath
                            + " will override it !", true, false);
                    licenseFiles.remove(fileName);
                }
                licenseFiles.put(fileName, licenseFile);
                // IOUtils.closeQuietly(licenseInputStream);
            } else if (fileName.endsWith(".jar")) {
                InputStream embeddedJarInputStream = jarInputStream;
                ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                IOUtils.copy(embeddedJarInputStream, byteArrayOutputStream);
                final JarMetadata embeddedJarMetadata = new JarMetadata(jarFilePath, getJarFileName(fileName));

                if (embeddedJarMetadata != null) {
                    embeddedJarMetadata.setJarContents(byteArrayOutputStream.toByteArray());
                    contextJarMetadata.getEmbeddedJars().add(embeddedJarMetadata);
                }
            } else if (fileName.endsWith(".class")) {
                String className = fileName.substring(0, fileName.length() - ".class".length())
                        .replaceAll("/", ".");
                int lastPoint = className.lastIndexOf(".");
                String packageName = null;
                if (lastPoint > 0) {
                    packageName = className.substring(0, lastPoint);
                    SortedSet<String> currentJarPackages = jarDatabase
                            .get(FilenameUtils.getBaseName(jarFilePath)).getPackages();
                    if (currentJarPackages == null) {
                        currentJarPackages = new TreeSet<>();
                    }
                    currentJarPackages.add(packageName);
                }
            }
        }
        jarInputStream.closeEntry();
    }

    jarInputStream.close();
    jarInputStream = null;

    if (!contextJarMetadata.getEmbeddedJars().isEmpty()) {
        for (JarMetadata embeddedJarMetadata : contextJarMetadata.getEmbeddedJars()) {
            if (embeddedJarMetadata.getJarContents() != null) {
                ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(
                        embeddedJarMetadata.getJarContents());
                processJarFile(byteArrayInputStream, contextJarMetadata.toString(), null, true, level,
                        true, true, processingSources);
            } else {
                output(indent, "Couldn't find dependency for embedded JAR " + contextJarMetadata, true, false);
            }
        }
    }

    if (processMavenPom) {
        if (pomFilePath == null) {
            output(indent, "No POM found in " + jarFilePath);
        } else {
            output(indent, "Processing POM found at " + pomFilePath + " in " + jarFilePath + "...");
            ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(pomByteArray);
            processJarPOM(byteArrayInputStream, pomFilePath, jarFilePath, contextJarMetadata, lookForNotice,
                    lookForLicense, contextJarMetadata.getEmbeddedJars(), level + 1, processingSources);
        }
    }

    if (lookForLicense || lookForNotice) {
        if (lookForLicense) {
            output(indent, "No license found in " + jarFilePath);
        }
        if (lookForNotice) {
            output(indent, "No notice found in " + jarFilePath);
        }

        if (pomFilePath == null && lookForLicense && lookForNotice) {
            if (StringUtils.isBlank(contextJarMetadata.getVersion())) {
                output(indent, "Couldn't resolve version for JAR " + contextJarMetadata
                        + ", can't query Maven Central repository without version !");
            } else {
                List<Artifact> mavenCentralArtifacts = findArtifactInMavenCentral(contextJarMetadata.getName(),
                        contextJarMetadata.getVersion(), contextJarMetadata.getClassifier());
                if (mavenCentralArtifacts != null && mavenCentralArtifacts.size() == 1) {
                    Artifact mavenCentralArtifact = mavenCentralArtifacts.get(0);
                    Artifact resolvedArtifact = resolveArtifact(mavenCentralArtifact, level);
                    if (resolvedArtifact != null) {
                        // we have a copy of the local artifact, let's request the sources for it.
                        if (!processingSources && !"sources".equals(contextJarMetadata.getClassifier())) {
                            final Artifact artifact = new DefaultArtifact(resolvedArtifact.getGroupId(),
                                    resolvedArtifact.getArtifactId(), "sources", "jar",
                                    resolvedArtifact.getVersion());
                            File sourceJar = getArtifactFile(artifact, level);
                            if (sourceJar != null && sourceJar.exists()) {
                                FileInputStream sourceJarInputStream = new FileInputStream(sourceJar);
                                processJarFile(sourceJarInputStream, sourceJar.getPath(), contextJarMetadata,
                                        false, level + 1, lookForNotice, lookForLicense, true);
                                IOUtils.closeQuietly(sourceJarInputStream);
                            }
                        } else {
                            // we are already processing a sources artifact, we need to load the pom
                            // artifact to extract information from there
                            final Artifact artifact = new DefaultArtifact(resolvedArtifact.getGroupId(),
                                    resolvedArtifact.getArtifactId(), null, "pom",
                                    resolvedArtifact.getVersion());
                            File artifactPom = getArtifactFile(artifact, level);
                            if (artifactPom != null && artifactPom.exists()) {
                                output(indent, "Processing POM for " + artifact + "...");
                                processPOM(lookForNotice, lookForLicense, jarFilePath, contextJarMetadata,
                                        contextJarMetadata.getEmbeddedJars(), level + 1,
                                        new FileInputStream(artifactPom), processingSources);
                            }
                        }
                    } else {
                        output(indent, "===> Couldn't resolve artifact " + mavenCentralArtifact
                                + " in Maven Central. Please resolve license and notice files manually!",
                                false, true);
                    }
                } else {
                    output(indent, "===> Couldn't find nor POM, license or notice. Please check manually!",
                            false, true);
                }
            }
        }
    }
    output(indent, "Done processing JAR " + jarFilePath + ".", false, true);
}
From source file:org.torproject.ernie.db.ArchiveReader.java
public ArchiveReader(RelayDescriptorParser rdp, String archivesDir, boolean keepImportHistory) {
    int parsedFiles = 0, ignoredFiles = 0;
    Logger logger = Logger.getLogger(ArchiveReader.class.getName());
    SortedSet<String> archivesImportHistory = new TreeSet<String>();
    File archivesImportHistoryFile = new File("stats/archives-import-history");
    if (keepImportHistory && archivesImportHistoryFile.exists()) {
        try {
            BufferedReader br = new BufferedReader(new FileReader(archivesImportHistoryFile));
            String line = null;
            while ((line = br.readLine()) != null) {
                archivesImportHistory.add(line);
            }
            br.close();
        } catch (IOException e) {
            logger.log(Level.WARNING, "Could not read in archives import history file. Skipping.");
        }
    }
    if (new File(archivesDir).exists()) {
        logger.fine("Importing files in directory " + archivesDir + "/...");
        Stack<File> filesInInputDir = new Stack<File>();
        filesInInputDir.add(new File(archivesDir));
        List<File> problems = new ArrayList<File>();
        while (!filesInInputDir.isEmpty()) {
            File pop = filesInInputDir.pop();
            if (pop.isDirectory()) {
                for (File f : pop.listFiles()) {
                    filesInInputDir.add(f);
                }
            } else {
                if (rdp != null) {
                    try {
                        BufferedInputStream bis = null;
                        if (keepImportHistory && archivesImportHistory.contains(pop.getName())) {
                            ignoredFiles++;
                            continue;
                        } else if (pop.getName().endsWith(".tar.bz2")) {
                            logger.warning("Cannot parse compressed tarball " + pop.getAbsolutePath()
                                    + ". Skipping.");
                            continue;
                        } else if (pop.getName().endsWith(".bz2")) {
                            FileInputStream fis = new FileInputStream(pop);
                            BZip2CompressorInputStream bcis = new BZip2CompressorInputStream(fis);
                            bis = new BufferedInputStream(bcis);
                        } else {
                            FileInputStream fis = new FileInputStream(pop);
                            bis = new BufferedInputStream(fis);
                        }
                        if (keepImportHistory) {
                            archivesImportHistory.add(pop.getName());
                        }
                        ByteArrayOutputStream baos = new ByteArrayOutputStream();
                        int len;
                        byte[] data = new byte[1024];
                        while ((len = bis.read(data, 0, 1024)) >= 0) {
                            baos.write(data, 0, len);
                        }
                        bis.close();
                        byte[] allData = baos.toByteArray();
                        rdp.parse(allData);
                        parsedFiles++;
                    } catch (IOException e) {
                        problems.add(pop);
                        if (problems.size() > 3) {
                            break;
                        }
                    }
                }
            }
        }
        if (problems.isEmpty()) {
            logger.fine("Finished importing files in directory " + archivesDir + "/.");
        } else {
            StringBuilder sb = new StringBuilder("Failed importing files in directory " + archivesDir + "/:");
            int printed = 0;
            for (File f : problems) {
                sb.append("\n " + f.getAbsolutePath());
                if (++printed >= 3) {
                    sb.append("\n ... more");
                    break;
                }
            }
        }
    }
    if (keepImportHistory) {
        try {
            archivesImportHistoryFile.getParentFile().mkdirs();
            BufferedWriter bw = new BufferedWriter(new FileWriter(archivesImportHistoryFile));
            for (String line : archivesImportHistory) {
                bw.write(line + "\n");
            }
            bw.close();
        } catch (IOException e) {
            logger.log(Level.WARNING, "Could not write archives import history file.");
        }
    }
    logger.info("Finished importing relay descriptors from local directory:\nParsed " + parsedFiles
            + ", ignored " + ignoredFiles + " files.");
}
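This reader, and the bridge and archive importers later on this page, all rely on the same idiom: remember processed file names in a sorted set persisted between runs, and skip a file when contains() says it was already imported. A minimal sketch of just that idiom (ImportHistory and shouldProcess() are hypothetical names):

import java.util.SortedSet;
import java.util.TreeSet;

public class ImportHistory {
    private final SortedSet<String> history = new TreeSet<>();

    // Returns true the first time a file name is seen, false afterwards.
    boolean shouldProcess(String fileName) {
        if (history.contains(fileName)) {
            return false; // already imported on a previous run
        }
        history.add(fileName); // record it so later runs skip it
        return true;
    }

    public static void main(String[] args) {
        ImportHistory h = new ImportHistory();
        System.out.println(h.shouldProcess("file1")); // true
        System.out.println(h.shouldProcess("file1")); // false
    }
}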
From source file:org.torproject.ernie.db.BridgeSnapshotReader.java
public BridgeSnapshotReader(BridgeDescriptorParser bdp, String bridgeDirectoriesDir) {
    Logger logger = Logger.getLogger(BridgeSnapshotReader.class.getName());
    SortedSet<String> parsed = new TreeSet<String>();
    File bdDir = new File(bridgeDirectoriesDir);
    File pbdFile = new File("stats/parsed-bridge-directories");
    boolean modified = false;
    if (bdDir.exists()) {
        if (pbdFile.exists()) {
            logger.fine("Reading file " + pbdFile.getAbsolutePath() + "...");
            try {
                BufferedReader br = new BufferedReader(new FileReader(pbdFile));
                String line = null;
                while ((line = br.readLine()) != null) {
                    parsed.add(line);
                }
                br.close();
                logger.fine("Finished reading file " + pbdFile.getAbsolutePath() + ".");
            } catch (IOException e) {
                logger.log(Level.WARNING, "Failed reading file " + pbdFile.getAbsolutePath() + "!", e);
                return;
            }
        }
        logger.fine("Importing files in directory " + bridgeDirectoriesDir + "/...");
        Stack<File> filesInInputDir = new Stack<File>();
        filesInInputDir.add(bdDir);
        while (!filesInInputDir.isEmpty()) {
            File pop = filesInInputDir.pop();
            if (pop.isDirectory()) {
                for (File f : pop.listFiles()) {
                    filesInInputDir.add(f);
                }
            } else if (!parsed.contains(pop.getName())) {
                try {
                    FileInputStream in = new FileInputStream(pop);
                    if (in.available() > 0) {
                        GzipCompressorInputStream gcis = new GzipCompressorInputStream(in);
                        TarArchiveInputStream tais = new TarArchiveInputStream(gcis);
                        BufferedInputStream bis = new BufferedInputStream(tais);
                        String fn = pop.getName();
                        String dateTime = fn.substring(11, 21) + " " + fn.substring(22, 24) + ":"
                                + fn.substring(24, 26) + ":" + fn.substring(26, 28);
                        while ((tais.getNextTarEntry()) != null) {
                            ByteArrayOutputStream baos = new ByteArrayOutputStream();
                            int len;
                            byte[] data = new byte[1024];
                            while ((len = bis.read(data, 0, 1024)) >= 0) {
                                baos.write(data, 0, len);
                            }
                            byte[] allData = baos.toByteArray();
                            if (allData.length == 0) {
                                continue;
                            }
                            String ascii = new String(allData, "US-ASCII");
                            BufferedReader br3 = new BufferedReader(new StringReader(ascii));
                            String firstLine = null;
                            while ((firstLine = br3.readLine()) != null) {
                                if (firstLine.startsWith("@")) {
                                    continue;
                                } else {
                                    break;
                                }
                            }
                            if (firstLine.startsWith("r ")) {
                                bdp.parse(allData, dateTime, false);
                            } else {
                                int start = -1, sig = -1, end = -1;
                                String startToken = firstLine.startsWith("router ") ? "router "
                                        : "extra-info ";
                                String sigToken = "\nrouter-signature\n";
                                String endToken = "\n-----END SIGNATURE-----\n";
                                while (end < ascii.length()) {
                                    start = ascii.indexOf(startToken, end);
                                    if (start < 0) {
                                        break;
                                    }
                                    sig = ascii.indexOf(sigToken, start);
                                    if (sig < 0) {
                                        break;
                                    }
                                    sig += sigToken.length();
                                    end = ascii.indexOf(endToken, sig);
                                    if (end < 0) {
                                        break;
                                    }
                                    end += endToken.length();
                                    byte[] descBytes = new byte[end - start];
                                    System.arraycopy(allData, start, descBytes, 0, end - start);
                                    bdp.parse(descBytes, dateTime, false);
                                }
                            }
                        }
                    }
                    in.close();
                    /* Let's give some memory back, or we'll run out of it. */
                    System.gc();
                    parsed.add(pop.getName());
                    modified = true;
                } catch (IOException e) {
                    logger.log(Level.WARNING, "Could not parse bridge snapshot " + pop.getName() + "!", e);
                    continue;
                }
            }
        }
        logger.fine("Finished importing files in directory " + bridgeDirectoriesDir + "/.");
        if (!parsed.isEmpty() && modified) {
            logger.fine("Writing file " + pbdFile.getAbsolutePath() + "...");
            try {
                pbdFile.getParentFile().mkdirs();
                BufferedWriter bw = new BufferedWriter(new FileWriter(pbdFile));
                for (String f : parsed) {
                    bw.append(f + "\n");
                }
                bw.close();
                logger.fine("Finished writing file " + pbdFile.getAbsolutePath() + ".");
            } catch (IOException e) {
                logger.log(Level.WARNING, "Failed writing file " + pbdFile.getAbsolutePath() + "!", e);
            }
        }
    }
}
From source file:com.collabnet.ccf.teamforge.TFTrackerHandler.java
/**
 * Updates fields of a tracker definition. This method currently does not
 * support conflict detection.
 *
 * @param ga
 * @param trackerId
 *            tracker in question
 * @param fieldsToBeChanged
 *            fields to be adjusted
 * @param connection
 * @return
 * @throws RemoteException
 */
public TrackerDO updateTrackerMetaData(GenericArtifact ga, String trackerId,
        Map<String, SortedSet<String>> fieldsToBeChanged, Connection connection) throws RemoteException {
    Exception exception = null;
    for (String fieldName : fieldsToBeChanged.keySet()) {
        boolean updated = false;
        while (!updated) {
            updated = true;
            try {
                // we have to refetch this data in the loop to avoid version
                // mismatch exceptions
                TrackerFieldDO[] fields = connection.getTrackerClient().getFields(trackerId);
                // find field in question (we do not create new fields yet)
                TrackerFieldDO trackerField = null;
                for (TrackerFieldDO field : fields) {
                    if (field.getName().equals(fieldName)) {
                        trackerField = field;
                        break;
                    }
                }
                if (trackerField == null) {
                    throw new CCFRuntimeException(
                            "Field " + fieldName + " of tracker " + trackerId + " could not be found.");
                }
                // find out whether field is single select or multi select
                boolean fieldIsSingleSelect = trackerField.getFieldType()
                        .equals(TrackerFieldDO.FIELD_TYPE_SINGLE_SELECT);
                SortedSet<String> anticipatedFieldValues = fieldsToBeChanged.get(fieldName);
                List<TrackerFieldValueDO> deletedFieldValues = new ArrayList<TrackerFieldValueDO>();
                Set<String> addedFieldValues = new HashSet<String>();
                Map<String, String> currentValues = new HashMap<String, String>();
                TrackerFieldValueDO[] currentFieldValues = trackerField.getFieldValues();
                for (TrackerFieldValueDO currentFieldValue : currentFieldValues) {
                    currentValues.put(currentFieldValue.getValue(), currentFieldValue.getId());
                    if (!anticipatedFieldValues.contains(currentFieldValue.getValue())) {
                        deletedFieldValues.add(currentFieldValue);
                    }
                }
                for (String anticipatedFieldValue : anticipatedFieldValues) {
                    if (!currentValues.containsKey(anticipatedFieldValue)) {
                        addedFieldValues.add(anticipatedFieldValue);
                    }
                }
                if (deletedFieldValues.isEmpty() && addedFieldValues.isEmpty()) {
                    continue;
                }
                List<TrackerFieldValueDO> updatedValuesList = new ArrayList<TrackerFieldValueDO>();
                for (String anticipatedFieldValue : anticipatedFieldValues) {
                    TrackerFieldValueDO fieldValue = new TrackerFieldValueDO(connection.supports60(),
                            connection.supports50());
                    fieldValue.setIsDefault(false);
                    fieldValue.setValue(anticipatedFieldValue);
                    fieldValue.setId(currentValues.get(anticipatedFieldValue));
                    updatedValuesList.add(fieldValue);
                }
                // we cannot delete field values if those are still used by
                // tracker items
                for (TrackerFieldValueDO deletedFieldValue : deletedFieldValues) {
                    if (isFieldValueUsed(trackerId, fieldName, deletedFieldValue, fieldIsSingleSelect,
                            connection)) {
                        log.warn("Could not delete field value " + deletedFieldValue.getValue() + " of field "
                                + fieldName + " in tracker " + trackerId
                                + " because there are still artifacts that use this value.");
                        int insertIndex = getInsertIndex(updatedValuesList, deletedFieldValue);
                        updatedValuesList.add(insertIndex, deletedFieldValue);
                    }
                }
                TrackerFieldValueDO[] fieldValues = new TrackerFieldValueDO[updatedValuesList.size()];
                updatedValuesList.toArray(fieldValues);
                trackerField.setFieldValues(fieldValues);
                connection.getTrackerClient().setField(trackerId, trackerField);
            } catch (AxisFault e) {
                javax.xml.namespace.QName faultCode = e.getFaultCode();
                if (!faultCode.getLocalPart().equals("VersionMismatchFault")) {
                    // throw e;
                    // we do not throw an error yet since we like to give
                    // other fields the chance to be properly updated
                    log.error("During TF meta data update, an error occured, proceeding to give other fields a chance to be updated ..."
                            + e.getMessage(), e);
                    exception = e;
                    continue;
                }
                updated = false;
                // we currently do not support conflict detection for meta
                // data updates
                logConflictResolutor.warn("Stale tracker meta data update, will override in any case ...:", e);
            }
        }
    }
    if (exception != null) {
        throw new CCFRuntimeException("During TF tracker meta data update, at least one exception occured.",
                exception);
    }
    return connection.getTrackerClient().getTrackerData(trackerId);
}
From source file:org.torproject.collector.relaydescs.ArchiveReader.java
/** Reads all descriptors from the given directory, possibly using a
 * parse history file, and passes them to the given descriptor
 * parser. */
public ArchiveReader(RelayDescriptorParser rdp, File archivesDirectory, File statsDirectory,
        boolean keepImportHistory) {
    if (rdp == null || archivesDirectory == null || statsDirectory == null) {
        throw new IllegalArgumentException();
    }
    rdp.setArchiveReader(this);
    int parsedFiles = 0;
    int ignoredFiles = 0;
    SortedSet<String> archivesImportHistory = new TreeSet<String>();
    File archivesImportHistoryFile = new File(statsDirectory, "archives-import-history");
    if (keepImportHistory && archivesImportHistoryFile.exists()) {
        try {
            BufferedReader br = new BufferedReader(new FileReader(archivesImportHistoryFile));
            String line = null;
            while ((line = br.readLine()) != null) {
                archivesImportHistory.add(line);
            }
            br.close();
        } catch (IOException e) {
            logger.warn("Could not read in archives import history file. Skipping.", e);
        }
    }
    if (archivesDirectory.exists()) {
        logger.debug("Importing files in directory " + archivesDirectory + "/...");
        Stack<File> filesInInputDir = new Stack<File>();
        filesInInputDir.add(archivesDirectory);
        List<File> problems = new ArrayList<File>();
        Set<File> filesToRetry = new HashSet<File>();
        while (!filesInInputDir.isEmpty()) {
            File pop = filesInInputDir.pop();
            if (pop.isDirectory()) {
                for (File f : pop.listFiles()) {
                    filesInInputDir.add(f);
                }
            } else {
                if (rdp != null) {
                    try {
                        BufferedInputStream bis = null;
                        if (keepImportHistory && archivesImportHistory.contains(pop.getName())) {
                            ignoredFiles++;
                            continue;
                        } else if (pop.getName().endsWith(".tar.bz2")) {
                            logger.warn("Cannot parse compressed tarball " + pop.getAbsolutePath()
                                    + ". Skipping.");
                            continue;
                        } else if (pop.getName().endsWith(".bz2")) {
                            FileInputStream fis = new FileInputStream(pop);
                            BZip2CompressorInputStream bcis = new BZip2CompressorInputStream(fis);
                            bis = new BufferedInputStream(bcis);
                        } else {
                            FileInputStream fis = new FileInputStream(pop);
                            bis = new BufferedInputStream(fis);
                        }
                        ByteArrayOutputStream baos = new ByteArrayOutputStream();
                        int len;
                        byte[] data = new byte[1024];
                        while ((len = bis.read(data, 0, 1024)) >= 0) {
                            baos.write(data, 0, len);
                        }
                        bis.close();
                        byte[] allData = baos.toByteArray();
                        boolean stored = rdp.parse(allData);
                        if (!stored) {
                            filesToRetry.add(pop);
                            continue;
                        }
                        if (keepImportHistory) {
                            archivesImportHistory.add(pop.getName());
                        }
                        parsedFiles++;
                    } catch (IOException e) {
                        problems.add(pop);
                        if (problems.size() > 3) {
                            break;
                        }
                    }
                }
            }
        }
        for (File pop : filesToRetry) {
            /* TODO We need to parse microdescriptors ourselves, rather than
             * RelayDescriptorParser, because only we know the valid-after
             * time(s) of microdesc consensus(es) containing this
             * microdescriptor.  However, this breaks functional abstraction
             * pretty badly. */
            if (rdp != null) {
                try {
                    BufferedInputStream bis = null;
                    if (pop.getName().endsWith(".bz2")) {
                        FileInputStream fis = new FileInputStream(pop);
                        BZip2CompressorInputStream bcis = new BZip2CompressorInputStream(fis);
                        bis = new BufferedInputStream(bcis);
                    } else {
                        FileInputStream fis = new FileInputStream(pop);
                        bis = new BufferedInputStream(fis);
                    }
                    ByteArrayOutputStream baos = new ByteArrayOutputStream();
                    int len;
                    byte[] data = new byte[1024];
                    while ((len = bis.read(data, 0, 1024)) >= 0) {
                        baos.write(data, 0, len);
                    }
                    bis.close();
                    byte[] allData = baos.toByteArray();
                    BufferedReader br = new BufferedReader(new StringReader(new String(allData, "US-ASCII")));
                    String line;
                    do {
                        line = br.readLine();
                    } while (line != null && line.startsWith("@"));
                    br.close();
                    if (line == null) {
                        logger.debug("We were given an empty descriptor for parsing. Ignoring.");
                        continue;
                    }
                    if (!line.equals("onion-key")) {
                        logger.debug("Skipping non-recognized descriptor.");
                        continue;
                    }
                    SimpleDateFormat parseFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    parseFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
                    String ascii = null;
                    try {
                        ascii = new String(allData, "US-ASCII");
                    } catch (UnsupportedEncodingException e) {
                        /* No way that US-ASCII is not supported. */
                    }
                    int start = -1;
                    int end = -1;
                    String startToken = "onion-key\n";
                    while (end < ascii.length()) {
                        start = ascii.indexOf(startToken, end);
                        if (start < 0) {
                            break;
                        }
                        end = ascii.indexOf(startToken, start + 1);
                        if (end < 0) {
                            end = ascii.length();
                            if (end <= start) {
                                break;
                            }
                        }
                        byte[] descBytes = new byte[end - start];
                        System.arraycopy(allData, start, descBytes, 0, end - start);
                        String digest256Base64 = Base64.encodeBase64String(DigestUtils.sha256(descBytes))
                                .replaceAll("=", "");
                        String digest256Hex = DigestUtils.sha256Hex(descBytes);
                        if (!this.microdescriptorValidAfterTimes.containsKey(digest256Hex)) {
                            logger.debug("Could not store microdescriptor '" + digest256Hex
                                    + "', which was not contained in a microdesc consensus.");
                            continue;
                        }
                        for (String validAfterTime : this.microdescriptorValidAfterTimes.get(digest256Hex)) {
                            try {
                                long validAfter = parseFormat.parse(validAfterTime).getTime();
                                rdp.storeMicrodescriptor(descBytes, digest256Hex, digest256Base64, validAfter);
                            } catch (ParseException e) {
                                logger.warn("Could not parse valid-after time '" + validAfterTime
                                        + "'. Not storing microdescriptor.", e);
                            }
                        }
                    }
                    if (keepImportHistory) {
                        archivesImportHistory.add(pop.getName());
                    }
                    parsedFiles++;
                } catch (IOException e) {
                    problems.add(pop);
                    if (problems.size() > 3) {
                        break;
                    }
                }
            }
        }
        if (problems.isEmpty()) {
            logger.debug("Finished importing files in directory " + archivesDirectory + "/.");
        } else {
            StringBuilder sb = new StringBuilder("Failed importing files in directory "
                    + archivesDirectory + "/:");
            int printed = 0;
            for (File f : problems) {
                sb.append("\n " + f.getAbsolutePath());
                if (++printed >= 3) {
                    sb.append("\n ... more");
                    break;
                }
            }
        }
    }
    if (keepImportHistory) {
        try {
            archivesImportHistoryFile.getParentFile().mkdirs();
            BufferedWriter bw = new BufferedWriter(new FileWriter(archivesImportHistoryFile));
            for (String line : archivesImportHistory) {
                bw.write(line + "\n");
            }
            bw.close();
        } catch (IOException e) {
            logger.warn("Could not write archives import history file.");
        }
    }
    logger.info("Finished importing relay descriptors from local directory:\nParsed " + parsedFiles
            + ", ignored " + ignoredFiles + " files.");
}