List of usage examples for java.nio.file Files write
public static Path write(Path path, Iterable<? extends CharSequence> lines, OpenOption... options) throws IOException
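Before the collected examples, a minimal, self-contained sketch of the overload above (the file name and lines are illustrative, not taken from the examples below). With no OpenOption arguments the method creates or truncates the file, writes each CharSequence followed by the platform line separator, and encodes the text as UTF-8:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;

public class FilesWriteLinesExample {
  public static void main(String[] args) throws IOException {
    // Hypothetical target path; any writable location works.
    Path target = Paths.get("example-output.txt");
    List<String> lines = Arrays.asList("first line", "second line");
    // No options given: behaves as CREATE + TRUNCATE_EXISTING + WRITE, UTF-8 encoded.
    Path written = Files.write(target, lines);
    System.out.println("Wrote " + Files.size(written) + " bytes to " + written);
  }
}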
From source file:com.github.rinde.datgen.pdptw.DatasetGenerator.java
static void writePropertiesFile(Scenario scen, GeneratorSettings settings, double actualDyn, long seed,
    String fileName) {
  final DateTimeFormatter formatter = ISODateTimeFormat.dateHourMinuteSecondMillis();
  final VanLon15ProblemClass pc = (VanLon15ProblemClass) scen.getProblemClass();
  final ImmutableMap.Builder<String, Object> properties = ImmutableMap.<String, Object>builder()
      .put("problem_class", pc.getId())
      .put("id", scen.getProblemInstanceId())
      .put("dynamism_bin", pc.getDynamism())
      .put("dynamism_actual", actualDyn)
      .put("urgency", pc.getUrgency())
      .put("scale", pc.getScale())
      .put("random_seed", seed)
      .put("creation_date", formatter.print(System.currentTimeMillis()))
      .put("creator", System.getProperty("user.name"))
      .put("day_length", settings.getDayLength())
      .put("office_opening_hours", settings.getOfficeHours());
  properties.putAll(settings.getProperties());
  final ImmutableMultiset<Class<?>> eventTypes = Metrics.getEventTypeCounts(scen);
  for (final Multiset.Entry<Class<?>> en : eventTypes.entrySet()) {
    properties.put(en.getElement().getSimpleName(), en.getCount());
  }
  try {
    Files.write(Paths.get(fileName + ".properties"),
        asList(Joiner.on("\n").withKeyValueSeparator(" = ").join(properties.build())),
        Charsets.UTF_8);
  } catch (final IOException e) {
    throw new IllegalStateException(e);
  }
}
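Note that the example above passes Guava's Charsets.UTF_8 as the third argument, which selects the sibling overload write(Path, Iterable&lt;? extends CharSequence&gt;, Charset, OpenOption...) rather than the one listed in the header. A minimal sketch of the same call using the JDK's own StandardCharsets constant (the file name and lines are illustrative):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;

public class FilesWriteCharsetExample {
  public static void main(String[] args) throws IOException {
    // StandardCharsets.UTF_8 is equivalent to Guava's Charsets.UTF_8, without the extra dependency.
    Files.write(Paths.get("settings.properties"),
        Arrays.asList("key = value", "other_key = other_value"),
        StandardCharsets.UTF_8);
  }
}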
From source file:com.streamsets.pipeline.stage.origin.logtail.TestFileTailSource.java
@Test
public void testTailJson() throws Exception {
  File testDataDir = new File("target", UUID.randomUUID().toString());
  Assert.assertTrue(testDataDir.mkdirs());
  File logFile = new File(testDataDir, "logFile.txt");
  FileInfo fileInfo = new FileInfo();
  fileInfo.fileFullPath = testDataDir.getAbsolutePath() + "/logFile.txt";
  fileInfo.fileRollMode = FileRollMode.REVERSE_COUNTER;
  fileInfo.firstFile = "";
  fileInfo.patternForToken = "";
  FileTailConfigBean conf = new FileTailConfigBean();
  conf.dataFormat = DataFormat.JSON;
  conf.multiLineMainPattern = "";
  conf.batchSize = 25;
  conf.maxWaitTimeSecs = 1;
  conf.fileInfos = Arrays.asList(fileInfo);
  conf.postProcessing = PostProcessingOptions.NONE;
  conf.dataFormatConfig.jsonMaxObjectLen = 1024;
  FileTailSource source = new FileTailSource(conf, SCAN_INTERVAL);
  SourceRunner runner = new SourceRunner.Builder(FileTailDSource.class, source)
      .addOutputLane("lane")
      .addOutputLane("metadata")
      .build();
  runner.runInit();
  Files.write(logFile.toPath(), Arrays.asList("{\"a\": 1}", "[{\"b\": 2}]"), StandardCharsets.UTF_8);
  try {
    long start = System.currentTimeMillis();
    StageRunner.Output output = runner.runProduce(null, 1000);
    long end = System.currentTimeMillis();
    Assert.assertTrue(end - start >= 1000);
    Assert.assertNotNull(output.getNewOffset());
    Assert.assertEquals(2, output.getRecords().get("lane").size());
    Record record = output.getRecords().get("lane").get(0);
    Assert.assertEquals(1, record.get("/a").getValue());
    record = output.getRecords().get("lane").get(1);
    Assert.assertEquals(2, record.get("[0]/b").getValue());
  } finally {
    runner.runDestroy();
  }
}
From source file:org.clipsmonitor.monitor2015.RescueGenMap.java
/**
 * Saves to the JSON file passed as input the agent information needed to
 * reload the scene.
 * @param json
 */
public boolean SaveJsonRobotParams(File json) {
  try {
    JSONObject Info = new JSONObject();
    Info.put("robot_x", agentposition[0]);
    Info.put("robot_y", agentposition[1]);
    Info.put("robot_x_default", agentposition[0]);
    Info.put("robot_y_default", agentposition[1]);
    Info.put("robot_direction", direction);
    Info.put("robot_loaded", loaded);
    Files.write(Paths.get(json.getAbsolutePath()), Info.toString(2).getBytes(), StandardOpenOption.APPEND);
    return true;
  } catch (JSONException ex) {
    AppendLogMessage(ex.getMessage(), "error");
    return false;
  } catch (NumberFormatException ex) {
    AppendLogMessage(ex.getMessage(), "error");
    return false;
  } catch (IOException ex) {
    Exceptions.printStackTrace(ex);
    return false;
  }
}
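Unlike the overload in the header, the example above serializes the JSON to bytes and calls write(Path, byte[], OpenOption...) with StandardOpenOption.APPEND. A minimal sketch of that pattern (the path and payload are illustrative); when explicit options are given the defaults no longer apply, so the call will typically fail if the file does not already exist unless CREATE is also passed:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class FilesWriteAppendExample {
  public static void main(String[] args) throws IOException {
    byte[] payload = "{\"robot_x\": 0, \"robot_y\": 0}".getBytes(StandardCharsets.UTF_8);
    // CREATE + APPEND: create the file if missing, otherwise append to the end.
    Files.write(Paths.get("scene.json"), payload,
        StandardOpenOption.CREATE, StandardOpenOption.APPEND);
  }
}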
From source file:ca.osmcanada.osvuploadr.JPMain.java
private void Process(String dir, String accessToken) {
  File dir_photos = new File(dir);
  // filter only .jpgs
  FilenameFilter fileNameFilter = new FilenameFilter() {
    public boolean accept(File dir, String name) {
      if (name.lastIndexOf('.') > 0) {
        int lastIndex = name.lastIndexOf('.');
        String str = name.substring(lastIndex);
        if (str.toLowerCase().equals(".jpg")) {
          return true;
        }
      }
      return false;
    }
  };
  File[] file_list = dir_photos.listFiles(fileNameFilter);
  System.out.println("Pictures found:" + String.valueOf(file_list.length));
  System.out.println("Sorting files");
  // sort by modified time
  Arrays.sort(file_list, new Comparator<File>() {
    public int compare(File f1, File f2) {
      return Long.valueOf(Helper.getFileTime(f1)).compareTo(Helper.getFileTime(f2));
    }
  });
  System.out.println("End sorting");
  File f_sequence = new File(dir + "/sequence_file.txt");
  Boolean need_seq = true;
  long sequence_id = -1;
  System.out.println("Checking " + f_sequence.getPath() + " for sequence_file");
  if (f_sequence.exists()) {
    try {
      System.out.println("Found file, reading sequence id");
      List<String> id = Files.readAllLines(Paths.get(f_sequence.getPath()));
      if (id.size() > 0) {
        sequence_id = Long.parseLong(id.get(0));
        need_seq = false;
      }
    } catch (Exception ex) {
      need_seq = true;
    }
  } else {
    System.out.println("Sequence file not found, will need to request new id");
    need_seq = true;
  }
  //TODO: Load count from file
  System.out.println("Looking for count file");
  int cnt = 0;
  File f_cnt = new File(dir + "/count_file.txt");
  if (f_cnt.exists()) {
    System.out.println("Found count file:" + f_cnt.toString());
    try {
      List<String> id = Files.readAllLines(Paths.get(f_cnt.getPath()));
      if (id.size() > 0) {
        cnt = Integer.parseInt(id.get(0));
      }
    } catch (Exception ex) {
      cnt = 0;
    }
  } else {
    try {
      System.out.println("Creating new count file:" + f_cnt.getPath());
      f_cnt.createNewFile();
    } catch (Exception ex) {
      Logger.getLogger(JPMain.class.getName()).log(Level.SEVERE, null, ex);
    }
  }
  System.out.println("Current count at:" + String.valueOf(cnt));
  if (cnt > 0) {
    if (file_list.length > cnt) {
      File[] tmp = new File[file_list.length - cnt];
      int local_cnt = 0;
      for (int i = cnt; i < file_list.length; i++) {
        tmp[local_cnt] = file_list[i];
        local_cnt++;
      }
      file_list = tmp;
    }
  }
  System.out.println("Processing photos...");
  // Read file info
  for (File f : file_list) {
    try {
      System.out.println("Processing:" + f.getPath());
      ImageProperties imp = Helper.getImageProperties(f);
      System.out.println("Image Properties:");
      System.out.println("Lat:" + String.valueOf(imp.getLatitude()) + " Long:"
          + String.valueOf(imp.getLongitude()) + "Created:" + String.valueOf(Helper.getFileTime(f)));
      //TODO: Check that file has GPS coordinates
      //TODO: Remove invalid photos
      if (need_seq) {
        System.out.println("Requesting sequence ID");
        sequence_id = getSequence(imp, accessToken);
        System.out.println("Obtained:" + sequence_id);
        byte[] bytes = Long.toString(sequence_id).getBytes(StandardCharsets.UTF_8);
        Files.write(Paths.get(f_sequence.getPath()), bytes, StandardOpenOption.CREATE);
        need_seq = false;
      }
      imp.setSequenceNumber(cnt);
      cnt++;
      //TODO: Write count to file
      System.out.println("Uploading image:" + f.getPath());
      Upload_Image(imp, accessToken, sequence_id);
      System.out.println("Uploaded");
      String out = String.valueOf(cnt);
      Files.write(Paths.get(f_cnt.getPath()), out.getBytes("UTF-8"), StandardOpenOption.TRUNCATE_EXISTING);
    } catch (Exception ex) {
      JOptionPane.showMessageDialog(null, ex.getMessage(), "Error", JOptionPane.ERROR);
    }
  }
  System.out.println("Sending finish for sequence:" + sequence_id);
  SendFinished(sequence_id, accessToken);
}
From source file:com.liferay.blade.cli.command.CreateCommandTest.java
@Test
public void testCreateWorkspaceCommaDelimitedModulesDirGradleProject() throws Exception {
  File workspace = new File(_rootDir, "workspace");
  _makeWorkspace(workspace);
  File gradleProperties = new File(workspace, "gradle.properties");
  Assert.assertTrue(gradleProperties.exists());
  String configLine = System.lineSeparator() + "liferay.workspace.modules.dir=modules,foo,bar";
  Files.write(gradleProperties.toPath(), configLine.getBytes(), StandardOpenOption.APPEND);
  String[] args = { "create", "-t", "rest", "--base", workspace.getAbsolutePath(), "resttest" };
  TestUtil.runBlade(workspace, _extensionsDir, args);
  String fooBar = workspace.getAbsolutePath() + "/modules,foo,bar";
  File fooBarDir = new File(fooBar);
  Assert.assertFalse("directory named '" + fooBarDir.getName() + "' should not exist, but it does.",
      fooBarDir.exists());
}
From source file:com.streamsets.pipeline.stage.origin.logtail.TestFileTailSource.java
@Test
public void testTailLogFormat() throws Exception {
  File testDataDir = new File("target", UUID.randomUUID().toString());
  Assert.assertTrue(testDataDir.mkdirs());
  File logFile = new File(testDataDir, "logFile.txt");
  FileInfo fileInfo = new FileInfo();
  fileInfo.fileFullPath = testDataDir.getAbsolutePath() + "/logFile.txt";
  fileInfo.fileRollMode = FileRollMode.REVERSE_COUNTER;
  fileInfo.firstFile = "";
  fileInfo.patternForToken = "";
  FileTailConfigBean conf = new FileTailConfigBean();
  conf.dataFormat = DataFormat.LOG;
  conf.multiLineMainPattern = "";
  conf.batchSize = 25;
  conf.maxWaitTimeSecs = 1;
  conf.fileInfos = Arrays.asList(fileInfo);
  conf.postProcessing = PostProcessingOptions.NONE;
  conf.dataFormatConfig.logMaxObjectLen = 1024;
  conf.dataFormatConfig.logMode = LogMode.LOG4J;
  conf.dataFormatConfig.retainOriginalLine = true;
  conf.dataFormatConfig.enableLog4jCustomLogFormat = false;
  conf.dataFormatConfig.log4jCustomLogFormat = null;
  FileTailSource source = new FileTailSource(conf, SCAN_INTERVAL);
  SourceRunner runner = new SourceRunner.Builder(FileTailDSource.class, source)
      .addOutputLane("lane")
      .addOutputLane("metadata")
      .build();
  runner.runInit();
  Files.write(logFile.toPath(), Arrays.asList(LINE1, LINE2), StandardCharsets.UTF_8);
  try {
    long start = System.currentTimeMillis();
    StageRunner.Output output = runner.runProduce(null, 10);
    long end = System.currentTimeMillis();
    Assert.assertTrue(end - start >= 1000);
    Assert.assertNotNull(output.getNewOffset());
    List<Record> records = output.getRecords().get("lane");
    Assert.assertEquals(2, records.size());
    Assert.assertFalse(records.get(0).has("/truncated"));
    Record record = records.get(0);
    Assert.assertEquals(LINE1, record.get().getValueAsMap().get("originalLine").getValueAsString());
    Assert.assertFalse(record.has("/truncated"));
    Assert.assertTrue(record.has("/" + Constants.TIMESTAMP));
    Assert.assertEquals("2015-03-20 15:53:31,161", record.get("/" + Constants.TIMESTAMP).getValueAsString());
    Assert.assertTrue(record.has("/" + Constants.SEVERITY));
    Assert.assertEquals("DEBUG", record.get("/" + Constants.SEVERITY).getValueAsString());
    Assert.assertTrue(record.has("/" + Constants.CATEGORY));
    Assert.assertEquals("PipelineConfigurationValidator",
        record.get("/" + Constants.CATEGORY).getValueAsString());
    Assert.assertTrue(record.has("/" + Constants.MESSAGE));
    Assert.assertEquals("Pipeline 'test:preview' validation. valid=true, canPreview=true, issuesCount=0",
        record.get("/" + Constants.MESSAGE).getValueAsString());
    record = records.get(1);
    Assert.assertEquals(LINE2, record.get().getValueAsMap().get("originalLine").getValueAsString());
    Assert.assertFalse(record.has("/truncated"));
    Assert.assertTrue(record.has("/" + Constants.TIMESTAMP));
    Assert.assertEquals("2015-03-21 15:53:31,161", record.get("/" + Constants.TIMESTAMP).getValueAsString());
    Assert.assertTrue(record.has("/" + Constants.SEVERITY));
    Assert.assertEquals("DEBUG", record.get("/" + Constants.SEVERITY).getValueAsString());
    Assert.assertTrue(record.has("/" + Constants.CATEGORY));
    Assert.assertEquals("PipelineConfigurationValidator",
        record.get("/" + Constants.CATEGORY).getValueAsString());
    Assert.assertTrue(record.has("/" + Constants.MESSAGE));
    Assert.assertEquals("Pipeline 'test:preview' validation. valid=true, canPreview=true, issuesCount=1",
        record.get("/" + Constants.MESSAGE).getValueAsString());
  } finally {
    runner.runDestroy();
  }
}
From source file:com.streamsets.pipeline.stage.origin.logtail.TestFileTailSource.java
@Test
public void testTailLogFormatStackTrace() throws Exception {
  File testDataDir = new File("target", UUID.randomUUID().toString());
  Assert.assertTrue(testDataDir.mkdirs());
  File logFile = new File(testDataDir, "logFile.txt");
  FileInfo fileInfo = new FileInfo();
  fileInfo.fileFullPath = testDataDir.getAbsolutePath() + "/logFile.txt";
  fileInfo.fileRollMode = FileRollMode.REVERSE_COUNTER;
  fileInfo.firstFile = "";
  fileInfo.patternForToken = "";
  FileTailConfigBean conf = new FileTailConfigBean();
  conf.dataFormat = DataFormat.TEXT;
  conf.multiLineMainPattern = "^[0-9].*";
  conf.batchSize = 2;
  conf.maxWaitTimeSecs = 1000;
  conf.fileInfos = Arrays.asList(fileInfo);
  conf.postProcessing = PostProcessingOptions.NONE;
  conf.dataFormatConfig.textMaxLineLen = 2048;
  FileTailSource source = new FileTailSource(conf, SCAN_INTERVAL);
  SourceRunner runner = new SourceRunner.Builder(FileTailDSource.class, source)
      .addOutputLane("lane")
      .addOutputLane("metadata")
      .build();
  runner.runInit();
  Files.write(logFile.toPath(), Arrays.asList(LINE1, LOG_LINE_WITH_STACK_TRACE, LINE2),
      StandardCharsets.UTF_8);
  try {
    StageRunner.Output out = runner.runProduce(null, 100);
    Assert.assertEquals(2, out.getRecords().get("lane").size());
    Assert.assertEquals(LINE1, out.getRecords().get("lane").get(0).get("/text").getValueAsString().trim());
    Assert.assertEquals(LOG_LINE_WITH_STACK_TRACE,
        out.getRecords().get("lane").get(1).get("/text").getValueAsString().trim());
  } finally {
    runner.runDestroy();
  }
}
From source file:com.streamsets.datacollector.cluster.BaseClusterProvider.java
@SuppressWarnings("unchecked") private ApplicationState startPipelineInternal(File outputDir, Map<String, String> sourceInfo, PipelineConfiguration pipelineConfiguration, PipelineConfigBean pipelineConfigBean, StageLibraryTask stageLibrary, CredentialStoresTask credentialStoresTask, File etcDir, File resourcesDir, File staticWebDir, File bootstrapDir, URLClassLoader apiCL, URLClassLoader containerCL, long timeToWaitForFailure, File stagingDir, RuleDefinitions ruleDefinitions, Acl acl, InterceptorCreatorContextBuilder interceptorCreatorContextBuilder, List<String> blobStoreResources) throws IOException, TimeoutException, StageException { // create libs.tar.gz file for pipeline Map<String, List<URL>> streamsetsLibsCl = new HashMap<>(); Map<String, List<URL>> userLibsCL = new HashMap<>(); Map<String, String> sourceConfigs = new HashMap<>(); ImmutableList.Builder<StageConfiguration> pipelineConfigurations = ImmutableList.builder(); // order is important here as we don't want error stage // configs overriding source stage configs String clusterToken = UUID.randomUUID().toString(); Set<String> jarsToShip = new LinkedHashSet<>(); List<Issue> errors = new ArrayList<>(); PipelineBean pipelineBean = PipelineBeanCreator.get().create(false, stageLibrary, pipelineConfiguration, interceptorCreatorContextBuilder, errors); if (!errors.isEmpty()) { String msg = Utils.format("Found '{}' configuration errors: {}", errors.size(), errors); throw new IllegalStateException(msg); }/*from www . j a v a2 s. c om*/ pipelineConfigurations.add(pipelineBean.getErrorStage().getConfiguration()); StageBean statsStage = pipelineBean.getStatsAggregatorStage(); // statsStage is null for pre 1.3 pipelines if (statsStage != null) { pipelineConfigurations.add(statsStage.getConfiguration()); } pipelineConfigurations.add(pipelineBean.getOrigin().getConfiguration()); for (StageBean stageBean : pipelineBean.getPipelineStageBeans().getStages()) { pipelineConfigurations.add(stageBean.getConfiguration()); } ExecutionMode executionMode = ExecutionMode.STANDALONE; for (StageConfiguration stageConf : pipelineConfigurations.build()) { StageDefinition stageDef = stageLibrary.getStage(stageConf.getLibrary(), stageConf.getStageName(), false); if (stageConf.getInputLanes().isEmpty()) { for (Config conf : stageConf.getConfiguration()) { if (conf.getValue() != null) { Object value = conf.getValue(); if (value instanceof List) { List values = (List) value; if (values.isEmpty()) { getLog().debug("Conf value for " + conf.getName() + " is empty"); } else { Object first = values.get(0); if (canCastToString(first)) { sourceConfigs.put(conf.getName(), Joiner.on(",").join(values)); } else if (first instanceof Map) { addToSourceConfigs(sourceConfigs, (List<Map<String, Object>>) values); } else { getLog().info( "List is of type '{}' which cannot be converted to property value.", first.getClass().getName()); } } } else if (canCastToString(conf.getValue())) { sourceConfigs.put(conf.getName(), String.valueOf(value)); } else if (value instanceof Enum) { value = ((Enum) value).name(); sourceConfigs.put(conf.getName(), String.valueOf(value)); } else { getLog().warn("Conf value is of unknown type " + conf.getValue()); } } } executionMode = PipelineBeanCreator.get().getExecutionMode(pipelineConfiguration, new ArrayList<Issue>()); List<String> libJarsRegex = stageDef.getLibJarsRegex(); if (!libJarsRegex.isEmpty()) { for (URL jarUrl : ((URLClassLoader) stageDef.getStageClassLoader()).getURLs()) { File jarFile = new File(jarUrl.getPath()); for (String libJar : 
libJarsRegex) { Pattern pattern = Pattern.compile(libJar); Matcher matcher = pattern.matcher(jarFile.getName()); if (matcher.matches()) { jarsToShip.add(jarFile.getAbsolutePath()); } } } } } // Add stage own stage library to the jars that needs to be shipped extractClassLoaderInfo(streamsetsLibsCl, userLibsCL, stageDef.getStageClassLoader(), stageDef.getClassName()); // TODO: Get extras dir from the env var. // Then traverse each jar's parent (getParent method) and add only the ones who has the extras dir as parent. // Add all jars of stagelib to --jars. We only really need stuff from the extras directory. if (stageDef.getClassName().equals(SPARK_PROCESSOR_STAGE)) { getLog().info("Spark processor found in pipeline, adding to spark-submit"); File extras = new File(System.getenv("STREAMSETS_LIBRARIES_EXTRA_DIR")); getLog().info("Found extras dir: " + extras.toString()); File stageLibExtras = new File(extras.toString() + "/" + stageConf.getLibrary() + "/" + "lib"); getLog().info("StageLib Extras dir: " + stageLibExtras.toString()); File[] extraJarsForStageLib = stageLibExtras.listFiles(); if (extraJarsForStageLib != null) { stream(extraJarsForStageLib).map(File::toString).forEach(jarsToShip::add); } addJarsToJarsList((URLClassLoader) stageDef.getStageClassLoader(), jarsToShip, "streamsets-datacollector-spark-api-[0-9]+.*"); } } for (CredentialStoreDefinition credentialStoreDefinition : credentialStoresTask .getConfiguredStoreDefinititions()) { getLog().info("Adding Credential store stage library for: {}", credentialStoreDefinition.getName()); extractClassLoaderInfo(streamsetsLibsCl, userLibsCL, credentialStoreDefinition.getStageLibraryDefinition().getClassLoader(), credentialStoreDefinition.getStoreClass().getName()); } // We're shipping several stage libraries to the backend and those libraries can have stages that depends on various // different services. Our bootstrap procedure will however terminate SDC start up if we have stage that doesn't have // all required services available. Hence we go through all the stages that are being sent and ship all their // services to the cluster as well. 
for (StageDefinition stageDef : stageLibrary.getStages()) { String stageLibName = stageDef.getLibrary(); if (streamsetsLibsCl.containsKey(stageLibName) || userLibsCL.containsKey(stageLibName)) { for (ServiceDependencyDefinition serviceDep : stageDef.getServices()) { ServiceDefinition serviceDef = stageLibrary.getServiceDefinition(serviceDep.getService(), false); getLog().debug("Adding service {} for stage {}", serviceDef.getClassName(), stageDef.getName()); extractClassLoaderInfo(streamsetsLibsCl, userLibsCL, serviceDef.getStageClassLoader(), serviceDef.getClassName()); } } } for (InterceptorDefinition interceptor : stageLibrary.getInterceptorDefinitions()) { getLog().debug("Adding interceptor {} for stage {} ", interceptor.getName(), interceptor.getLibraryDefinition().getName()); extractClassLoaderInfo(streamsetsLibsCl, userLibsCL, interceptor.getClassLoader(), interceptor.getName()); } if (configuration != null && configuration.hasName(LineagePublisherConstants.CONFIG_LINEAGE_PUBLISHERS)) { String confDefName = LineagePublisherConstants .configDef(configuration.get(LineagePublisherConstants.CONFIG_LINEAGE_PUBLISHERS, null)); String lineagePublisherDef = configuration.get(confDefName, null); if (lineagePublisherDef != null) { String[] configDef = lineagePublisherDef.split("::"); LineagePublisherDefinition def = stageLibrary.getLineagePublisherDefinition(configDef[0], configDef[1]); getLog().debug("Adding Lineage Publisher {}:{}", def.getClassLoader(), def.getKlass().getName()); extractClassLoaderInfo(streamsetsLibsCl, userLibsCL, def.getClassLoader(), def.getKlass().getName()); } } if (executionMode == ExecutionMode.CLUSTER_YARN_STREAMING || executionMode == ExecutionMode.CLUSTER_MESOS_STREAMING) { getLog().info("Execution Mode is CLUSTER_STREAMING. Adding container jar and API jar to spark-submit"); addJarsToJarsList(containerCL, jarsToShip, "streamsets-datacollector-container-[0-9]+.*"); // EscapeUtil is required by RecordImpl#get() and RecordImpl#set(), and has no additional dependencies, so // ship this as well. 
addJarsToJarsList(containerCL, jarsToShip, "streamsets-datacollector-common-[0-9]+.*"); addJarsToJarsList(apiCL, jarsToShip, "streamsets-datacollector-api-[0-9]+.*"); } getLog().info("stagingDir = '{}'", stagingDir); getLog().info("bootstrapDir = '{}'", bootstrapDir); getLog().info("etcDir = '{}'", etcDir); getLog().info("resourcesDir = '{}'", resourcesDir); getLog().info("staticWebDir = '{}'", staticWebDir); Utils.checkState(staticWebDir.isDirectory(), Utils.format("Expected '{}' to be a directory", staticWebDir)); File libsTarGz = new File(stagingDir, "libs.tar.gz"); try { TarFileCreator.createLibsTarGz(findJars("api", apiCL, null), findJars("container", containerCL, null), streamsetsLibsCl, userLibsCL, staticWebDir, libsTarGz); } catch (Exception ex) { String msg = errorString("Serializing classpath: '{}'", ex); throw new RuntimeException(msg, ex); } File resourcesTarGz = new File(stagingDir, "resources.tar.gz"); try { resourcesDir = createDirectoryClone(resourcesDir, "resources", stagingDir); TarFileCreator.createTarGz(resourcesDir, resourcesTarGz); } catch (Exception ex) { String msg = errorString("Serializing resources directory: '{}': {}", resourcesDir.getName(), ex); throw new RuntimeException(msg, ex); } File etcTarGz = new File(stagingDir, "etc.tar.gz"); File sdcPropertiesFile; File bootstrapJar = getBootstrapMainJar(bootstrapDir, BOOTSTRAP_MAIN_JAR_PATTERN); File clusterBootstrapJar; String mesosHostingJarDir = null; String mesosURL = null; Pattern clusterBootstrapJarFile = findClusterBootstrapJar(executionMode, pipelineConfiguration, stageLibrary); clusterBootstrapJar = getBootstrapClusterJar(bootstrapDir, clusterBootstrapJarFile); String clusterBootstrapApiJar = getBootstrapClusterJar(bootstrapDir, CLUSTER_BOOTSTRAP_API_JAR_PATTERN) .getAbsolutePath(); if (executionMode == ExecutionMode.CLUSTER_MESOS_STREAMING) { String topic = sourceConfigs.get(TOPIC); String pipelineName = sourceInfo.get(ClusterModeConstants.CLUSTER_PIPELINE_NAME); mesosHostingJarDir = MESOS_HOSTING_DIR_PARENT + File.separatorChar + getSha256(getMesosHostingDir(topic, pipelineName)); mesosURL = runtimeInfo.getBaseHttpUrl() + File.separatorChar + mesosHostingJarDir + File.separatorChar + clusterBootstrapJar.getName(); } else if (executionMode == ExecutionMode.CLUSTER_YARN_STREAMING) { jarsToShip.add(clusterBootstrapJar.getAbsolutePath()); } try { etcDir = createDirectoryClone(etcDir, "etc", stagingDir); if (executionMode == ExecutionMode.CLUSTER_MESOS_STREAMING) { List<String> logLines = ClusterLogConfigUtils.getLogContent(runtimeInfo, "/cluster-spark-log4j.properties"); File log4jProperty = new File(etcDir, runtimeInfo.getLog4jPropertiesFileName()); if (!log4jProperty.isFile()) { throw new IllegalStateException( Utils.format("Log4j config file doesn't exist: '{}'", log4jProperty.getAbsolutePath())); } Files.write(log4jProperty.toPath(), logLines, Charset.defaultCharset()); } PipelineInfo pipelineInfo = Utils.checkNotNull(pipelineConfiguration.getInfo(), "Pipeline Info"); String pipelineName = pipelineInfo.getPipelineId(); File rootDataDir = new File(etcDir, "data"); File pipelineBaseDir = new File(rootDataDir, PipelineDirectoryUtil.PIPELINE_INFO_BASE_DIR); File pipelineDir = new File(pipelineBaseDir, PipelineUtils.escapedPipelineName(pipelineName)); if (!pipelineDir.exists()) { if (!pipelineDir.mkdirs()) { throw new RuntimeException("Failed to create pipeline directory " + pipelineDir.getPath()); } } File pipelineRunInfoDir = new File( new File(new File(rootDataDir, PipelineDirectoryUtil.PIPELINE_BASE_DIR), 
PipelineUtils.escapedPipelineName(pipelineName)), "0"); if (!pipelineRunInfoDir.mkdirs()) { throw new RuntimeException( Utils.format("Failed to create pipeline directory: '{}'", pipelineRunInfoDir.getPath())); } File pipelineFile = new File(pipelineDir, FilePipelineStoreTask.PIPELINE_FILE); ObjectMapperFactory.getOneLine().writeValue(pipelineFile, BeanHelper.wrapPipelineConfiguration(pipelineConfiguration)); File infoFile = new File(pipelineDir, FilePipelineStoreTask.INFO_FILE); ObjectMapperFactory.getOneLine().writeValue(infoFile, BeanHelper.wrapPipelineInfo(pipelineInfo)); Utils.checkNotNull(ruleDefinitions, "ruleDefinitions"); File rulesFile = new File(pipelineDir, FilePipelineStoreTask.RULES_FILE); ObjectMapperFactory.getOneLine().writeValue(rulesFile, BeanHelper.wrapRuleDefinitions(ruleDefinitions)); if (null != acl) { // acl could be null if permissions is not enabled File aclFile = new File(pipelineDir, FileAclStoreTask.ACL_FILE); ObjectMapperFactory.getOneLine().writeValue(aclFile, AclDtoJsonMapper.INSTANCE.toAclJson(acl)); } copyBlobstore(blobStoreResources, rootDataDir, pipelineDir); sdcPropertiesFile = new File(etcDir, "sdc.properties"); if (executionMode == ExecutionMode.CLUSTER_MESOS_STREAMING) { String hdfsS3ConfDirValue = PipelineBeanCreator.get().getHdfsS3ConfDirectory(pipelineConfiguration); if (hdfsS3ConfDirValue != null && !hdfsS3ConfDirValue.isEmpty()) { File hdfsS3ConfDir = new File(resourcesDir, hdfsS3ConfDirValue).getAbsoluteFile(); if (!hdfsS3ConfDir.exists()) { String msg = Utils.format("HDFS/S3 Checkpoint Configuration Directory '{}' doesn't exist", hdfsS3ConfDir.getPath()); throw new IllegalArgumentException(msg); } else { File coreSite = new File(hdfsS3ConfDir, "core-site.xml"); if (!coreSite.exists()) { String msg = Utils.format( "HDFS/S3 Checkpoint Configuration file core-site.xml '{}' doesn't exist", coreSite.getPath()); throw new IllegalStateException(msg); } sourceConfigs.put("hdfsS3ConfDir", hdfsS3ConfDirValue); } } else { throw new IllegalStateException("HDFS/S3 Checkpoint configuration directory is required"); } } // Adding SCH generated properties to sdc property to ship together List<File> additionalPropFiles = new LinkedList<>(); if (runtimeInfo.isDPMEnabled()) { File schPropertiesFile = new File(runtimeInfo.getDataDir(), RuntimeInfo.SCH_CONF_OVERRIDE); if (schPropertiesFile.exists()) { additionalPropFiles.add(schPropertiesFile); } } rewriteProperties(sdcPropertiesFile, additionalPropFiles, etcDir, sourceConfigs, sourceInfo, clusterToken, Optional.ofNullable(mesosURL)); TarFileCreator.createTarGz(etcDir, etcTarGz); } catch (IOException | RuntimeException ex) { String msg = errorString("Error while preparing for cluster job submission: {}", ex); throw new RuntimeException(msg, ex); } File log4jProperties = new File(stagingDir, "log4j.properties"); InputStream clusterLog4jProperties = null; try { List<String> lines = null; if (executionMode == ExecutionMode.CLUSTER_BATCH) { lines = getLog4jConfig(executionMode, "/cluster-mr-log4j.properties"); } else if (executionMode == ExecutionMode.CLUSTER_YARN_STREAMING) { lines = getLog4jConfig(executionMode, "/cluster-spark-log4j.properties"); } if (lines != null) { Files.write(log4jProperties.toPath(), lines, Charset.defaultCharset()); } } catch (IOException ex) { String msg = errorString("copying log4j configuration: {}", ex); throw new RuntimeException(msg, ex); } finally { if (clusterLog4jProperties != null) { IOUtils.closeQuietly(clusterLog4jProperties); } } return startPipelineExecute(outputDir, sourceInfo, 
pipelineConfiguration, pipelineConfigBean, timeToWaitForFailure, stagingDir, //* required by shell script clusterToken, clusterBootstrapJar, //* Main Jar bootstrapJar, //* local JAR 1 jarsToShip, //* local JARs 2+ libsTarGz, //* archive resourcesTarGz, //* archive etcTarGz, //* archive sdcPropertiesFile, //* needed for driver invocation log4jProperties, //* need for driver invocation mesosHostingJarDir, mesosURL, clusterBootstrapApiJar, errors); }
From source file:com.streamsets.pipeline.stage.origin.logtail.TestFileTailSource.java
@Test
public void testTailFilesDeletion() throws Exception {
  File testDataDir = new File("target", UUID.randomUUID().toString());
  File testDataDir1 = new File(testDataDir, UUID.randomUUID().toString());
  File testDataDir2 = new File(testDataDir, UUID.randomUUID().toString());
  Assert.assertTrue(testDataDir1.mkdirs());
  Assert.assertTrue(testDataDir2.mkdirs());
  Path file1 = new File(testDataDir1, "log1.txt").toPath();
  Path file2 = new File(testDataDir2, "log2.txt").toPath();
  Files.write(file1, Arrays.asList("Hello"), UTF8);
  Files.write(file2, Arrays.asList("Hola"), UTF8);
  FileInfo fileInfo1 = new FileInfo();
  fileInfo1.fileFullPath = testDataDir.getAbsolutePath() + "/*/log*.txt";
  fileInfo1.fileRollMode = FileRollMode.REVERSE_COUNTER;
  fileInfo1.firstFile = "";
  fileInfo1.patternForToken = "";
  FileTailConfigBean conf = new FileTailConfigBean();
  conf.dataFormat = DataFormat.TEXT;
  conf.multiLineMainPattern = "";
  conf.batchSize = 25;
  conf.maxWaitTimeSecs = 1;
  conf.fileInfos = Arrays.asList(fileInfo1);
  conf.postProcessing = PostProcessingOptions.NONE;
  conf.dataFormatConfig.textMaxLineLen = 1024;
  FileTailSource source = new FileTailSource(conf, SCAN_INTERVAL);
  SourceRunner runner = new SourceRunner.Builder(FileTailDSource.class, source)
      .addOutputLane("lane")
      .addOutputLane("metadata")
      .build();
  try {
    runner.runInit();
    StageRunner.Output output = runner.runProduce(null, 10);
    output = runner.runProduce(output.getNewOffset(), 10);
    Assert.assertTrue(output.getNewOffset().contains("log1.txt"));
    Assert.assertTrue(output.getNewOffset().contains("log2.txt"));
    Files.delete(file1);
    Files.delete(testDataDir1.toPath());
    output = runner.runProduce(output.getNewOffset(), 10);
    output = runner.runProduce(output.getNewOffset(), 10);
    Assert.assertFalse(output.getNewOffset().contains("log1.txt"));
    Assert.assertTrue(output.getNewOffset().contains("log2.txt"));
  } finally {
    runner.runDestroy();
  }
}
From source file:adalid.util.velocity.BaseBuilder.java
private boolean write(String target, List<String> lines, Charset charset) {
  try {
    Path path = Paths.get(target);
    Files.write(path, lines, charset);
    return true;
  } catch (IOException ex) {
    writingErrors++;
    logger.fatal(ex);
    logger.fatal("\t" + target + " could not be written");
  }
  return false;
}